repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
alibaba/nacos | common/src/main/java/com/alibaba/nacos/common/model/RestResultUtils.java | 2416 | /*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.common.model;
import com.alibaba.nacos.common.model.core.IResultCode;
/**
* Rest result utils.
*
* @author <a href="mailto:liaochuntao@live.com">liaochuntao</a>
*/
/**
 * Static factory helpers for building {@link RestResult} instances with the
 * common success/failure status codes, so callers do not repeat builder chains.
 *
 * @author <a href="mailto:liaochuntao@live.com">liaochuntao</a>
 */
public class RestResultUtils {

    /** Status code reported for successful results (HTTP-style 200). */
    private static final int CODE_SUCCESS = 200;

    /** Default status code reported for failed results (HTTP-style 500). */
    private static final int CODE_FAILED = 500;

    private RestResultUtils() {
        // static utility class; never instantiated
    }

    /** @return a successful result with code 200 and no payload */
    public static <T> RestResult<T> success() {
        return RestResult.<T>builder().withCode(CODE_SUCCESS).build();
    }

    /** @return a successful result with code 200 carrying {@code data} */
    public static <T> RestResult<T> success(T data) {
        return RestResult.<T>builder().withCode(CODE_SUCCESS).withData(data).build();
    }

    /** @return a successful result with code 200, a message and {@code data} */
    public static <T> RestResult<T> success(String msg, T data) {
        return RestResult.<T>builder().withCode(CODE_SUCCESS).withMsg(msg).withData(data).build();
    }

    /** @return a successful result with a caller-supplied code and {@code data} */
    public static <T> RestResult<T> success(int code, T data) {
        return RestResult.<T>builder().withCode(code).withData(data).build();
    }

    /** @return a failed result with code 500 and no payload */
    public static <T> RestResult<T> failed() {
        return RestResult.<T>builder().withCode(CODE_FAILED).build();
    }

    /** @return a failed result with code 500 and an error message */
    public static <T> RestResult<T> failed(String errMsg) {
        return RestResult.<T>builder().withCode(CODE_FAILED).withMsg(errMsg).build();
    }

    /** @return a failed result with a caller-supplied code and {@code data} */
    public static <T> RestResult<T> failed(int code, T data) {
        return RestResult.<T>builder().withCode(code).withData(data).build();
    }

    /** @return a failed result with a caller-supplied code, {@code data} and message */
    public static <T> RestResult<T> failed(int code, T data, String errMsg) {
        return RestResult.<T>builder().withCode(code).withData(data).withMsg(errMsg).build();
    }

    /** @return a failed result with a caller-supplied code and an error message */
    public static <T> RestResult<T> failedWithMsg(int code, String errMsg) {
        return RestResult.<T>builder().withCode(code).withMsg(errMsg).build();
    }

    /** @return a result whose code and message come from {@code resultCode}, carrying {@code data} */
    public static <T> RestResult<T> buildResult(IResultCode resultCode, T data) {
        return RestResult.<T>builder().withCode(resultCode.getCode()).withMsg(resultCode.getCodeMsg()).withData(data).build();
    }
}
| apache-2.0 |
realityforge/gwt-packetio-example | src/main/java/org/realityforge/gwt/packetio/client/event/MessageEvent.java | 1278 | package org.realityforge.gwt.packetio.client.event;
import com.google.gwt.event.shared.EventHandler;
import java.util.Collections;
import java.util.Map;
import javax.annotation.Nonnull;
import org.realityforge.gwt.packetio.client.PacketIO;
/**
 * Event fired when a message arrives on a {@link PacketIO} connection.
 * Carries the message payload plus an immutable view of its context map.
 */
public class MessageEvent
  extends PacketIOEvent<MessageEvent.Handler>
{
  /** Handler contract for receiving {@link MessageEvent}s. */
  public interface Handler
    extends EventHandler
  {
    void onMessageEvent( @Nonnull MessageEvent event );
  }

  // Single shared GWT event type; all MessageEvent instances dispatch through it.
  private static final Type<Handler> TYPE = new Type<>();

  /** @return the GWT event type used to register {@link Handler}s. */
  public static Type<Handler> getType()
  {
    return TYPE;
  }

  // Context metadata for the message; wrapped unmodifiable in the constructor.
  private final Map<String,String> _context;
  // Raw message payload.
  private final String _data;

  /**
   * @param packetIO the connection that produced the message.
   * @param context  message metadata; exposed to handlers as an unmodifiable view
   *                 (the caller's map is NOT copied, only wrapped).
   * @param data     the message payload.
   */
  public MessageEvent( @Nonnull final PacketIO packetIO,
                       @Nonnull final Map<String,String> context,
                       @Nonnull final String data )
  {
    super( packetIO );
    _context = Collections.unmodifiableMap( context );
    _data = data;
  }

  /** @return an unmodifiable view of the message context. */
  @Nonnull
  public Map<String, String> getContext()
  {
    return _context;
  }

  /** @return the message payload. */
  @Nonnull
  public String getData()
  {
    return _data;
  }

  @Override
  public Type<Handler> getAssociatedType()
  {
    return MessageEvent.getType();
  }

  @Override
  protected void dispatch( @Nonnull final Handler handler )
  {
    handler.onMessageEvent( this );
  }
}
| apache-2.0 |
futurice/freesound-android | app/src/test/java/com/futurice/freesound/feature/audio/ExoPlayerProgressObservableTest.java | 4020 | /*
* Copyright 2017 Futurice GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.futurice.freesound.feature.audio;
import com.google.android.exoplayer2.ExoPlayer;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import io.reactivex.observers.TestObserver;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ExoPlayerProgressObservable}: verifies listener
 * registration/removal on subscribe/dispose and emission of the player's
 * current position, including the optional initial emission.
 */
public class ExoPlayerProgressObservableTest {

    @Mock
    private ExoPlayer exoPlayer;

    // Captures the listener the observable registers so tests can invoke its callback directly.
    @Captor
    private ArgumentCaptor<ExoPlayer.EventListener> listenerCaptor;

    private ExoPlayerProgressObservable exoPlayerProgressObservable;

    public ExoPlayerProgressObservableTest() {
        // Mocks are initialized in the constructor (not @Before) so the
        // observable under test can be built with them immediately.
        MockitoAnnotations.initMocks(this);
        exoPlayerProgressObservable = new ExoPlayerProgressObservable(exoPlayer, false);
    }

    @Test
    public void doesNothingToExoPlayer_beforeSubscribed_whenNoEmittingInitialValue() {
        verifyZeroInteractions(exoPlayer);
    }

    @Test
    public void addsListener_whenSubscribing() {
        exoPlayerProgressObservable.subscribe();
        verify(exoPlayer).addListener(any(ExoPlayer.EventListener.class));
    }

    @Test
    public void removesListener_whenUnsubscribing() {
        exoPlayerProgressObservable.subscribe().dispose();
        verify(exoPlayer).removeListener(any(ExoPlayer.EventListener.class));
    }

    @Test
    public void emitsCallbackValue() {
        long expected = 100L;
        TestObserver<Long> testObserver = exoPlayerProgressObservable.test();

        // Drive the captured listener as if the player reported progress.
        new ExoPlayerTestEventGenerator()
                .moveToProgressTime(expected)
                .invokeListenerCallback();

        testObserver.assertValue(expected)
                .assertNotTerminated();
    }

    @Test
    public void doesNotEmitAfterDisposed() {
        TestObserver<Long> testObserver = exoPlayerProgressObservable.test();
        testObserver.dispose();

        // Callback after disposal must not reach the observer.
        new ExoPlayerTestEventGenerator()
                .invokeListenerCallback();

        testObserver.assertNoValues();
    }

    // Special tests for initial emit

    @Test
    public void doesNotEmitInitialValue_whenNotSet() {
        ExoPlayerProgressObservable observable = new ExoPlayerProgressObservable(exoPlayer, false);
        TestObserver<Long> testObserver = observable.test();
        testObserver.assertNoValues();
    }

    @Test
    public void emitsInitialValue_whenSet() {
        long expected = 1000L;
        when(exoPlayer.getCurrentPosition()).thenReturn(expected);
        ExoPlayerProgressObservable observable = new ExoPlayerProgressObservable(exoPlayer, true);
        TestObserver<Long> testObserver = observable.test();
        testObserver.assertValue(expected);
    }

    // Helpers

    /**
     * Test helper that grabs the registered player listener and lets tests
     * set the mocked position and fire the timeline-changed callback.
     */
    private class ExoPlayerTestEventGenerator {

        ExoPlayerTestEventGenerator() {
            // Implicitly asserts addListener was called exactly once and captures the listener.
            verify(exoPlayer).addListener(listenerCaptor.capture());
        }

        ExoPlayerTestEventGenerator moveToProgressTime(long progress) {
            when(exoPlayer.getCurrentPosition()).thenReturn(progress);
            return this;
        }

        ExoPlayerTestEventGenerator invokeListenerCallback() {
            // don't care about value and are nullable anyway
            listenerCaptor.getValue().onTimelineChanged(null, null);
            return this;
        }
    }
}
| apache-2.0 |
rvillablanca/java-notes | src/main/java/capacitacion/capitulo3/dependenciaordenacion/SuperClase.java | 633 | /*
* El objetivo es ver el orden en que son inicializados todos los campos de una
* clase, en particular cuando se trata de una subclase.
*/
package capacitacion.capitulo3.dependenciaordenacion;
/**
 * Demonstrates Java instance-initialization order. For this class the field
 * {@code campoSuper} takes the values 1 -> 2 -> 3, because the JLS runs, in
 * textual order: (1) field initializers, (2) instance initializer blocks,
 * (3) the constructor body.
 */
public class SuperClase {

    // Step 1: field initializer runs first, setting campoSuper = 1.
    private int campoSuper = inicializaCampoSuper();

    // Step 2: instance initializer block runs after all field initializers
    // that precede it textually, overwriting the field with 2.
    {
        System.out.println("En el bloque de super...");
        campoSuper = 2;
    }

    // Step 3: constructor body runs last, leaving the field at 3.
    public SuperClase() {
        System.out.println("En el Constructor de super...");
        campoSuper = 3;
    }

    // Invoked by the field initializer (step 1); its println fires before
    // the initializer block and the constructor.
    private int inicializaCampoSuper() {
        System.out.println("En el inicializador de super...");
        return 1;
    }
}
| apache-2.0 |
xiaotangai/KnowledgeBase | src/main/java/liu/study/api/java/awt/classes/XCardLayout.java | 2875 | package liu.study.api.java.awt.classes;
import java.awt.CardLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
/**
 * A {@link CardLayout} subclass reserved for future customization.
 *
 * <p>The previous version overrode every {@code CardLayout} method with an
 * auto-generated stub that only delegated to {@code super}; those pass-through
 * overrides added no behavior and have been removed. This class is therefore
 * behaviorally identical to {@link CardLayout}. Re-introduce an override only
 * when it actually changes behavior.
 */
public class XCardLayout extends CardLayout {

    // Preserved from the original class so serialized instances stay compatible.
    private static final long serialVersionUID = 1278869288290567497L;
}
| apache-2.0 |
zhouyu411502/springboot-quick-build | springboot-quick-autoconfigures/springboot-autoconfigure-redis/src/main/java/org/springboot/quick/autoconfigure/redis/impl/RedisServiceImpl.java | 5866 | package org.springboot.quick.autoconfigure.redis.impl;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springboot.quick.autoconfigure.redis.RedisService;
import org.springframework.dao.DataAccessException;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.ListOperations;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.SetOperations;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.data.redis.core.ZSetOperations;
/**
* 封装redis 缓存服务器服务接口
*
* @author chababa
*
*/
//@Service(value = "redisService")
/**
 * Redis cache service built on Spring Data's {@link RedisTemplate}.
 *
 * <p>All String-to-byte conversions use UTF-8 explicitly: the previous
 * implementation decoded values as "utf-8" but encoded keys with the platform
 * default charset, which breaks on non-UTF-8 platforms.
 *
 * @author chababa
 */
//@Service(value = "redisService")
public class RedisServiceImpl implements RedisService {

    private RedisTemplate<String, String> redisTemplate;

    public RedisServiceImpl(RedisTemplate<String, String> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /**
     * Deletes the given keys.
     *
     * @param keys keys to delete
     * @return the total number of keys actually removed
     */
    public long del(final String... keys) {
        return redisTemplate.execute(new RedisCallback<Long>() {
            public Long doInRedis(RedisConnection connection) throws DataAccessException {
                long result = 0;
                for (int i = 0; i < keys.length; i++) {
                    // Accumulate instead of overwriting: the previous code returned
                    // only the delete count of the LAST key.
                    result += connection.del(keys[i].getBytes(StandardCharsets.UTF_8));
                }
                return result;
            }
        });
    }

    /**
     * Stores a raw key/value pair, optionally with a time-to-live.
     *
     * @param key      raw key bytes
     * @param value    raw value bytes
     * @param liveTime TTL in seconds; values <= 0 mean "no expiry"
     */
    public void set(final byte[] key, final byte[] value, final long liveTime) {
        redisTemplate.execute(new RedisCallback<Long>() {
            public Long doInRedis(RedisConnection connection) throws DataAccessException {
                connection.set(key, value);
                if (liveTime > 0) {
                    connection.expire(key, liveTime);
                }
                return 1L;
            }
        });
    }

    /**
     * Stores a String key/value pair with a TTL (seconds); TTL <= 0 means no expiry.
     */
    public void set(String key, String value, long liveTime) {
        this.set(key.getBytes(StandardCharsets.UTF_8), value.getBytes(StandardCharsets.UTF_8), liveTime);
    }

    /** Stores a String key/value pair without expiry. */
    public void set(String key, String value) {
        this.set(key, value, 0L);
    }

    /** Stores a raw key/value pair without expiry. */
    public void set(byte[] key, byte[] value) {
        this.set(key, value, 0L);
    }

    /**
     * Reads the String value stored under {@code key}.
     *
     * @param key the key to look up
     * @return the UTF-8 decoded value, or {@code null} if the key is absent
     *         (the previous implementation threw a NullPointerException on a miss)
     */
    public String get(final String key) {
        return redisTemplate.execute(new RedisCallback<String>() {
            public String doInRedis(RedisConnection connection) throws DataAccessException {
                byte[] value = connection.get(key.getBytes(StandardCharsets.UTF_8));
                if (value == null) {
                    // Key not present: report absence instead of NPE-ing.
                    return null;
                }
                // StandardCharsets.UTF_8 never throws UnsupportedEncodingException,
                // so no catch-and-return-"" fallback is needed anymore.
                return new String(value, StandardCharsets.UTF_8);
            }
        });
    }

    /** @return the value of {@code hashKey} inside the hash stored at {@code key} */
    @Override
    public String hget(String key, String hashKey){
        return hashOps().get(key, hashKey);
    }

    /** Deletes the given hash fields from the hash stored at {@code key}. */
    @Override
    public void hdel(String key, Object... hashKeys){
        hashOps().delete(key, hashKeys);
    }

    /** Sets a single field of the hash stored at {@code key}. */
    @Override
    public void hset(String key, String hashKey, String value){
        hashOps().put(key, hashKey, value);
    }

    /** @return the values of multiple hash fields, in the order of {@code hashKeys} */
    @Override
    public List<String> hmget(String key, Collection<String> hashKeys){
        return hashOps().multiGet(key, hashKeys);
    }

    /** Writes all entries of {@code m} into the hash stored at {@code key}. */
    @Override
    public void hmset(String key, Map<String, String> m){
        hashOps().putAll(key, m);
    }

    /** Adds members to the set at {@code key}; returns the number actually added. */
    @Override
    public Long sadd(String key, String... values){
        return setOps().add(key, values);
    }

    /** @return all members of the set stored at {@code key} */
    @Override
    public Set<String> smembers(String key){
        return setOps().members(key);
    }

    /** Adds {@code value} with {@code score} to the sorted set at {@code key}. */
    @Override
    public Boolean zadd(String key, String value, double score){
        return zSetOps().add(key, value, score);
    }

    /** @return all members of the sorted set at {@code key}, in score order */
    @Override
    public Set<String> zrange(String key){
        return zrange(key, 0, -1);
    }

    /** @return the members of the sorted set between ranks {@code start} and {@code end} */
    @Override
    public Set<String> zrange(String key, int start, int end){
        return zSetOps().range(key,start,end);
    }

    /** Appends {@code value} to the list at {@code key}; returns the new length. */
    @Override
    public Long rpush(String key, String value){
        return listOps().rightPush(key, value);
    }

    /** Appends all {@code values} to the list at {@code key}; returns the new length. */
    @Override
    public Long rpush(String key, String ... values){
        return listOps().rightPushAll(key, values);
    }

    /** Prepends {@code value} to the list at {@code key}; returns the new length. */
    @Override
    public Long lpush(String key, String value){
        return listOps().leftPush(key, value);
    }

    /** Prepends all {@code value}s to the list at {@code key}; returns the new length. */
    @Override
    public Long lpush(String key, String ... value){
        return listOps().leftPushAll(key, value);
    }

    /**
     * @param pattern key pattern (e.g. "user:*"); beware KEYS is O(n) on the server
     * @return all keys matching {@code pattern}
     */
    public Set<String> keys(String pattern) {
        return redisTemplate.keys(pattern);
    }

    /**
     * @param key the key to probe
     * @return true if the key exists
     */
    public boolean exists(final String key) {
        return redisTemplate.execute(new RedisCallback<Boolean>() {
            public Boolean doInRedis(RedisConnection connection) throws DataAccessException {
                return connection.exists(key.getBytes(StandardCharsets.UTF_8));
            }
        });
    }

    /**
     * Flushes the CURRENT database (destructive!).
     *
     * @return "ok" on completion
     */
    public String flushDB() {
        return redisTemplate.execute(new RedisCallback<String>() {
            public String doInRedis(RedisConnection connection) throws DataAccessException {
                connection.flushDb();
                return "ok";
            }
        });
    }

    /** @return the number of keys in the current database */
    public long dbSize() {
        return redisTemplate.execute(new RedisCallback<Long>() {
            public Long doInRedis(RedisConnection connection) throws DataAccessException {
                return connection.dbSize();
            }
        });
    }

    /** @return the server's PING response ("PONG" when healthy) */
    public String ping() {
        return redisTemplate.execute(new RedisCallback<String>() {
            public String doInRedis(RedisConnection connection) throws DataAccessException {
                return connection.ping();
            }
        });
    }

    public HashOperations<String, String, String> hashOps(){
        return redisTemplate.opsForHash();
    }

    public ValueOperations<String, String> valueOps(){
        return redisTemplate.opsForValue();
    }

    public ListOperations<String, String> listOps(){
        return redisTemplate.opsForList();
    }

    public ZSetOperations<String, String> zSetOps(){
        return redisTemplate.opsForZSet();
    }

    public SetOperations<String, String> setOps(){
        return redisTemplate.opsForSet();
    }
}
| apache-2.0 |
torakiki/sambox | src/main/java/org/sejda/sambox/input/ExistingIndirectCOSObject.java | 2723 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sejda.sambox.input;
import java.io.IOException;
import java.util.Optional;
import org.sejda.sambox.cos.COSBase;
import org.sejda.sambox.cos.COSNull;
import org.sejda.sambox.cos.COSObjectKey;
import org.sejda.sambox.cos.COSVisitor;
import org.sejda.sambox.cos.DisposableCOSObject;
import org.sejda.sambox.cos.IndirectCOSObjectIdentifier;
/**
* An indirect object belonging to an existing pdf document. Indirect objects are defined in Chap 7.3.10 of PDF
* 32000-1:2008. The {@link COSBase} wrapped by an {@link ExistingIndirectCOSObject} is loaded on demand by querying the
* associated {@link IndirectObjectsProvider} when the {@link ExistingIndirectCOSObject#getCOSObject()} is called.
*
* @author Andrea Vacondio
*/
public class ExistingIndirectCOSObject extends COSBase implements DisposableCOSObject
{
    // Immutable identity (object number, generation, source document id) of the
    // wrapped indirect object. Made final: neither field is ever reassigned.
    private final IndirectCOSObjectIdentifier id;

    // Resolves (and can later release) the underlying COSBase on demand.
    private final IndirectObjectsProvider provider;

    ExistingIndirectCOSObject(long objectNumber, int generationNumber,
            IndirectObjectsProvider provider)
    {
        this.id = new IndirectCOSObjectIdentifier(new COSObjectKey(objectNumber, generationNumber),
                provider.id());
        this.provider = provider;
    }

    /**
     * Loads the wrapped object lazily from the provider. A missing object is
     * reported as {@link COSNull#NULL} rather than {@code null}.
     */
    @Override
    public COSBase getCOSObject()
    {
        COSBase baseObject = Optional.ofNullable(provider.get(id.objectIdentifier))
                .orElse(COSNull.NULL);
        // Tag the resolved object with this indirect reference's id (only if absent).
        baseObject.idIfAbsent(id);
        return baseObject;
    }

    @Override
    public void releaseCOSObject()
    {
        // Lets the provider drop its cached copy; a later getCOSObject() reloads it.
        provider.release(id.objectIdentifier);
    }

    @Override
    public void accept(COSVisitor visitor) throws IOException
    {
        // Visitors always operate on the resolved object, never on this wrapper.
        getCOSObject().accept(visitor);
    }

    @Override
    public IndirectCOSObjectIdentifier id()
    {
        return id;
    }

    @Override
    public String toString()
    {
        // %s stringifies 'id' itself; the explicit id.toString() was redundant.
        return String.format("%s[%s]", super.toString(), id);
    }
}
| apache-2.0 |
junhaozhou/old-driver | app/src/main/java/com/littlechoc/olddriver/dao/ObdDao.java | 9355 | package com.littlechoc.olddriver.dao;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import com.littlechoc.commonutils.Logger;
import com.littlechoc.olddriver.Application;
import com.littlechoc.olddriver.model.sensor.ObdModel;
import com.littlechoc.olddriver.obd.commands.ObdCommandInterval;
import com.littlechoc.olddriver.obd.commands.ObdCommandProxy;
import com.littlechoc.olddriver.obd.commands.RawObdCommand;
import com.littlechoc.olddriver.obd.commands.SpeedObdCommand;
import com.littlechoc.olddriver.obd.commands.engine.EngineRPMObdCommand;
import com.littlechoc.olddriver.obd.commands.engine.EngineRuntimeObdCommand;
import com.littlechoc.olddriver.obd.commands.engine.MassAirFlowObdCommand;
import com.littlechoc.olddriver.obd.commands.pressure.FuelPressureObdCommand;
import com.littlechoc.olddriver.utils.FileUtils;
import com.littlechoc.olddriver.utils.JsonUtils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import io.reactivex.functions.Action;
import io.reactivex.functions.Consumer;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.PublishSubject;
/**
* @author Junhao Zhou 2017/4/22
*/
/**
 * Reads OBD (on-board diagnostics) data from a Bluetooth ELM-style adapter.
 *
 * <p>Two handler threads cooperate through a blocking queue:
 * a "generate" thread periodically enqueues due commands ({@link GenerateRunnable}),
 * and a "work" thread dequeues and executes them against the Bluetooth socket
 * ({@link ReadRunnable}). Results are serialized to {@code obd.dat} as JSON
 * lines via an Rx {@link PublishSubject} and also reported to the caller on
 * the UI thread through {@link InternalCallback}.
 */
public class ObdDao {

  /** Caller-facing callback; invoked on the application's UI thread. */
  public interface Callback {

    void onError(String msg);

    void onCommandResult(ObdModel obdModel);
  }

  public static final String TAG = "ObdDao";

  // Standard Serial Port Profile (SPP) UUID used for RFCOMM Bluetooth sockets.
  private static final UUID MY_UUID = UUID
          .fromString("00001101-0000-1000-8000-00805F9B34FB");

  // Delay between command executions on the work thread (ms).
  private static final long INTERVAL_READ = 50;

  // Polling period of the command-generation loop (ms).
  private static final long INTERVAL_GENERATE = 10;

  // Commands waiting to be executed; GenerateRunnable produces, ReadRunnable consumes.
  private final BlockingQueue<ObdCommandProxy> commandList;

  // Command templates with per-command scheduling intervals.
  private final List<ObdCommandInterval> toReadList;

  private BluetoothDevice device;

  private BluetoothSocket socket;

  // Directory the output file "obd.dat" is created in.
  private String folder;

  private BufferedOutputStream bos;

  private InternalCallback internalCallback;

  private WorkThread workThread;

  private GenerateThread generateThread;

  private Handler workHandler;

  private Handler generateHandler;

  private ReadRunnable readRunnable;

  // Feeds serialized results to the file-writing subscriber; completed in stop().
  private PublishSubject<ObdModel> subject;

  public ObdDao() {
    commandList = new LinkedBlockingQueue<>();
    toReadList = new ArrayList<>();
  }

  /**
   * Starts the capture session: spins up both handler threads and kicks off
   * connection setup on the work thread.
   *
   * @param device   paired Bluetooth OBD adapter to connect to
   * @param folder   output directory for "obd.dat"
   * @param callback receives results/errors on the UI thread
   */
  public void start(BluetoothDevice device, String folder, Callback callback) {
    // init
    this.folder = folder;
    this.device = device;
    internalCallback = new InternalCallback(callback);
    generateThread = new GenerateThread();
    generateThread.start();
    generateHandler = new Handler(generateThread.getLooper());
    workThread = new WorkThread();
    workThread.start();
    workHandler = new Handler(workThread.getLooper());
    workHandler.post(new StartRunnable());
  }

  /**
   * Stops the session: closes the socket, drains pending handler messages,
   * quits both threads and completes the Rx pipeline (which flushes the file).
   */
  public void stop() {
    try {
      // NOTE(review): closing the socket first presumably unblocks any
      // in-flight read on the work thread — confirm ordering is intentional.
      socket.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
    workHandler.removeCallbacksAndMessages(null);
    readRunnable = null;
    workThread.quitSafely();
    generateHandler.removeCallbacksAndMessages(null);
    generateThread.quitSafely();
    if (subject != null) {
      subject.onComplete();
    }
  }

  // NOTE(review): both HandlerThreads are created with an empty name (super("")),
  // which makes them hard to identify in traces — consider descriptive names.
  private class WorkThread extends HandlerThread {

    public WorkThread() {
      super("");
    }
  }

  private class GenerateThread extends HandlerThread {

    public GenerateThread() {
      super("");
    }
  }

  /** One-shot setup task: creates the output file, wires the Rx pipeline, loads commands, connects. */
  private class StartRunnable implements Runnable {

    @Override
    public void run() {
      if (!createFile()) {
        return;
      }
      initProcess();
      //
      loadCommands();
      generateHandler.post(new GenerateRunnable());
      // connect
      if (connect()) {
        readRunnable = new ReadRunnable();
        workHandler.post(readRunnable);
      }
    }

    // Creates folder/obd.dat and opens a buffered stream to it.
    private boolean createFile() {
      File obdFile = FileUtils.createFile(folder, "obd.dat");
      if (obdFile == null) {
        // NOTE(review): error is reported with an empty message — consider
        // a meaningful description for the caller.
        internalCallback.onError("");
        return false;
      } else {
        Logger.i(TAG, "#create obd file success");
      }
      try {
        bos = new BufferedOutputStream(new FileOutputStream(obdFile));
        return true;
      } catch (FileNotFoundException e) {
        Logger.e(TAG, Log.getStackTraceString(e));
        internalCallback.onError(Log.getStackTraceString(e));
        return false;
      }
    }

    // Subscribes a writer that serializes each ObdModel to one JSON line;
    // onComplete (triggered by stop()) flushes and closes the stream.
    private void initProcess() {
      subject = PublishSubject.create();
      subject.subscribeOn(Schedulers.newThread())
              .map(new Function<ObdModel, String>() {
                @Override
                public String apply(ObdModel obdModel) throws Exception {
                  return JsonUtils.newInstance().toJson(obdModel);
                }
              }).subscribe(new Consumer<String>() {
        @Override
        public void accept(String s) throws Exception {
          bos.write((s + "\n").getBytes());
        }
      }, new Consumer<Throwable>() {
        @Override
        public void accept(Throwable throwable) throws Exception {
          Logger.e(TAG, "Save obd command error: %s", throwable.getMessage());
        }
      }, new Action() {
        @Override
        public void run() throws Exception {
          bos.flush();
          FileUtils.safeCloseStream(bos);
        }
      });
    }

    // Registers the PIDs to poll; intervals are per-command, in milliseconds.
    private void loadCommands() {
      toReadList.clear();
      toReadList.add(new ObdCommandInterval(new SpeedObdCommand(), 300));
      toReadList.add(new ObdCommandInterval(new EngineRPMObdCommand(), 300));
      toReadList.add(new ObdCommandInterval(new MassAirFlowObdCommand(), 1000));
      toReadList.add(new ObdCommandInterval(new FuelPressureObdCommand(), 1000));
      toReadList.add(new ObdCommandInterval(new EngineRuntimeObdCommand(), 1000));
      toReadList.add(new ObdCommandInterval(new RawObdCommand("01 33"), 5000));
    }

    // Opens the RFCOMM socket to the adapter; reports failure to the callback.
    private boolean connect() {
      try {
        socket = device.createRfcommSocketToServiceRecord(MY_UUID);
        socket.connect();
        return true;
      } catch (IOException e) {
        Logger.e(TAG, Log.getStackTraceString(e));
        internalCallback.onError(Log.getStackTraceString(e));
        return false;
      }
    }
  }

  /**
   * Work-thread loop: takes the next queued command, runs it over the socket,
   * reports and persists the result, then reschedules itself.
   */
  private class ReadRunnable implements Runnable {

    @Override
    public void run() {
      //
      ObdCommandProxy command = getNextCommand();
      if (command != null) {
        try {
          command.run(socket.getInputStream(), socket.getOutputStream());
          long time = System.currentTimeMillis();
          ObdModel obdModel = convert2ObdModel(command, time);
          internalCallback.onCommandResult(obdModel);
          saveResult(obdModel);
        } catch (IOException e) {
          e.printStackTrace();
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is swallowed here without
          // re-interrupting the thread (Thread.currentThread().interrupt()).
          e.printStackTrace();
        }
      }
      //
      workHandler.postDelayed(readRunnable, INTERVAL_READ);
    }

    // Snapshot of the executed command + timestamps into a serializable model.
    private ObdModel convert2ObdModel(ObdCommandProxy command, long time) {
      ObdModel obdModel = new ObdModel();
      obdModel.command = command.getCommand();
      obdModel.data = command.getResult();
      obdModel.name = command.getName();
      obdModel.time = time;
      obdModel.nanoTime = System.nanoTime();
      obdModel.formattedData = command.getFormattedResult();
      return obdModel;
    }

    // Blocks until a command is queued; may return null if interrupted.
    private ObdCommandProxy getNextCommand() {
      ObdCommandProxy command = null;
      try {
        command = commandList.take();
        Logger.i(TAG, "#getNextCommand[%s]", command.getName());
      } catch (InterruptedException e) {
        // NOTE(review): interrupt swallowed; caller treats null as "no command".
        e.printStackTrace();
      }
      return command;
    }

    // Hands the result to the Rx pipeline for asynchronous file writing.
    private void saveResult(ObdModel obdModel) {
      subject.onNext(obdModel);
    }
  }

  /**
   * Generate-thread loop: every INTERVAL_GENERATE ms, enqueues each command
   * whose own interval has elapsed since its last run.
   */
  private class GenerateRunnable implements Runnable {

    @Override
    public void run() {
      long currentTime = System.currentTimeMillis();
      try {
        for (ObdCommandInterval command : toReadList) {
          if (command.canAdd(currentTime)) {
            command.setLastTime(currentTime);
            Logger.d(TAG, "#add new command[%s]", command.getName());
            commandList.put(new ObdCommandProxy(command.getOriginCommand()));
          }
        }
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
      generateHandler.postDelayed(this, INTERVAL_GENERATE);
    }
  }

  /** Wraps the user callback so every notification is delivered on the UI thread. */
  private class InternalCallback implements Callback {

    private Callback callback;

    public InternalCallback(Callback callback) {
      this.callback = callback;
    }

    @Override
    public void onError(final String msg) {
      if (callback != null) {
        Application.getInstance().runOnUiThread(new Runnable() {
          @Override
          public void run() {
            callback.onError(msg);
          }
        });
      }
    }

    @Override
    public void onCommandResult(final ObdModel obdModel) {
      if (callback != null) {
        Application.getInstance().runOnUiThread(new Runnable() {
          @Override
          public void run() {
            callback.onCommandResult(obdModel);
          }
        });
      }
    }
  }
}
| apache-2.0 |
ontop/ontop | client/cli/src/main/java/it/unibz/inf/ontop/cli/OntopOBDAToR2RML.java | 13264 | package it.unibz.inf.ontop.cli;
import com.github.rvesse.airline.annotations.Command;
import com.github.rvesse.airline.annotations.Option;
import com.github.rvesse.airline.annotations.OptionType;
import com.github.rvesse.airline.annotations.help.BashCompletion;
import com.github.rvesse.airline.annotations.restrictions.Required;
import com.github.rvesse.airline.help.cli.bash.CompletionBehaviour;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Injector;
import it.unibz.inf.ontop.dbschema.*;
import it.unibz.inf.ontop.dbschema.impl.CachingMetadataLookup;
import it.unibz.inf.ontop.dbschema.impl.JDBCMetadataProviderFactory;
import it.unibz.inf.ontop.dbschema.impl.RawQuotedIDFactory;
import it.unibz.inf.ontop.exception.InvalidMappingSourceQueriesException;
import it.unibz.inf.ontop.exception.MetadataExtractionException;
import it.unibz.inf.ontop.injection.OntopSQLOWLAPIConfiguration;
import it.unibz.inf.ontop.injection.SQLPPMappingFactory;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.Variable;
import it.unibz.inf.ontop.spec.mapping.TargetAtom;
import it.unibz.inf.ontop.spec.mapping.TargetAtomFactory;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPMapping;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPMappingConverter;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPTriplesMap;
import it.unibz.inf.ontop.spec.mapping.pp.impl.OntopNativeSQLPPTriplesMap;
import it.unibz.inf.ontop.spec.mapping.pp.impl.SQLPPMappingConverterImpl;
import it.unibz.inf.ontop.spec.mapping.serializer.impl.R2RMLMappingSerializer;
import it.unibz.inf.ontop.spec.sqlparser.RAExpression;
import it.unibz.inf.ontop.substitution.ImmutableSubstitution;
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import it.unibz.inf.ontop.substitution.Var2VarSubstitution;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import it.unibz.inf.ontop.utils.LocalJDBCConnectionUtils;
import javax.annotation.Nullable;
import java.io.*;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
@Command(name = "to-r2rml",
description = "Convert ontop native mapping format (.obda) to R2RML format")
public class OntopOBDAToR2RML implements OntopCommand {
@Option(type = OptionType.COMMAND, name = {"-i", "--input"}, title = "mapping.obda",
description = "Input mapping file in Ontop native format (.obda)")
@Required
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
private String inputMappingFile;
@Option(type = OptionType.COMMAND, name = {"-t", "--ontology"}, title = "ontology.owl",
description = "OWL ontology file")
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
@Nullable // optional
private String owlFile;
@Option(type = OptionType.COMMAND, name = {"-o", "--output"}, title = "mapping.ttl",
description = "Output mapping file in R2RML format (.ttl)")
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
private String outputMappingFile;
@Option(type = OptionType.COMMAND, name = {"-p", "--properties"}, title = "properties file",
description = "Properties file")
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
@Nullable // optional
private String propertiesFile;
@Option(type = OptionType.COMMAND, name = {"-d", "--db-metadata"}, title = "db-metadata file",
description = "User-supplied db-metadata file")
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
String dbMetadataFile;
@Option(type = OptionType.COMMAND, name = {"-v", "--ontop-views"}, title = "Ontop view file",
description = "User-supplied view file")
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
String ontopViewFile;
@Option(type = OptionType.COMMAND, name = {"--force"}, title = "Force the conversion",
description = "Force the conversion in the absence of DB metadata", arity = 0)
@BashCompletion(behaviour = CompletionBehaviour.FILENAMES)
@Nullable // optional
private Boolean force;
@Override
public void run() {
    // Default the output path to the input path with .obda swapped for .ttl.
    if (Strings.isNullOrEmpty(outputMappingFile)) {
        outputMappingFile = inputMappingFile.substring(0, inputMappingFile.length() - ".obda".length())
                .concat(".ttl");
    }

    OntopSQLOWLAPIConfiguration.Builder<? extends OntopSQLOWLAPIConfiguration.Builder<?>> configBuilder = OntopSQLOWLAPIConfiguration.defaultBuilder()
            .nativeOntopMappingFile(inputMappingFile);

    if (!Strings.isNullOrEmpty(propertiesFile)) {
        configBuilder.propertyFile(propertiesFile);
    }
    else {
        // No properties supplied: dummy JDBC settings satisfy the configuration
        // builder; no real DB connection will be attempted in this mode.
        configBuilder.jdbcDriver("dummy")
                .jdbcUrl("dummy")
                .jdbcUser("")
                .jdbcPassword("");
    }

    if (!Strings.isNullOrEmpty(owlFile)) {
        configBuilder.ontologyFile(owlFile);
    }

    OntopSQLOWLAPIConfiguration config = configBuilder.build();
    try {
        // Load the native .obda mapping, normalize it against DB metadata
        // (file, live connection, or --force), then serialize as R2RML turtle.
        SQLPPMapping ppMapping = extractAndNormalizePPMapping(config);

        R2RMLMappingSerializer converter = new R2RMLMappingSerializer(config.getRdfFactory());
        converter.write(new File(outputMappingFile), ppMapping);
        System.out.println("R2RML mapping file " + outputMappingFile + " written!");
    }
    catch (Exception e) {
        // CLI entry point: surface the failure and exit non-zero.
        e.printStackTrace();
        System.exit(1);
    }
}
/**
 * Loads the pre-processed (PP) mapping and normalizes its identifiers against
 * DB metadata when available.
 * <p>
 * Resolution order: a user-supplied serialized metadata file wins over a live
 * DB connection (configured via the properties file); with neither, the
 * mapping is returned unnormalized only when {@code --force} was given —
 * otherwise the process exits with status 2.
 *
 * @throws Exception propagated from mapping loading / metadata extraction
 */
private SQLPPMapping extractAndNormalizePPMapping(OntopSQLOWLAPIConfiguration config) throws Exception {
    SQLPPMapping ppMapping = config.loadProvidedPPMapping();
    if (!Strings.isNullOrEmpty(dbMetadataFile)) {
        return normalizeWithDBMetadataFile(ppMapping, config);
    }
    else if (!Strings.isNullOrEmpty(propertiesFile)) {
        return normalizeByConnectingToDB(ppMapping, config);
    }
    else if (force != null) {
        // --force given: skip normalization entirely (quoting rules may be violated).
        return ppMapping;
    }
    else {
        System.err.println("Access to DB metadata is required by default to respect column quoting rules of R2RML.\n" +
                "Please provide a properties file containing the info to connect to the database.\n" +
                "Specify the option --force to bypass this requirement.");
        System.exit(2);
        // Not reached: System.exit does not return, but the compiler needs a value.
        return null;
    }
}
/** Normalizes the mapping against metadata deserialized from the user-supplied file. */
private SQLPPMapping normalizeWithDBMetadataFile(SQLPPMapping ppMapping, OntopSQLOWLAPIConfiguration config) throws IOException, MetadataExtractionException {
    try (Reader metadataReader = new FileReader(dbMetadataFile)) {
        SerializedMetadataProvider.Factory providerFactory = config.getInjector()
                .getInstance(SerializedMetadataProvider.Factory.class);
        return normalize(ppMapping, providerFactory.getMetadataProvider(metadataReader), config);
    }
}
/** Normalizes the mapping against metadata extracted from a live DB connection. */
private SQLPPMapping normalizeByConnectingToDB(SQLPPMapping ppMapping, OntopSQLOWLAPIConfiguration config) throws MetadataExtractionException, SQLException, IOException {
    try (Connection connection = LocalJDBCConnectionUtils.createConnection(config.getSettings())) {
        DBMetadataProvider liveMetadata = config.getInjector()
                .getInstance(JDBCMetadataProviderFactory.class)
                .getMetadataProvider(connection);
        return normalize(ppMapping, liveMetadata, config);
    }
}
/**
 * Rebuilds the PP mapping with every triples map normalized against the given
 * DB metadata, optionally composed with user-supplied Ontop view definitions.
 */
private SQLPPMapping normalize(SQLPPMapping ppMapping, MetadataProvider dbMetadataProvider, OntopSQLOWLAPIConfiguration config)
        throws IOException, MetadataExtractionException {
    Injector injector = config.getInjector();
    // DB metadata + view metadata
    final MetadataProvider metadataProvider;
    if (!Strings.isNullOrEmpty(ontopViewFile)) {
        // Wrap the DB metadata with the view definitions so view columns resolve too.
        try(Reader viewReader = new FileReader(ontopViewFile)) {
            metadataProvider = injector.getInstance(OntopViewMetadataProvider.Factory.class)
                    .getMetadataProvider(dbMetadataProvider, viewReader);
        }
    }
    else
        metadataProvider = dbMetadataProvider;
    // CachingMetadataLookup avoids re-querying the provider for repeated relations.
    CachingMetadataLookup metadataLookup = new CachingMetadataLookup(metadataProvider);
    OntopNativeMappingIdentifierNormalizer normalizer = new OntopNativeMappingIdentifierNormalizer(config, metadataLookup);
    SQLPPMappingFactory sqlppMappingFactory = injector.getInstance(SQLPPMappingFactory.class);
    // Normalize each triples map independently and reassemble the mapping.
    return sqlppMappingFactory.createSQLPreProcessedMapping(
            ppMapping.getTripleMaps().stream()
                    .map(normalizer::normalize)
                    .collect(ImmutableCollectors.toList()),
            ppMapping.getPrefixManager());
}
/**
 * Rewrites each triples map's target atoms so that placeholder variables use
 * the canonical SQL rendering of the matching source-query column, following
 * the identifier-quoting rules of the connected metadata source.
 */
private static class OntopNativeMappingIdentifierNormalizer {
    final SubstitutionFactory substitutionFactory;
    final TargetAtomFactory targetAtomFactory;
    final TermFactory termFactory;
    // Concrete impl type; see the NOTE in the constructor about the downcast.
    final SQLPPMappingConverterImpl converter;
    final MetadataLookup metadataLookup;
    // idFactory applies the dialect's quoting rules; rawIdFactory treats the
    // name as an already-quoted identifier (used as a second-chance lookup).
    final QuotedIDFactory idFactory, rawIdFactory;

    private OntopNativeMappingIdentifierNormalizer(OntopSQLOWLAPIConfiguration config, MetadataLookup metadataLookup) {
        substitutionFactory = config.getInjector().getInstance(SubstitutionFactory.class);
        targetAtomFactory = config.getInjector().getInstance(TargetAtomFactory.class);
        termFactory = config.getTermFactory();
        // NOTE(review): downcast assumes the injector binds the default
        // SQLPPMappingConverter implementation — confirm no alternative binding exists.
        converter = (SQLPPMappingConverterImpl)config.getInjector().getInstance(SQLPPMappingConverter.class);
        this.metadataLookup = metadataLookup;
        this.idFactory = metadataLookup.getQuotedIDFactory();
        this.rawIdFactory = new RawQuotedIDFactory(idFactory);
    }

    /**
     * Returns a copy of the triples map whose target atoms have been
     * normalized against the attributes exposed by its source query.
     * Checked analysis errors are wrapped in RuntimeException because this
     * method is invoked from a Stream pipeline.
     */
    private SQLPPTriplesMap normalize(SQLPPTriplesMap triplesMap) {
        try {
            RAExpression re = converter.getRAExpression(triplesMap, metadataLookup);
            ImmutableMap<QuotedID, ImmutableTerm> attributeMap = re.getUnqualifiedAttributes();
            // Resolve a placeholder: try the dialect-quoted id first, then the raw id.
            Function<Variable, Optional<QuotedID>> lookup = var -> {
                QuotedID standardId = idFactory.createAttributeID(var.getName());
                if (attributeMap.containsKey(standardId))
                    return Optional.of(standardId);
                QuotedID rawId = rawIdFactory.createAttributeID(var.getName());
                if (attributeMap.containsKey(rawId))
                    return Optional.of(rawId);
                return Optional.empty();
            };
            return new OntopNativeSQLPPTriplesMap(
                    triplesMap.getId(),
                    triplesMap.getSourceQuery(),
                    triplesMap.getTargetAtoms().stream()
                            .map(t -> normalize(t, lookup))
                            .collect(ImmutableCollectors.toList()));
        }
        catch (InvalidMappingSourceQueriesException | MetadataExtractionException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Renames the variables of a single target atom to the SQL rendering of
     * their resolved column ids; fails fast when any placeholder has no match.
     */
    private TargetAtom normalize(TargetAtom target, Function<Variable, Optional<QuotedID>> lookup) {
        // Collect every variable occurring in the (substituted) projection terms,
        // mapped to its resolved column id (empty = unresolved).
        ImmutableMap<Variable, Optional<QuotedID>> targetPreMap = target.getProjectionAtom().getArguments().stream()
                .map(v -> target.getSubstitution().applyToVariable(v))
                .flatMap(ImmutableTerm::getVariableStream)
                .distinct()
                .collect(ImmutableCollectors.toMap(Function.identity(), lookup));
        // Any unresolved placeholder is a user error: report all of them at once.
        if (targetPreMap.values().stream().anyMatch(t -> !t.isPresent()))
            throw new RuntimeException(targetPreMap.entrySet().stream()
                    .filter(e -> !e.getValue().isPresent())
                    .map(Map.Entry::getKey)
                    .map(Variable::getName)
                    .collect(Collectors.joining(", ",
                            "The placeholder(s) ",
                            " in the target do(es) not occur in source query of the mapping assertion\n[" + target + "]")));
        // Only rename variables whose name differs from the canonical rendering.
        //noinspection OptionalGetWithoutIsPresent
        ImmutableMap<Variable, Variable> targetMap = targetPreMap.entrySet().stream()
                .filter(e -> !e.getKey().getName().equals(e.getValue().get().getSQLRendering()))
                .collect(ImmutableCollectors.toMap(
                        Map.Entry::getKey,
                        e -> termFactory.getVariable(e.getValue().get().getSQLRendering())));
        Var2VarSubstitution sub = substitutionFactory.getVar2VarSubstitution(targetMap);
        ImmutableSubstitution<ImmutableTerm> newSubstitution = target.getSubstitution().transform(sub::apply);
        return targetAtomFactory.getTargetAtom(target.getProjectionAtom(), newSubstitution);
    }
}
}
| apache-2.0 |
holance/DumbThings | app/src/main/java/org/lunci/dumbthing/fragment/FunctionFragment.java | 4211 | /*
* Copyright 2015 Lunci Hua
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lunci.dumbthing.fragment;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import org.lunci.dumbthing.R;
import org.lunci.dumbthing.dialog.CalendarDialog;
import org.lunci.dumbthing.service.DataServiceMessages;
import java.util.ArrayList;
/**
* A simple {@link Fragment} subclass.
* Use the {@link FunctionFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class FunctionFragment extends ServiceFragmentBase {
    /** Creates a new instance with a (currently empty) arguments bundle. */
    public static FunctionFragment newInstance() {
        FunctionFragment fragment = new FunctionFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;
    }

    // Receives replies from the data service (delivered via getHandler() below).
    private Handler mHandler=new Handler(new Handler.Callback() {
        @Override
        public boolean handleMessage(Message msg) {
            boolean succ=false;
            switch (msg.what){
                case DataServiceMessages.Service_Get_All_Dates_ASC_Finished:
                    if(msg.obj instanceof ArrayList) {
                        try{
                            // Unchecked cast: the service is trusted to send
                            // ArrayList<String>; a wrong element type would
                            // surface as the ClassCastException caught below.
                            final ArrayList<String> dates=(ArrayList<String>)msg.obj;
                            final CalendarDialog calendarDialog = CalendarDialog.newInstance(dates);
                            // Shown after a small delay — presumably to let the
                            // current message pass settle; TODO confirm 200 ms choice.
                            mHandler.postDelayed(new Runnable() {
                                @Override
                                public void run() {
                                    calendarDialog.show(getFragmentManager(), CalendarDialog.class.getSimpleName());
                                }
                            }, 200);
                            succ = true;
                        }catch (ClassCastException ex){
                            ex.printStackTrace();
                        }
                    }
                    break;
            }
            return succ;
        }
    });

    private ViewHolder mViewHolder=new ViewHolder();

    public FunctionFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            // No arguments are read yet; bundle reserved for future parameters.
        }
    }

    /** Handler the base class uses to deliver service messages to this fragment. */
    @Override
    public Handler getHandler() {
        return mHandler;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        final View rootView= inflater.inflate(R.layout.fragment_functions, container, false);
        final View calendarButton=rootView.findViewById(R.id.imageView_calendar);
        mViewHolder.setCalendarButton(calendarButton);
        mViewHolder.setup();
        return rootView;
    }

    // Caches view references and wires their click handlers.
    private final class ViewHolder{
        public View getCalendarButton() {
            return mCalendarButton;
        }
        public void setCalendarButton(View calendarButton) {
            this.mCalendarButton = calendarButton;
        }
        private View mCalendarButton;
        // Throws NullPointerException when setCalendarButton was not called first.
        public void setup() throws NullPointerException{
            // Clicking the calendar icon asks the service for all record dates;
            // the reply is handled in mHandler above.
            mCalendarButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    sendMessageToService(Message.obtain(null, DataServiceMessages.Service_Get_All_Dates_ASC));
                }
            });
        }
    }
}
| apache-2.0 |
arquillian/arquillian-cube | docker/docker/src/main/java/org/arquillian/cube/docker/impl/client/config/AfterStop.java | 404 | package org.arquillian.cube.docker.impl.client.config;
public class AfterStop {

    /** Optional copy action to run after stop; null when not configured. */
    private Copy copy;

    /** Optional log action to run after stop; null when not configured. */
    private Log log;

    public AfterStop() {
    }

    public Log getLog() {
        return log;
    }

    public void setLog(Log log) {
        this.log = log;
    }

    public Copy getCopy() {
        return copy;
    }

    public void setCopy(Copy copy) {
        this.copy = copy;
    }
}
| apache-2.0 |
eemirtekin/Sakai-10.6-TR | sitestats/sitestats-tool/src/java/org/sakaiproject/sitestats/tool/wicket/pages/ServerWidePage.java | 6749 | /**
* $URL: https://source.sakaiproject.org/svn/sitestats/tags/sakai-10.6/sitestats-tool/src/java/org/sakaiproject/sitestats/tool/wicket/pages/ServerWidePage.java $
* $Id: ServerWidePage.java 105078 2012-02-24 23:00:38Z ottenhoff@longsight.com $
*
* Copyright (c) 2006-2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.sitestats.tool.wicket.pages;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.PageParameters;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.extensions.ajax.markup.html.IndicatingAjaxLink;
import org.apache.wicket.markup.html.IHeaderResponse;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.Model;
import org.sakaiproject.sitestats.api.StatsManager;
import org.sakaiproject.sitestats.tool.facade.Locator;
import org.sakaiproject.sitestats.tool.wicket.components.AdminMenu;
import org.sakaiproject.sitestats.tool.wicket.components.AjaxLazyLoadImage;
import org.sakaiproject.sitestats.tool.wicket.models.ServerWideModel;
/**
* @author Nuno Fernandes
*/
/**
 * Wicket page showing server-wide (admin) statistics: a set of report-selector
 * links and an AJAX-lazily-loaded chart for the currently selected view.
 * Access is restricted to SiteStats admin users.
 */
public class ServerWidePage extends BasePage {
    private static final long serialVersionUID = 1L;
    // UI Components
    private Label reportTitle = null;
    private Label reportDescription = null;
    private AjaxLazyLoadImage reportChart = null;
    private Label reportNotes = null;
    // Container holding the per-report selector links and their labels.
    private WebMarkupContainer selectors = null;
    // Backing model, exposed via getReport()/setReport() for the CompoundPropertyModel.
    private ServerWideModel report = null;
    private String siteId = null;
    // Last chart size requested by the client; 0 means "not yet known".
    private int selectedWidth = 0;
    private int selectedHeight = 0;
    // Selector links and the "selected" label paired with each link.
    private List<Component> links = new ArrayList<Component>();
    private Map<Component,Component> labels = new HashMap<Component,Component>();

    public ServerWidePage() {
        this(null);
    }

    public ServerWidePage(PageParameters params) {
        siteId = Locator.getFacade().getToolManager().getCurrentPlacement().getContext();
        // Only SiteStats admins may view server-wide reports.
        boolean allowed = Locator.getFacade().getStatsAuthz().isUserAbleToViewSiteStatsAdmin(siteId);
        if(allowed) {
            renderBody();
        }else{
            redirectToInterceptPage(new NotAuthorizedPage());
        }
    }

    @Override
    public void renderHead(IHeaderResponse response) {
        super.renderHead(response);
        response.renderJavascriptReference(JQUERYSCRIPT);
    }

    /** Builds the page components: menu, report texts, chart and selector links. */
    private void renderBody() {
        add(new AdminMenu("menu"));
        // model
        report = new ServerWideModel();
        setDefaultModel(new CompoundPropertyModel(this));
        Form form = new Form("serverWideReportForm");
        add(form);
        // title, description & notes
        reportTitle = new Label("report.reportTitle");
        reportTitle.setOutputMarkupId(true);
        form.add(reportTitle);
        reportDescription = new Label("report.reportDescription");
        reportDescription.setOutputMarkupId(true);
        form.add(reportDescription);
        reportNotes = new Label("report.reportNotes");
        reportNotes.setOutputMarkupId(true);
        form.add(reportNotes);
        // chart (rendered lazily via AJAX; size is auto-detected client-side)
        reportChart = new AjaxLazyLoadImage("reportChart", getPage()) {
            @Override
            public byte[] getImageData() {
                return getChartImage(selectedWidth, selectedHeight);
            }
            @Override
            public byte[] getImageData(int width, int height) {
                return getChartImage(width, height);
            }
        };
        reportChart.setOutputMarkupId(true);
        reportChart.setAutoDetermineChartSizeByAjax(".chartContainer");
        form.add(reportChart);
        // selectors
        selectors = new WebMarkupContainer("selectors");
        selectors.setOutputMarkupId(true);
        form.add(selectors);
        makeSelectorLink("reportMonthlyLogin", StatsManager.MONTHLY_LOGIN_REPORT);
        makeSelectorLink("reportWeeklyLogin", StatsManager.WEEKLY_LOGIN_REPORT);
        makeSelectorLink("reportDailyLogin", StatsManager.DAILY_LOGIN_REPORT);
        makeSelectorLink("reportRegularUsers", StatsManager.REGULAR_USERS_REPORT);
        makeSelectorLink("reportHourlyUsage", StatsManager.HOURLY_USAGE_REPORT);
        makeSelectorLink("reportTopActivities", StatsManager.TOP_ACTIVITIES_REPORT);
        makeSelectorLink("reportTool", StatsManager.TOOL_REPORT);
    }

    public void setReport(ServerWideModel report) {
        this.report = report;
    }

    public ServerWideModel getReport() {
        return report;
    }

    /** Renders the chart for the currently selected view, with default-size fallbacks. */
    private byte[] getChartImage(int width, int height) {
        // Fall back to a fixed size when the client has not reported one yet.
        int _width = (width <= 0) ? 350 : width;
        int _height = (height <= 0) ? 200: height;
        return Locator.getFacade().getServerWideReportManager().generateReportChart(
                report.getSelectedView(), _width, _height
        );
    }

    /** Adds an AJAX link that switches the page to the given report view. */
    @SuppressWarnings("serial")
    private void makeSelectorLink(final String id, final String view) {
        IndicatingAjaxLink link = new IndicatingAjaxLink(id) {
            @Override
            public void onClick(AjaxRequestTarget target) {
                // select view
                report.setSelectedView(view);
                // make title, description & notes visible
                reportTitle.add(new AttributeModifier("style", true, new Model("display: block")));
                reportDescription.add(new AttributeModifier("style", true, new Model("display: block")));
                reportNotes.add(new AttributeModifier("style", true, new Model("display: block")));
                reportChart.renderImage(target, true);
                // toggle selectors link state: reset all entries, then hide the
                // clicked link and show its "selected" label instead
                for(Component lbl : labels.values()) {
                    lbl.setVisible(false);
                }
                for(Component lnk : links) {
                    lnk.setVisible(true);
                }
                this.setVisible(false);
                labels.get(this).setVisible(true);
                // mark component for rendering
                target.addComponent(selectors);
                target.addComponent(reportTitle);
                target.addComponent(reportDescription);
                target.addComponent(reportNotes);
                target.appendJavascript("setMainFrameHeightNoScroll( window.name, 650 )");
            }
        };
        link.setVisible(true);
        links.add(link);
        selectors.add(link);
        makeSelectorLabel(link, id + "Lbl");
    }

    /** Adds the (initially hidden) label shown when the paired link is selected. */
    private void makeSelectorLabel(final Component link, final String id) {
        WebMarkupContainer label = new WebMarkupContainer(id);
        label.setVisible(false);
        labels.put(link, label);
        selectors.add(label);
    }
}
| apache-2.0 |
ujjwal9895/GoUbiquitous | app/src/main/java/com/example/android/sunshine/app/WatchListenerService.java | 1018 | package com.example.android.sunshine.app;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import android.util.Log;
import com.example.android.sunshine.app.sync.SunshineSyncAdapter;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.WearableListenerService;
/**
 * Wearable data-layer listener: triggers an immediate Sunshine sync whenever
 * the "/weather" data item is created or changed.
 */
public class WatchListenerService extends WearableListenerService {
    public static final String LOG_TAG = WatchListenerService.class.getSimpleName();
    // Data-item path watched for weather-refresh requests.
    public static final String WEATHER = "/weather";

    @Override
    public void onDataChanged(DataEventBuffer dataEvents) {
        for (DataEvent dataEvent : dataEvents) {
            if (dataEvent.getType() != DataEvent.TYPE_CHANGED) {
                continue; // ignore deletions and other event types
            }
            String path = dataEvent.getDataItem().getUri().getPath();
            // Fix: constant-first equals avoids an NPE when the URI has no path.
            if (WEATHER.equals(path)) {
                SunshineSyncAdapter.syncImmediately(this);
            }
        }
    }
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p89/Test1797.java | 2110 | package org.gradle.test.performance.mediummonolithicjavaproject.p89;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Round-trip tests for the String properties of {@code Production1797}:
 * each test sets a property and asserts the getter returns the same value.
 * Apparently generated code (part of a synthetic Gradle-performance project,
 * per the package name), hence the deliberate repetition.
 */
public class Test1797 {
    Production1797 objectUnderTest = new Production1797();
    @Test
    public void testProperty0() {
        String value = "value";
        objectUnderTest.setProperty0(value);
        assertEquals(value, objectUnderTest.getProperty0());
    }
    @Test
    public void testProperty1() {
        String value = "value";
        objectUnderTest.setProperty1(value);
        assertEquals(value, objectUnderTest.getProperty1());
    }
    @Test
    public void testProperty2() {
        String value = "value";
        objectUnderTest.setProperty2(value);
        assertEquals(value, objectUnderTest.getProperty2());
    }
    @Test
    public void testProperty3() {
        String value = "value";
        objectUnderTest.setProperty3(value);
        assertEquals(value, objectUnderTest.getProperty3());
    }
    @Test
    public void testProperty4() {
        String value = "value";
        objectUnderTest.setProperty4(value);
        assertEquals(value, objectUnderTest.getProperty4());
    }
    @Test
    public void testProperty5() {
        String value = "value";
        objectUnderTest.setProperty5(value);
        assertEquals(value, objectUnderTest.getProperty5());
    }
    @Test
    public void testProperty6() {
        String value = "value";
        objectUnderTest.setProperty6(value);
        assertEquals(value, objectUnderTest.getProperty6());
    }
    @Test
    public void testProperty7() {
        String value = "value";
        objectUnderTest.setProperty7(value);
        assertEquals(value, objectUnderTest.getProperty7());
    }
    @Test
    public void testProperty8() {
        String value = "value";
        objectUnderTest.setProperty8(value);
        assertEquals(value, objectUnderTest.getProperty8());
    }
    @Test
    public void testProperty9() {
        String value = "value";
        objectUnderTest.setProperty9(value);
        assertEquals(value, objectUnderTest.getProperty9());
    }
}
guhongyeying/youxuangouwu | YouXuanShop/src/com/example/youxuan/task/XianShiTask.java | 1893 | package com.example.youxuan.task;
import java.util.ArrayList;
import java.util.List;
import com.alibaba.fastjson.JSON;
import com.example.jingpingouwu.R;
import com.example.youxuan.adapter.XianShiAdapter;
import com.example.youxuan.common.CommonDailogHelper;
import com.example.youxuan.common.CommonJSONHelper;
import com.example.youxuan.entity.DatumXianShi;
import com.example.youxuan.entity.XianShi;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import android.view.View;
import android.widget.GridView;
import android.widget.ListView;
import android.widget.ProgressBar;
/**
 * Background task that downloads the "XianShi" JSON feed from the URL given in
 * params[0], parses it with fastjson and binds the resulting items to the
 * supplied {@link GridView}. A progress dialog is shown while the task runs.
 *
 * Cleanup: removed the unused {@code xsadapter} field, the dead
 * {@code new ArrayList<>()} allocation in doInBackground (immediately
 * overwritten), and the {@code list} field that duplicated a local value.
 */
public class XianShiTask extends AsyncTask<String, Void, List<DatumXianShi>> {
    private final GridView xianShiGridView;
    private final Context context;
    // Kept for compatibility with the 3-arg constructor; progress feedback is
    // actually handled by CommonDailogHelper, so this bar is never touched here.
    private ProgressBar bar;

    public XianShiTask(GridView xianShiGridView, Context context) {
        super();
        this.xianShiGridView = xianShiGridView;
        this.context = context;
    }

    public XianShiTask(GridView xianShiGridView, Context context,
            ProgressBar bar) {
        super();
        this.xianShiGridView = xianShiGridView;
        this.context = context;
        this.bar = bar;
    }

    @Override
    protected void onPreExecute() {
        CommonDailogHelper.startProgressDialog(context);
    }

    @Override
    protected List<DatumXianShi> doInBackground(String... params) {
        // params[0] is the feed URL; fetch and parse the JSON payload.
        String json = CommonJSONHelper.getJSON(params[0]);
        XianShi xianShi = JSON.parseObject(json, XianShi.class);
        return xianShi.getData();
    }

    @Override
    protected void onPostExecute(List<DatumXianShi> result) {
        xianShiGridView.setAdapter(new XianShiAdapter(result, context));
        CommonDailogHelper.stopProgressDialog();
    }
}
| apache-2.0 |
yosriz/RxGooglePhotos | library/src/main/java/com/yosriz/gphotosclient/signin/GoogleSignIn.java | 1976 | package com.yosriz.gphotosclient.signin;
import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import android.content.Intent;
import android.support.v4.app.FragmentActivity;
import java.util.ArrayList;
import java.util.List;
import io.reactivex.Completable;
import io.reactivex.Single;
/**
 * Reactive wrapper around Google Sign-In: exposes interactive sign-in, silent
 * sign-in and sign-out as Rx types, and fans activity results out to the
 * currently pending sign-in subscribers.
 */
public class GoogleSignIn {

    /** Routes the host activity's onActivityResult to pending sign-in flows. */
    interface OnActivityResultListener {
        void onActivityResult(int requestCode, int resultCode, Intent data);
    }

    /** Immutable pairing of the acquired token string with the signed-in account. */
    public static class SignInAccount {
        private final String token;
        private final GoogleSignInAccount account;

        SignInAccount(String token, GoogleSignInAccount account) {
            this.token = token;
            this.account = account;
        }

        public GoogleSignInAccount getAccount() {
            return account;
        }

        public String getToken() {
            return token;
        }
    }

    private List<OnActivityResultListener> activityResultListeners = new ArrayList<>();

    /**
     * Starts an interactive sign-in. The subscriber stays registered for
     * activity results until its Single terminates or is disposed.
     */
    public Single<SignInAccount> getToken(final FragmentActivity activity) {
        GoogleSignInOnSubscribe subscriber = new GoogleSignInOnSubscribe(activity);
        activityResultListeners.add(subscriber);
        return Single.create(subscriber)
                .doOnDispose(() -> activityResultListeners.remove(subscriber))
                .doAfterTerminate(() -> activityResultListeners.remove(subscriber));
    }

    /** Attempts sign-in without user interaction. */
    public Single<SignInAccount> getTokenSilently(final FragmentActivity activity) {
        return Single.create(new GoogleSilentSignInOnSubscribe(activity));
    }

    /** Signs out the current account. */
    public Completable signOut(final FragmentActivity activity) {
        return Completable.create(new GoogleSignOutOnSubscribe(activity));
    }

    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Fix: iterate over a snapshot. Delivering a result may terminate a
        // subscriber's Single, whose doAfterTerminate/doOnDispose callbacks
        // remove the listener from activityResultListeners mid-iteration and
        // would otherwise throw ConcurrentModificationException.
        for (OnActivityResultListener listener : new ArrayList<>(activityResultListeners)) {
            listener.onActivityResult(requestCode, resultCode, data);
        }
    }
}
| apache-2.0 |
HubSpot/Blazar | BlazarService/src/main/java/com/hubspot/blazar/guice/BlazarSlackModule.java | 2156 | package com.hubspot.blazar.guice;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.multibindings.Multibinder;
import com.hubspot.blazar.base.visitor.RepositoryBuildVisitor;
import com.hubspot.blazar.config.BlazarConfiguration;
import com.hubspot.blazar.config.BlazarSlackConfiguration;
import com.hubspot.blazar.visitor.repositorybuild.SlackDmNotificationVisitor;
import com.hubspot.blazar.visitor.repositorybuild.SlackRoomNotificationVisitor;
import com.hubspot.blazar.resources.SlackResource;
import com.hubspot.blazar.resources.UserFeedbackResource;
import com.ullink.slack.simpleslackapi.SlackSession;
import com.ullink.slack.simpleslackapi.impl.SlackSessionFactory;
/**
* This module handles the configuration of all slack-related features inside of Blazar.
* This module does not bind any slack related resources / clients / visitors etc. if slack is not configured.
*/
public class BlazarSlackModule implements Module {
    private static final Logger LOG = LoggerFactory.getLogger(BlazarSlackModule.class);

    private final Optional<BlazarSlackConfiguration> slackConfiguration;

    public BlazarSlackModule(BlazarConfiguration configuration) {
        this.slackConfiguration = configuration.getSlackConfiguration();
    }

    @Override
    public void configure(Binder binder) {
        if (!slackConfiguration.isPresent()) {
            LOG.info("Slack is not configured, not binding slack related resources or slack build notification visitors");
            return;
        }
        BlazarSlackConfiguration slackConfig = slackConfiguration.get();

        // Shared Slack session, created from the configured API token.
        binder.bind(SlackSession.class)
                .toInstance(SlackSessionFactory.createWebSocketSlackSession(slackConfig.getSlackApiToken()));

        // Build-notification visitors: direct messages and room notifications.
        Multibinder<RepositoryBuildVisitor> visitorBinder =
                Multibinder.newSetBinder(binder, RepositoryBuildVisitor.class);
        visitorBinder.addBinding().to(SlackDmNotificationVisitor.class);
        visitorBinder.addBinding().to(SlackRoomNotificationVisitor.class);

        // Resources only available when Slack is configured.
        binder.bind(SlackResource.class);
        binder.bind(UserFeedbackResource.class);
    }
}
| apache-2.0 |
dianping/cat | cat-home/src/main/java/com/dianping/cat/report/page/dependency/JspViewer.java | 1485 | /*
* Copyright (c) 2011-2018, Meituan Dianping. All Rights Reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dianping.cat.report.page.dependency;
import org.unidal.web.mvc.view.BaseJspViewer;
import com.dianping.cat.report.ReportPage;
public class JspViewer extends BaseJspViewer<ReportPage, Action, Context, Model> {
    /** Maps the model's current action to the JSP file used to render it. */
    @Override
    protected String getJspFilePath(Context ctx, Model model) {
        final Action action = model.getAction();
        switch (action) {
            case LINE_CHART:
                return JspFile.LINE_CHART.getPath();
            case TOPOLOGY:
                return JspFile.TOPOLOGY.getPath();
            case DEPENDENCY_DASHBOARD:
                return JspFile.DEPENDENCY_DASHBOARD.getPath();
            default:
                throw new RuntimeException("Unknown action: " + action);
        }
    }
}
| apache-2.0 |
SENA-CEET/1349397-Trimestre-4 | java/JDBC/daoGenerator/src/main/java/co/edu/sena/controller/factory/RaeHasTrimestreDaoFactory.java | 778 | /*
* This source file was generated by FireStorm/DAO.
*
* If you purchase a full license for FireStorm/DAO you can customize this header file.
*
* For more information please visit http://www.codefutures.com/products/firestorm
*/
package co.edu.sena.controller.factory;
import java.sql.Connection;
import co.edu.sena.controller.dao.*;
import co.edu.sena.controller.dao.jdbc.*;
public class RaeHasTrimestreDaoFactory
{
    /**
     * Creates a {@link RaeHasTrimestreDao} backed by the default JDBC
     * implementation using its no-argument constructor (connection handling
     * is left to the implementation).
     *
     * @return RaeHasTrimestreDao
     */
    public static RaeHasTrimestreDao create()
    {
        return new RaeHasTrimestreDaoImpl();
    }

    /**
     * Creates a {@link RaeHasTrimestreDao} that operates on the supplied JDBC
     * connection; the caller remains responsible for closing it.
     *
     * @param conn open JDBC connection used by the returned DAO
     * @return RaeHasTrimestreDao
     */
    public static RaeHasTrimestreDao create(Connection conn)
    {
        return new RaeHasTrimestreDaoImpl( conn );
    }
}
| apache-2.0 |
aws/aws-sdk-java-v2 | core/sdk-core/src/main/java/software/amazon/awssdk/core/io/ResettableInputStream.java | 12897 | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.core.io;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.annotations.NotThreadSafe;
import software.amazon.awssdk.annotations.SdkProtectedApi;
import software.amazon.awssdk.core.exception.SdkClientException;
/**
* A mark-and-resettable input stream that can be used on files or file input
* streams.
*
* In particular, a {@link ResettableInputStream} allows the close operation to
* be disabled via {@link #disableClose()} (to avoid accidentally being closed).
* This is necessary when such input stream needs to be marked-and-reset
* multiple times but only as long as the stream has not been closed.
* <p>
* The creator of this input stream should therefore always call
* {@link #release()} in a finally block to truly release the underlying
* resources.
*
* @see ReleasableInputStream
*/
@NotThreadSafe
@SdkProtectedApi
public class ResettableInputStream extends ReleasableInputStream {
// NOTE(review): logger appears unused in this portion of the class.
private static final Logger log = LoggerFactory.getLogger(ResettableInputStream.class);
private final File file; // null if the file is not known
private FileInputStream fis; // never null
private FileChannel fileChannel; // never null (obtained from fis in the constructor)
/**
 * Marked position of the file; default to zero.
 */
private long markPos;
/**
* @param file
* must not be null. Upon successful construction the the file
* will be opened with an input stream automatically marked at
* the starting position of the given file.
* <p>
* Note the creation of a {@link ResettableInputStream} would
* entail physically opening a file. If the opened file is meant
* to be closed only (in a finally block) by the very same code
* block that created it, then it is necessary that the release
* method must not be called while the execution is made in other
* stack frames.
*
* In such case, as other stack frames may inadvertently or
* indirectly call the close method of the stream, the creator of
* the stream would need to explicitly disable the accidental
* closing via {@link ResettableInputStream#disableClose()}, so
* that the release method becomes the only way to truly close
* the opened file.
*/
public ResettableInputStream(File file) throws IOException {
    // Open the file; the private constructor records the starting mark position.
    this(new FileInputStream(file), file);
}
/**
* <p>
* Note the creation of a {@link ResettableInputStream} would entail
* physically opening a file. If the opened file is meant to be closed only
* (in a finally block) by the very same code block that created it, then it
* is necessary that the release method must not be called while the
* execution is made in other stack frames.
*
* In such case, as other stack frames may inadvertently or indirectly call
* the close method of the stream, the creator of the stream would need to
* explicitly disable the accidental closing via
* {@link ResettableInputStream#disableClose()}, so that the release method
* becomes the only way to truly close the opened file.
*
* @param fis
* file input stream; must not be null. Upon successful
* construction the input stream will be automatically marked at
* the current position of the given file input stream.
*/
public ResettableInputStream(FileInputStream fis) throws IOException {
    // File is unknown (null); the stream is still marked at its current channel position.
    this(fis, null);
}
/**
* @param file
* can be null if not known
*/
private ResettableInputStream(FileInputStream fis, File file) throws IOException {
    super(fis);
    this.file = file;
    this.fis = fis;
    this.fileChannel = fis.getChannel();
    // Record the current channel position as the initial mark.
    this.markPos = fileChannel.position();
}
    /**
     * Convenient factory method to construct a new resettable input stream for
     * the given file, converting any IOException into SdkClientException.
     * <p>
     * Note the creation of a {@link ResettableInputStream} entails physically
     * opening a file. If the opened file is meant to be closed only (in a
     * finally block) by the very same code block that created it, disable
     * accidental closing via {@link ResettableInputStream#disableClose()} so
     * that the release method becomes the only way to truly close the file.
     *
     * @param file the file to open; must not be null
     */
    public static ResettableInputStream newResettableInputStream(File file) {
        return newResettableInputStream(file, null);
    }
/**
* Convenient factory method to construct a new resettable input stream for
* the given file, converting any IOException into SdkClientException
* with the given error message.
* <p>
* Note the creation of a {@link ResettableInputStream} would entail
* physically opening a file. If the opened file is meant to be closed only
* (in a finally block) by the very same code block that created it, then it
* is necessary that the release method must not be called while the
* execution is made in other stack frames.
*
* In such case, as other stack frames may inadvertently or indirectly call
* the close method of the stream, the creator of the stream would need to
* explicitly disable the accidental closing via
* {@link ResettableInputStream#disableClose()}, so that the release method
* becomes the only way to truly close the opened file.
*/
public static ResettableInputStream newResettableInputStream(File file,
String errmsg) {
try {
return new ResettableInputStream(file);
} catch (IOException e) {
throw errmsg == null
? SdkClientException.builder().cause(e).build()
: SdkClientException.builder().message(errmsg).cause(e).build();
}
}
    /**
     * Convenient factory method to construct a new resettable input stream for
     * the given file input stream, converting any IOException into
     * SdkClientException.
     * <p>
     * See {@link ResettableInputStream#disableClose()} for how to guard the
     * underlying file against accidental closing by other stack frames; the
     * release method then becomes the only way to truly close the file.
     */
    public static ResettableInputStream newResettableInputStream(
            FileInputStream fis) {
        return newResettableInputStream(fis, null);
    }
/**
* Convenient factory method to construct a new resettable input stream for
* the given file input stream, converting any IOException into
* SdkClientException with the given error message.
* <p>
* Note the creation of a {@link ResettableInputStream} would entail
* physically opening a file. If the opened file is meant to be closed only
* (in a finally block) by the very same code block that created it, then it
* is necessary that the release method must not be called while the
* execution is made in other stack frames.
*
* In such case, as other stack frames may inadvertently or indirectly call
* the close method of the stream, the creator of the stream would need to
* explicitly disable the accidental closing via
* {@link ResettableInputStream#disableClose()}, so that the release method
* becomes the only way to truly close the opened file.
*/
public static ResettableInputStream newResettableInputStream(
FileInputStream fis, String errmsg) {
try {
return new ResettableInputStream(fis);
} catch (IOException e) {
throw SdkClientException.builder().message(errmsg).cause(e).build();
}
}
    @Override
    public final boolean markSupported() {
        return true; // mark/reset are backed by the file channel position
    }
    /**
     * Marks the current position in this input stream. A subsequent call to
     * the <code>reset</code> method repositions this stream at the last marked
     * position so that subsequent reads re-read the same bytes.
     * This method works as long as the underlying file has not been closed.
     * <p>
     * Unlike {@link java.io.InputStream#mark(int)}, the read limit plays no
     * role here: the mark is simply the current file-channel position.
     *
     * @param ignored
     *            ignored
     */
    @Override
    public void mark(int ignored) {
        abortIfNeeded();
        try {
            markPos = fileChannel.position();
        } catch (IOException e) {
            // mark(int) cannot declare IOException, so wrap it unchecked.
            throw SdkClientException.builder().message("Failed to mark the file position").cause(e).build();
        }
        if (log.isTraceEnabled()) {
            log.trace("File input stream marked at position " + markPos);
        }
    }
    /**
     * Repositions this stream to the position at the time the
     * <code>mark</code> method was last called on this input stream (or to
     * the construction-time position if mark was never called).
     * This method works as long as the underlying file has not been closed.
     */
    @Override
    public void reset() throws IOException {
        abortIfNeeded();
        fileChannel.position(markPos);
        if (log.isTraceEnabled()) {
            log.trace("Reset to position " + markPos);
        }
    }
    @Override
    public int available() throws IOException {
        abortIfNeeded(); // fail fast if the stream has been aborted
        return fis.available();
    }
    @Override
    public int read() throws IOException {
        abortIfNeeded();
        return fis.read(); // delegate to the underlying file input stream
    }
    @Override
    public long skip(long n) throws IOException {
        abortIfNeeded();
        return fis.skip(n);
    }
@Override
public int read(byte[] arg0, int arg1, int arg2) throws IOException {
abortIfNeeded();
return fis.read(arg0, arg1, arg2);
}
    /**
     * Returns the underlying file, if known; or null if not.
     */
    public File getFile() {
        return file;
    }
}
| apache-2.0 |
gustavoanatoly/hbase | hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java | 10500 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.procedure2;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ProcedureInfo;
import org.apache.hadoop.hbase.ProcedureState;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
import org.apache.hadoop.hbase.util.NonceKey;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
/**
* Helper to convert to/from ProcedureProtos
*/
@InterfaceAudience.Private
public final class ProcedureUtil {
  private ProcedureUtil() { } // utility class; not instantiable
  // ==========================================================================
  //  Reflection helpers to create/validate a Procedure object
  // ==========================================================================
  /**
   * Instantiates, via reflection, a {@link Procedure} from its class name.
   * The class must be public and expose a public no-arg constructor.
   *
   * @param className fully qualified procedure class name
   * @return a new procedure instance
   * @throws BadProcedureException if the class cannot be loaded, is not
   *         public, or lacks a public empty constructor
   */
  public static Procedure newProcedure(final String className) throws BadProcedureException {
    try {
      final Class<?> clazz = Class.forName(className);
      if (!Modifier.isPublic(clazz.getModifiers())) {
        throw new Exception("the " + clazz + " class is not public");
      }
      final Constructor<?> ctor = clazz.getConstructor();
      assert ctor != null : "no constructor found";
      if (!Modifier.isPublic(ctor.getModifiers())) {
        throw new Exception("the " + clazz + " constructor is not public");
      }
      return (Procedure)ctor.newInstance();
    } catch (Exception e) {
      // Any failure (including the visibility checks above) surfaces as a
      // single BadProcedureException with the original cause attached.
      throw new BadProcedureException("The procedure class " + className +
        " must be accessible and have an empty constructor", e);
    }
  }
  /**
   * Verifies that the given procedure's class is public and has a public
   * no-arg constructor, i.e. that it could be re-instantiated on replay.
   *
   * @throws BadProcedureException if the class fails either check
   */
  public static void validateClass(final Procedure proc) throws BadProcedureException {
    try {
      final Class<?> clazz = proc.getClass();
      if (!Modifier.isPublic(clazz.getModifiers())) {
        throw new Exception("the " + clazz + " class is not public");
      }
      final Constructor<?> ctor = clazz.getConstructor();
      assert ctor != null;
      if (!Modifier.isPublic(ctor.getModifiers())) {
        throw new Exception("the " + clazz + " constructor is not public");
      }
    } catch (Exception e) {
      throw new BadProcedureException("The procedure class " + proc.getClass().getName() +
        " must be accessible and have an empty constructor", e);
    }
  }
  // ==========================================================================
  //  convert to and from Procedure object
  // ==========================================================================
  /**
   * Helper to convert the procedure to protobuf.
   * Used by ProcedureStore implementations.
   *
   * @param proc the procedure to serialize; must not be null
   * @return the protobuf representation of {@code proc}
   * @throws IOException if serializing the procedure's state data fails
   */
  public static ProcedureProtos.Procedure convertToProtoProcedure(final Procedure proc)
      throws IOException {
    Preconditions.checkArgument(proc != null);
    // The class must be re-instantiable when the message is read back.
    validateClass(proc);
    final ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder()
      .setClassName(proc.getClass().getName())
      .setProcId(proc.getProcId())
      .setState(proc.getState())
      .setSubmittedTime(proc.getSubmittedTime())
      .setLastUpdate(proc.getLastUpdate());
    // Optional fields are written only when present on the procedure.
    if (proc.hasParent()) {
      builder.setParentId(proc.getParentProcId());
    }
    if (proc.hasTimeout()) {
      builder.setTimeout(proc.getTimeout());
    }
    if (proc.hasOwner()) {
      builder.setOwner(proc.getOwner());
    }
    final int[] stackIds = proc.getStackIndexes();
    if (stackIds != null) {
      for (int i = 0; i < stackIds.length; ++i) {
        builder.addStackId(stackIds[i]);
      }
    }
    if (proc.hasException()) {
      RemoteProcedureException exception = proc.getException();
      builder.setException(
        RemoteProcedureException.toProto(exception.getSource(), exception.getCause()));
    }
    final byte[] result = proc.getResult();
    if (result != null) {
      builder.setResult(UnsafeByteOperations.unsafeWrap(result));
    }
    // Capture procedure-specific state through a temporary stream that is
    // always closed; the field is skipped entirely when nothing was written.
    final ByteString.Output stateStream = ByteString.newOutput();
    try {
      proc.serializeStateData(stateStream);
      if (stateStream.size() > 0) {
        builder.setStateData(stateStream.toByteString());
      }
    } finally {
      stateStream.close();
    }
    if (proc.getNonceKey() != null) {
      builder.setNonceGroup(proc.getNonceKey().getNonceGroup());
      builder.setNonce(proc.getNonceKey().getNonce());
    }
    return builder.build();
  }
  /**
   * Helper to convert the protobuf procedure.
   * Used by ProcedureStore implementations.
   *
   * TODO: OPTIMIZATION: some of the fields never change during execution
   * (e.g. className, procId, parentId, ...).
   * We can split in 'data' and 'state', and the store
   * may take advantage of it by storing the data only on insert().
   *
   * @throws IOException if the procedure's state data cannot be deserialized
   */
  public static Procedure convertToProcedure(final ProcedureProtos.Procedure proto) throws IOException {
    // Procedure from class name
    final Procedure proc = newProcedure(proto.getClassName());
    // set fields
    proc.setProcId(proto.getProcId());
    proc.setState(proto.getState());
    proc.setSubmittedTime(proto.getSubmittedTime());
    proc.setLastUpdate(proto.getLastUpdate());
    // Optional fields are applied only when the message carries them.
    if (proto.hasParentId()) {
      proc.setParentProcId(proto.getParentId());
    }
    if (proto.hasOwner()) {
      proc.setOwner(proto.getOwner());
    }
    if (proto.hasTimeout()) {
      proc.setTimeout(proto.getTimeout());
    }
    if (proto.getStackIdCount() > 0) {
      proc.setStackIndexes(proto.getStackIdList());
    }
    if (proto.hasException()) {
      assert proc.getState() == ProcedureProtos.ProcedureState.FAILED ||
          proc.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK :
        "The procedure must be failed (waiting to rollback) or rolledback";
      proc.setFailure(RemoteProcedureException.fromProto(proto.getException()));
    }
    if (proto.hasResult()) {
      proc.setResult(proto.getResult().toByteArray());
    }
    if (proto.getNonce() != HConstants.NO_NONCE) {
      proc.setNonceKey(new NonceKey(proto.getNonceGroup(), proto.getNonce()));
    }
    // we want to call deserialize even when the stream is empty, mainly for testing.
    proc.deserializeStateData(proto.getStateData().newInput());
    return proc;
  }
  // ==========================================================================
  //  convert to and from ProcedureInfo object
  // ==========================================================================
  /**
   * @return Convert the current {@link ProcedureInfo} into a Protocol Buffers
   *         Procedure instance.
   */
  public static ProcedureProtos.Procedure convertToProtoProcedure(final ProcedureInfo procInfo) {
    final ProcedureProtos.Procedure.Builder builder = ProcedureProtos.Procedure.newBuilder();
    builder.setClassName(procInfo.getProcName());
    builder.setProcId(procInfo.getProcId());
    builder.setSubmittedTime(procInfo.getSubmittedTime());
    // The proto and client-facing state enums share constant names.
    builder.setState(ProcedureProtos.ProcedureState.valueOf(procInfo.getProcState().name()));
    builder.setLastUpdate(procInfo.getLastUpdate());
    if (procInfo.hasParentId()) {
      builder.setParentId(procInfo.getParentId());
    }
    if (procInfo.hasOwner()) {
      builder.setOwner(procInfo.getProcOwner());
    }
    if (procInfo.isFailed()) {
      builder.setException(ForeignExceptionUtil.toProtoForeignException(procInfo.getException()));
    }
    if (procInfo.hasResultData()) {
      builder.setResult(UnsafeByteOperations.unsafeWrap(procInfo.getResult()));
    }
    return builder.build();
  }
  /**
   * Helper to convert the protobuf object.
   * @return Convert the current Protocol Buffers Procedure to
   *         {@link ProcedureInfo} instance.
   */
  public static ProcedureInfo convertToProcedureInfo(final ProcedureProtos.Procedure procProto) {
    NonceKey nonceKey = null;
    if (procProto.getNonce() != HConstants.NO_NONCE) {
      nonceKey = new NonceKey(procProto.getNonceGroup(), procProto.getNonce());
    }
    // Absent optional fields map to null (owner, exception, result) or -1
    // (parent id).
    return new ProcedureInfo(procProto.getProcId(), procProto.getClassName(),
      procProto.hasOwner() ? procProto.getOwner() : null,
      convertToProcedureState(procProto.getState()),
      procProto.hasParentId() ? procProto.getParentId() : -1, nonceKey,
      procProto.hasException() ?
        ForeignExceptionUtil.toIOException(procProto.getException()) : null,
      procProto.getLastUpdate(), procProto.getSubmittedTime(),
      procProto.hasResult() ? procProto.getResult().toByteArray() : null);
  }
  /** Maps a protobuf ProcedureState to the client-facing ProcedureState by constant name. */
  public static ProcedureState convertToProcedureState(ProcedureProtos.ProcedureState state) {
    return ProcedureState.valueOf(state.name());
  }
  /** Shortcut for {@link #convertToProcedureInfo(Procedure, NonceKey)} with no nonce key. */
  public static ProcedureInfo convertToProcedureInfo(final Procedure proc) {
    return convertToProcedureInfo(proc, null);
  }
  /**
   * Helper to create the ProcedureInfo from Procedure.
   *
   * @param nonceKey the nonce key associated with the procedure; may be null
   */
  public static ProcedureInfo convertToProcedureInfo(final Procedure proc,
      final NonceKey nonceKey) {
    final RemoteProcedureException exception = proc.hasException() ? proc.getException() : null;
    return new ProcedureInfo(proc.getProcId(), proc.toStringClass(), proc.getOwner(),
      convertToProcedureState(proc.getState()),
      proc.hasParent() ? proc.getParentProcId() : -1, nonceKey,
      exception != null ? exception.unwrapRemoteIOException() : null,
      proc.getLastUpdate(), proc.getSubmittedTime(), proc.getResult());
  }
} | apache-2.0 |
atary/DisCaCloud | src/org/cloudbus/cloudsim/Host.java | 15015 | /*
* Title: CloudSim Toolkit Description: CloudSim (Cloud Simulation) Toolkit for Modeling and
* Simulation of Clouds Licence: GPL - http://www.gnu.org/copyleft/gpl.html
*
* Copyright (c) 2009-2012, The University of Melbourne, Australia
*/
package org.cloudbus.cloudsim;
import java.util.ArrayList;
import java.util.List;
import org.cloudbus.cloudsim.core.CloudSim;
import org.cloudbus.cloudsim.lists.PeList;
import org.cloudbus.cloudsim.provisioners.BwProvisioner;
import org.cloudbus.cloudsim.provisioners.RamProvisioner;
/**
* Host executes actions related to management of virtual machines (e.g., creation and destruction).
* A host has a defined policy for provisioning memory and bw, as well as an allocation policy for
* Pe's to virtual machines. A host is associated to a datacenter. It can host virtual machines.
*
* @author Rodrigo N. Calheiros
* @author Anton Beloglazov
* @since CloudSim Toolkit 1.0
*/
public class Host {
    /** The unique id of this host. */
    private int id;
    /** The currently available storage (decremented as VMs are placed). */
    private long storage;
    /** The provisioner that allocates host RAM to VMs. */
    private RamProvisioner ramProvisioner;
    /** The provisioner that allocates host bandwidth to VMs. */
    private BwProvisioner bwProvisioner;
    /** The policy that shares this host's PEs among its VMs. */
    private VmScheduler vmScheduler;
    /** The list of VMs currently placed on this host. */
    private final List<? extends Vm> vmList = new ArrayList<Vm>();
    /** The list of processing elements (PEs) of this host. */
    private List<? extends Pe> peList;
    /** Tells whether this machine is working properly or has failed. */
    private boolean failed;
    /** The VMs currently migrating into this host. */
    private final List<Vm> vmsMigratingIn = new ArrayList<Vm>();
    /** The datacenter where the host is placed. */
    private Datacenter datacenter;
    /**
     * Instantiates a new host.
     *
     * @param id the id
     * @param ramProvisioner the ram provisioner
     * @param bwProvisioner the bw provisioner
     * @param storage the initial storage capacity
     * @param peList the pe list
     * @param vmScheduler the vm scheduler
     */
    public Host(
            int id,
            RamProvisioner ramProvisioner,
            BwProvisioner bwProvisioner,
            long storage,
            List<? extends Pe> peList,
            VmScheduler vmScheduler) {
        setId(id);
        setRamProvisioner(ramProvisioner);
        setBwProvisioner(bwProvisioner);
        setStorage(storage);
        setVmScheduler(vmScheduler);
        setPeList(peList);
        setFailed(false); // hosts start in the working (non-failed) state
    }
/**
* Requests updating of processing of cloudlets in the VMs running in this host.
*
* @param currentTime the current time
* @return expected time of completion of the next cloudlet in all VMs in this host.
* Double.MAX_VALUE if there is no future events expected in this host
* @pre currentTime >= 0.0
* @post $none
*/
public double updateVmsProcessing(double currentTime) {
double smallerTime = Double.MAX_VALUE;
for (Vm vm : getVmList()) {
double time = vm.updateVmProcessing(currentTime, getVmScheduler().getAllocatedMipsForVm(vm));
if (time > 0.0 && time < smallerTime) {
smallerTime = time;
}
}
return smallerTime;
}
    /**
     * Adds a VM migrating into this host, reserving its storage, RAM, BW and
     * PEs, and refreshing processing on both source and destination hosts.
     *
     * @param vm the vm
     */
    public void addMigratingInVm(Vm vm) {
        vm.setInMigration(true);
        if (!getVmsMigratingIn().contains(vm)) {
            // NOTE(review): each failed allocation below terminates the whole
            // JVM via System.exit(0) instead of reporting failure to the caller.
            if (getStorage() < vm.getSize()) {
                Log.printLine("[VmScheduler.addMigratingInVm] Allocation of VM #" + vm.getId() + " to Host #"
                        + getId() + " failed by storage");
                System.exit(0);
            }
            if (!getRamProvisioner().allocateRamForVm(vm, vm.getCurrentRequestedRam())) {
                Log.printLine("[VmScheduler.addMigratingInVm] Allocation of VM #" + vm.getId() + " to Host #"
                        + getId() + " failed by RAM");
                System.exit(0);
            }
            if (!getBwProvisioner().allocateBwForVm(vm, vm.getCurrentRequestedBw())) {
                Log.printLine("[VmScheduler.addMigratingInVm] Allocation of VM #" + vm.getId() + " to Host #"
                        + getId() + " failed by BW");
                System.exit(0);
            }
            getVmScheduler().getVmsMigratingIn().add(vm.getUid());
            if (!getVmScheduler().allocatePesForVm(vm, vm.getCurrentRequestedMips())) {
                Log.printLine("[VmScheduler.addMigratingInVm] Allocation of VM #" + vm.getId() + " to Host #"
                        + getId() + " failed by MIPS");
                System.exit(0);
            }
            setStorage(getStorage() - vm.getSize());
            getVmsMigratingIn().add(vm);
            getVmList().add(vm);
            // Refresh processing on the destination and the source host.
            updateVmsProcessing(CloudSim.clock());
            vm.getHost().updateVmsProcessing(CloudSim.clock());
        }
    }
    /**
     * Removes a migrating-in VM, releasing all resources reserved for it and
     * clearing its in-migration flag.
     *
     * @param vm the vm
     */
    public void removeMigratingInVm(Vm vm) {
        vmDeallocate(vm);
        getVmsMigratingIn().remove(vm);
        getVmList().remove(vm);
        getVmScheduler().getVmsMigratingIn().remove(vm.getUid());
        vm.setInMigration(false);
    }
    /**
     * Re-applies RAM, BW and PE allocations for every VM currently migrating
     * into this host.
     */
    public void reallocateMigratingInVms() {
        for (Vm vm : getVmsMigratingIn()) {
            if (!getVmList().contains(vm)) {
                getVmList().add(vm);
            }
            if (!getVmScheduler().getVmsMigratingIn().contains(vm.getUid())) {
                getVmScheduler().getVmsMigratingIn().add(vm.getUid());
            }
            getRamProvisioner().allocateRamForVm(vm, vm.getCurrentRequestedRam());
            getBwProvisioner().allocateBwForVm(vm, vm.getCurrentRequestedBw());
            getVmScheduler().allocatePesForVm(vm, vm.getCurrentRequestedMips());
            // NOTE(review): storage is decremented here even for VMs whose size
            // was already subtracted in addMigratingInVm — confirm this
            // repeated subtraction is intended.
            setStorage(getStorage() - vm.getSize());
        }
    }
/**
* Checks if is suitable for vm.
*
* @param vm the vm
* @return true, if is suitable for vm
*/
public boolean isSuitableForVm(Vm vm) {
return (getVmScheduler().getPeCapacity() >= vm.getCurrentRequestedMaxMips()
&& getVmScheduler().getAvailableMips() >= vm.getCurrentRequestedTotalMips()
&& getRamProvisioner().isSuitableForVm(vm, vm.getCurrentRequestedRam()) && getBwProvisioner()
.isSuitableForVm(vm, vm.getCurrentRequestedBw()));
}
    /**
     * Allocates PEs and memory to a new VM in the Host. Resources already
     * granted are rolled back when a later allocation step fails.
     *
     * @param vm Vm being started
     * @return $true if the VM could be started in the host; $false otherwise
     * @pre $none
     * @post $none
     */
    public boolean vmCreate(Vm vm) {
        if (getStorage() < vm.getSize()) {
            Log.printLine("[VmScheduler.vmCreate] Allocation of VM #" + vm.getId() + " to Host #" + getId()
                    + " failed by storage");
            System.out.println("storage " + datacenter.getId()); // debug output
            return false;
        }
        if (!getRamProvisioner().allocateRamForVm(vm, vm.getCurrentRequestedRam())) {
            Log.printLine("[VmScheduler.vmCreate] Allocation of VM #" + vm.getId() + " to Host #" + getId()
                    + " failed by RAM");
            System.out.println("Ram " + datacenter.getId()); // debug output
            return false;
        }
        if (!getBwProvisioner().allocateBwForVm(vm, vm.getCurrentRequestedBw())) {
            Log.printLine("[VmScheduler.vmCreate] Allocation of VM #" + vm.getId() + " to Host #" + getId()
                    + " failed by BW");
            getRamProvisioner().deallocateRamForVm(vm); // roll back RAM
            System.out.println("bw " + datacenter.getId()); // debug output
            return false;
        }
        if (!getVmScheduler().allocatePesForVm(vm, vm.getCurrentRequestedMips())) {
            Log.printLine("[VmScheduler.vmCreate] Allocation of VM #" + vm.getId() + " to Host #" + getId()
                    + " failed by MIPS");
            getRamProvisioner().deallocateRamForVm(vm); // roll back RAM
            getBwProvisioner().deallocateBwForVm(vm); // roll back BW
            System.out.println("mips " + datacenter.getId()); // debug output
            return false;
        }
        setStorage(getStorage() - vm.getSize());
        getVmList().add(vm);
        vm.setHost(this);
        return true;
    }
/**
* Destroys a VM running in the host.
*
* @param vm the VM
* @pre $none
* @post $none
*/
public void vmDestroy(Vm vm) {
if (vm != null) {
vmDeallocate(vm);
getVmList().remove(vm);
vm.setHost(null);
}
}
    /**
     * Destroys all VMs running in the host and reclaims their storage.
     *
     * @pre $none
     * @post $none
     */
    public void vmDestroyAll() {
        vmDeallocateAll();
        for (Vm vm : getVmList()) {
            vm.setHost(null);
            setStorage(getStorage() + vm.getSize()); // reclaim storage per VM
        }
        getVmList().clear();
    }
    /**
     * Deallocates all resources (RAM, BW, PEs, storage) held by the VM.
     *
     * @param vm the VM
     */
    protected void vmDeallocate(Vm vm) {
        getRamProvisioner().deallocateRamForVm(vm);
        getBwProvisioner().deallocateBwForVm(vm);
        getVmScheduler().deallocatePesForVm(vm);
        setStorage(getStorage() + vm.getSize());
    }
    /**
     * Deallocates RAM, BW and PEs for every VM on this host; storage is
     * reclaimed separately by the caller.
     */
    protected void vmDeallocateAll() {
        getRamProvisioner().deallocateRamForAllVms();
        getBwProvisioner().deallocateBwForAllVms();
        getVmScheduler().deallocatePesForAllVms();
    }
/**
* Returns a VM object.
*
* @param vmId the vm id
* @param userId ID of VM's owner
* @return the virtual machine object, $null if not found
* @pre $none
* @post $none
*/
public Vm getVm(int vmId, int userId) {
for (Vm vm : getVmList()) {
if (vm.getId() == vmId && vm.getUserId() == userId) {
return vm;
}
}
return null;
}
    /** @return the number of PEs on this host */
    public int getNumberOfPes() {
        return getPeList().size();
    }
    /** @return the number of free (unallocated) PEs */
    public int getNumberOfFreePes() {
        return PeList.getNumberOfFreePes(getPeList());
    }
    /** @return the total MIPS capacity across all PEs */
    public int getTotalMips() {
        return PeList.getTotalMips(getPeList());
    }
    /**
     * Allocates PEs for a VM, delegating to the VM scheduler.
     *
     * @param vm the vm
     * @param mipsShare the mips share
     * @return $true if this policy allows a new VM in the host, $false otherwise
     */
    public boolean allocatePesForVm(Vm vm, List<Double> mipsShare) {
        return getVmScheduler().allocatePesForVm(vm, mipsShare);
    }
    /**
     * Releases PEs allocated to a VM.
     *
     * @param vm the vm
     */
    public void deallocatePesForVm(Vm vm) {
        getVmScheduler().deallocatePesForVm(vm);
    }
    /**
     * Returns the MIPS share of each Pe that is allocated to a given VM.
     *
     * @param vm the vm
     * @return a list containing the amount of MIPS of each pe that is available to the VM
     */
    public List<Double> getAllocatedMipsForVm(Vm vm) {
        return getVmScheduler().getAllocatedMipsForVm(vm);
    }
    /**
     * Gets the total allocated MIPS for a VM over all the PEs.
     *
     * @param vm the vm
     * @return the allocated mips for vm
     */
    public double getTotalAllocatedMipsForVm(Vm vm) {
        return getVmScheduler().getTotalAllocatedMipsForVm(vm);
    }
    /** @return the maximum available MIPS among all the PEs */
    public double getMaxAvailableMips() {
        return getVmScheduler().getMaxAvailableMips();
    }
    /** @return the currently free MIPS of this host */
    public double getAvailableMips() {
        return getVmScheduler().getAvailableMips();
    }
    /** @return the machine bandwidth */
    public long getBw() {
        return getBwProvisioner().getBw();
    }
    /** @return the machine memory */
    public int getRam() {
        return getRamProvisioner().getRam();
    }
    /** @return the currently available machine storage */
    public long getStorage() {
        return storage;
    }
    /** @return the host id */
    public int getId() {
        return id;
    }
    /** Sets the host id. */
    protected void setId(int id) {
        this.id = id;
    }
    /** @return the ram provisioner */
    public RamProvisioner getRamProvisioner() {
        return ramProvisioner;
    }
    /** Sets the ram provisioner. */
    protected void setRamProvisioner(RamProvisioner ramProvisioner) {
        this.ramProvisioner = ramProvisioner;
    }
    /** @return the bw provisioner */
    public BwProvisioner getBwProvisioner() {
        return bwProvisioner;
    }
    /** Sets the bw provisioner. */
    protected void setBwProvisioner(BwProvisioner bwProvisioner) {
        this.bwProvisioner = bwProvisioner;
    }
    /** @return the VM scheduler */
    public VmScheduler getVmScheduler() {
        return vmScheduler;
    }
    /** Sets the VM scheduler. */
    protected void setVmScheduler(VmScheduler vmScheduler) {
        this.vmScheduler = vmScheduler;
    }
    /**
     * Gets the pe list.
     *
     * @param <T> the generic type
     * @return the pe list
     */
    @SuppressWarnings("unchecked")
    public <T extends Pe> List<T> getPeList() {
        return (List<T>) peList;
    }
    /**
     * Sets the pe list.
     *
     * @param <T> the generic type
     * @param peList the new pe list
     */
    protected <T extends Pe> void setPeList(List<T> peList) {
        this.peList = peList;
    }
    /**
     * Gets the vm list.
     *
     * @param <T> the generic type
     * @return the vm list
     */
    @SuppressWarnings("unchecked")
    public <T extends Vm> List<T> getVmList() {
        return (List<T>) vmList;
    }
    /** Sets the currently available storage. */
    protected void setStorage(long storage) {
        this.storage = storage;
    }
    /** @return true, if this host has failed */
    public boolean isFailed() {
        return failed;
    }
    /**
     * Sets the PEs of this machine to a FAILED status. NOTE: <tt>resName</tt> is used for debugging
     * purposes, which is <b>ON</b> by default. Use {@link #setFailed(boolean)} if you do not want
     * this information.
     *
     * @param resName the name of the resource
     * @param failed the failed
     * @return <tt>true</tt> if successful, <tt>false</tt> otherwise
     */
    public boolean setFailed(String resName, boolean failed) {
        // all the PEs are failed (or recovered, depending on fail)
        this.failed = failed;
        PeList.setStatusFailed(getPeList(), resName, getId(), failed);
        return true;
    }
    /**
     * Sets the PEs of this machine to a FAILED status.
     *
     * @param failed the failed
     * @return <tt>true</tt> if successful, <tt>false</tt> otherwise
     */
    public boolean setFailed(boolean failed) {
        // all the PEs are failed (or recovered, depending on fail)
        this.failed = failed;
        PeList.setStatusFailed(getPeList(), failed);
        return true;
    }
    /**
     * Sets the particular Pe status on this Machine.
     *
     * @param peId the pe id
     * @param status Pe status, either <tt>Pe.FREE</tt> or <tt>Pe.BUSY</tt>
     * @return <tt>true</tt> if the Pe status has changed, <tt>false</tt> otherwise (Pe id might
     *         not exist)
     */
    public boolean setPeStatus(int peId, int status) {
        return PeList.setPeStatus(getPeList(), peId, status);
    }
    /** @return the list of VMs currently migrating into this host */
    public List<Vm> getVmsMigratingIn() {
        return vmsMigratingIn;
    }
    /** @return the data center where the host runs */
    public Datacenter getDatacenter() {
        return datacenter;
    }
    /** Sets the data center of this host. */
    public void setDatacenter(Datacenter datacenter) {
        this.datacenter = datacenter;
    }
}
| apache-2.0 |
balazssimon/meta-java | src/metadslx.core/src/generated/java/metadslx/core/MetaPropertyKind.java | 170 | package metadslx.core;
/**
 * The kinds of meta-model properties.
 * NOTE(review): exact semantics of each constant are defined by the MetaDslx
 * framework and are not visible here — confirm against the framework docs.
 */
public enum MetaPropertyKind {
    Normal,
    Readonly,
    Lazy,
    Derived,
    Containment,
    Synthetized,
    Inherited;
}
| apache-2.0 |
liucloo/ijkPlayer-Danmaku | DanmakuFlameMaster/src/main/java/master/flame/danmaku/controller/DrawTask.java | 12784 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package master.flame.danmaku.controller;
import android.graphics.Canvas;
import master.flame.danmaku.danmaku.model.AbsDisplayer;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.DanmakuTimer;
import master.flame.danmaku.danmaku.model.IDanmakuIterator;
import master.flame.danmaku.danmaku.model.IDanmakus;
import master.flame.danmaku.danmaku.model.android.DanmakuContext;
import master.flame.danmaku.danmaku.model.android.DanmakuContext.ConfigChangedCallback;
import master.flame.danmaku.danmaku.model.android.DanmakuContext.DanmakuConfigTag;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.renderer.IRenderer;
import master.flame.danmaku.danmaku.renderer.IRenderer.RenderingState;
import master.flame.danmaku.danmaku.renderer.android.DanmakuRenderer;
public class DrawTask implements IDrawTask {
    protected final DanmakuContext mContext;
    protected final AbsDisplayer mDisp;
    // Full danmaku list backing this task.
    protected IDanmakus danmakuList;
    protected BaseDanmakuParser mParser;
    TaskListener mTaskListener;
    IRenderer mRenderer;
    DanmakuTimer mTimer;
    // Sub-list holding danmakus whose time falls inside the window
    // [mLastBeginMills, mLastEndMills]; see addDanmaku().
    private IDanmakus danmakus = new Danmakus(Danmakus.ST_BY_LIST);
    protected boolean clearRetainerFlag;
    private long mStartRenderTime = 0;
    private RenderingState mRenderingState = new RenderingState();
    protected boolean mReadyState;
    // Bounds of the time window covered by 'danmakus'.
    private long mLastBeginMills;
    private long mLastEndMills;
    private boolean mIsHidden;
    // Danmaku with the greatest time seen so far; updated in addDanmaku().
    private BaseDanmaku mLastDanmaku;
    // Forwards config changes from the DanmakuContext into this task.
    private ConfigChangedCallback mConfigChangedCallback = new ConfigChangedCallback() {
        @Override
        public boolean onDanmakuConfigChanged(DanmakuContext config, DanmakuConfigTag tag, Object... values) {
            return DrawTask.this.onDanmakuConfigChanged(config, tag, values);
        }
    };
public DrawTask(DanmakuTimer timer, DanmakuContext context,
TaskListener taskListener) {
if (context == null) {
throw new IllegalArgumentException("context is null");
}
mContext = context;
mDisp = context.getDisplayer();
mTaskListener = taskListener;
mRenderer = new DanmakuRenderer(context);
mRenderer.setVerifierEnabled(mContext.isPreventOverlappingEnabled() || mContext.isMaxLinesLimited());
initTimer(timer);
Boolean enable = mContext.isDuplicateMergingEnabled();
if (enable != null) {
if(enable) {
mContext.mDanmakuFilters.registerFilter(DanmakuFilters.TAG_DUPLICATE_FILTER);
} else {
mContext.mDanmakuFilters.unregisterFilter(DanmakuFilters.TAG_DUPLICATE_FILTER);
}
}
}
protected void initTimer(DanmakuTimer timer) {
mTimer = timer;
}
@Override
public synchronized void addDanmaku(BaseDanmaku item) {
if (danmakuList == null)
return;
boolean added = false;
if (item.isLive) {
removeUnusedLiveDanmakusIn(10);
}
item.index = danmakuList.size();
if (mLastBeginMills <= item.time && item.time <= mLastEndMills) {
synchronized (danmakus) {
added = danmakus.addItem(item);
}
} else if (item.isLive) {
mLastBeginMills = mLastEndMills = 0;
}
synchronized (danmakuList) {
added = danmakuList.addItem(item);
}
if (added && mTaskListener != null) {
mTaskListener.onDanmakuAdd(item);
}
if (mLastDanmaku == null || (item != null && mLastDanmaku != null && item.time > mLastDanmaku.time)) {
mLastDanmaku = item;
}
}
@Override
public synchronized void removeAllDanmakus() {
if (danmakuList == null || danmakuList.isEmpty())
return;
danmakuList.clear();
}
protected void onDanmakuRemoved(BaseDanmaku danmaku) {
// TODO call callback here
}
@Override
public synchronized void removeAllLiveDanmakus() {
if (danmakus == null || danmakus.isEmpty())
return;
synchronized (danmakus) {
IDanmakuIterator it = danmakus.iterator();
while (it.hasNext()) {
BaseDanmaku danmaku = it.next();
if (danmaku.isLive) {
it.remove();
onDanmakuRemoved(danmaku);
}
}
}
}
protected synchronized void removeUnusedLiveDanmakusIn(int msec) {
if (danmakuList == null || danmakuList.isEmpty())
return;
long startTime = System.currentTimeMillis();
IDanmakuIterator it = danmakuList.iterator();
while (it.hasNext()) {
BaseDanmaku danmaku = it.next();
boolean isTimeout = danmaku.isTimeOut();
if (isTimeout && danmaku.isLive) {
it.remove();
onDanmakuRemoved(danmaku);
}
if (!isTimeout || System.currentTimeMillis() - startTime > msec) {
break;
}
}
}
@Override
public IDanmakus getVisibleDanmakusOnTime(long time) {
long beginMills = time - mContext.mDanmakuFactory.MAX_DANMAKU_DURATION - 100;
long endMills = time + mContext.mDanmakuFactory.MAX_DANMAKU_DURATION;
IDanmakus subDanmakus = danmakuList.sub(beginMills, endMills);
IDanmakus visibleDanmakus = new Danmakus();
if (null != subDanmakus && !subDanmakus.isEmpty()) {
IDanmakuIterator iterator = subDanmakus.iterator();
while (iterator.hasNext()) {
BaseDanmaku danmaku = iterator.next();
if (danmaku.isShown() && !danmaku.isOutside()) {
visibleDanmakus.addItem(danmaku);
}
}
}
return visibleDanmakus;
}
@Override
public synchronized RenderingState draw(AbsDisplayer displayer) {
return drawDanmakus(displayer,mTimer);
}
@Override
public void reset() {
if (danmakus != null)
danmakus.clear();
if (mRenderer != null)
mRenderer.clear();
}
@Override
public void seek(long mills) {
reset();
// requestClear();
mContext.mGlobalFlagValues.updateVisibleFlag();
mStartRenderTime = mills < 1000 ? 0 : mills;
}
@Override
public void clearDanmakusOnScreen(long currMillis) {
reset();
mContext.mGlobalFlagValues.updateVisibleFlag();
mStartRenderTime = currMillis;
}
@Override
public void start() {
mContext.registerConfigChangedCallback(mConfigChangedCallback);
}
@Override
public void quit() {
mContext.unregisterAllConfigChangedCallbacks();
if (mRenderer != null)
mRenderer.release();
}
public void prepare() {
assert (mParser != null);
loadDanmakus(mParser);
if (mTaskListener != null) {
mTaskListener.ready();
mReadyState = true;
}
}
protected void loadDanmakus(BaseDanmakuParser parser) {
danmakuList = parser.setConfig(mContext).setDisplayer(mDisp).setTimer(mTimer).getDanmakus();
if (danmakuList != null && !danmakuList.isEmpty()) {
if (danmakuList.first().flags == null) {
IDanmakuIterator it = danmakuList.iterator();
while (it.hasNext()) {
BaseDanmaku item = it.next();
if (item != null) {
item.flags = mContext.mGlobalFlagValues;
}
}
}
}
mContext.mGlobalFlagValues.resetAll();
if(danmakuList != null) {
mLastDanmaku = danmakuList.last();
}
}
public void setParser(BaseDanmakuParser parser) {
mParser = parser;
mReadyState = false;
}
protected RenderingState drawDanmakus(AbsDisplayer disp, DanmakuTimer timer) {
if (clearRetainerFlag) {
mRenderer.clearRetainer();
clearRetainerFlag = false;
}
if (danmakuList != null) {
Canvas canvas = (Canvas) disp.getExtraData();
DrawHelper.clearCanvas(canvas);
if (mIsHidden) {
return mRenderingState;
}
long beginMills = timer.currMillisecond - mContext.mDanmakuFactory.MAX_DANMAKU_DURATION - 100;
long endMills = timer.currMillisecond + mContext.mDanmakuFactory.MAX_DANMAKU_DURATION;
if(mLastBeginMills > beginMills || timer.currMillisecond > mLastEndMills) {
IDanmakus subDanmakus = danmakuList.sub(beginMills, endMills);
if(subDanmakus != null) {
danmakus = subDanmakus;
} else {
danmakus.clear();
}
mLastBeginMills = beginMills;
mLastEndMills = endMills;
} else {
beginMills = mLastBeginMills;
endMills = mLastEndMills;
}
if (danmakus != null && !danmakus.isEmpty()) {
RenderingState renderingState = mRenderingState = mRenderer.draw(mDisp, danmakus, mStartRenderTime);
if (renderingState.nothingRendered) {
if(mTaskListener != null && mLastDanmaku != null && mLastDanmaku.isTimeOut()) {
mTaskListener.onDanmakusDrawingFinished();
}
if (renderingState.beginTime == RenderingState.UNKNOWN_TIME) {
renderingState.beginTime = beginMills;
}
if (renderingState.endTime == RenderingState.UNKNOWN_TIME) {
renderingState.endTime = endMills;
}
}
return renderingState;
} else {
mRenderingState.nothingRendered = true;
mRenderingState.beginTime = beginMills;
mRenderingState.endTime = endMills;
return mRenderingState;
}
}
return null;
}
public void requestClear() {
mLastBeginMills = mLastEndMills = 0;
mIsHidden = false;
}
public void requestClearRetainer() {
clearRetainerFlag = true;
}
public boolean onDanmakuConfigChanged(DanmakuContext config, DanmakuConfigTag tag,
Object... values) {
boolean handled = handleOnDanmakuConfigChanged(config, tag, values);
if (mTaskListener != null) {
mTaskListener.onDanmakuConfigChanged();
}
return handled;
}
protected boolean handleOnDanmakuConfigChanged(DanmakuContext config, DanmakuConfigTag tag, Object[] values) {
boolean handled = false;
if (tag == null || DanmakuConfigTag.MAXIMUM_NUMS_IN_SCREEN.equals(tag)) {
handled = true;
} else if (DanmakuConfigTag.DUPLICATE_MERGING_ENABLED.equals(tag)) {
Boolean enable = (Boolean) values[0];
if (enable != null) {
if (enable) {
mContext.mDanmakuFilters.registerFilter(DanmakuFilters.TAG_DUPLICATE_FILTER);
} else {
mContext.mDanmakuFilters.unregisterFilter(DanmakuFilters.TAG_DUPLICATE_FILTER);
}
handled = true;
}
} else if (DanmakuConfigTag.SCALE_TEXTSIZE.equals(tag) || DanmakuConfigTag.SCROLL_SPEED_FACTOR.equals(tag)) {
requestClearRetainer();
handled = false;
} else if (DanmakuConfigTag.MAXIMUN_LINES.equals(tag) || DanmakuConfigTag.OVERLAPPING_ENABLE.equals(tag)) {
if (mRenderer != null) {
mRenderer.setVerifierEnabled(mContext.isPreventOverlappingEnabled() || mContext.isMaxLinesLimited());
}
handled = true;
}
return handled;
}
@Override
public void requestHide() {
mIsHidden = true;
}
}
| apache-2.0 |
piyush-malaviya/ClockView | clockviewlib/src/androidTest/java/com/pcm/clockviewlib/ExampleInstrumentedTest.java | 749 | package com.pcm.clockviewlib;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
 * Instrumentation test that runs on an Android device or emulator.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation targets the library's test package. */
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test.
        final Context targetContext = InstrumentationRegistry.getTargetContext();
        final String expectedPackage = "com.pcm.clockviewlib.test";
        assertEquals(expectedPackage, targetContext.getPackageName());
    }
}
| apache-2.0 |
aws4j/dynamo-mapper | src/main/java/org/aws4j/data/dynamo/annotation/PartialKeyProperty.java | 356 | package org.aws4j.data.dynamo.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field or accessor as a component of a DynamoDB partial (composite) key.
 * Retained at runtime so the mapper can discover it reflectively.
 */
@Retention( RetentionPolicy.RUNTIME )
@Target( {ElementType.FIELD, ElementType.METHOD} )
public @interface PartialKeyProperty {

    /** Name of the sibling property this key component is partitioned by. */
    String by();
}
Busy-Brain/Portal | src/main/java/com/mk/portal/framework/page/html/tags/LiTag.java | 4365 | package com.mk.portal.framework.page.html.tags;
import com.mk.portal.framework.html.objects.Tag;
import com.mk.portal.framework.page.html.attributes.AccesskeyAttribute;
import com.mk.portal.framework.page.html.attributes.ClassAttribute;
import com.mk.portal.framework.page.html.attributes.ContenteditableAttribute;
import com.mk.portal.framework.page.html.attributes.ContextmenuAttribute;
import com.mk.portal.framework.page.html.attributes.DirAttribute;
import com.mk.portal.framework.page.html.attributes.DraggableAttribute;
import com.mk.portal.framework.page.html.attributes.DropzoneAttribute;
import com.mk.portal.framework.page.html.attributes.HiddenAttribute;
import com.mk.portal.framework.page.html.attributes.IdAttribute;
import com.mk.portal.framework.page.html.attributes.LangAttribute;
import com.mk.portal.framework.page.html.attributes.SpellcheckAttribute;
import com.mk.portal.framework.page.html.attributes.StyleAttribute;
import com.mk.portal.framework.page.html.attributes.TabindexAttribute;
import com.mk.portal.framework.page.html.attributes.TitleAttribute;
import com.mk.portal.framework.page.html.attributes.TranslateAttribute;
/**
 * Represents the HTML {@code <li>} (list item) element together with the
 * global HTML attributes it supports. Each attribute is exposed through a
 * plain getter/setter pair.
 */
public class LiTag extends Tag {

    // Global HTML attributes supported by <li>.
    private AccesskeyAttribute accesskey;
    private ClassAttribute classAttribute;
    private ContenteditableAttribute contenteditable;
    private ContextmenuAttribute contextmenu;
    private DirAttribute dir;
    private DraggableAttribute draggable;
    private DropzoneAttribute dropzone;
    private HiddenAttribute hidden;
    private IdAttribute id;
    private LangAttribute lang;
    private SpellcheckAttribute spellcheck;
    private StyleAttribute style;
    private TabindexAttribute tabindex;
    private TitleAttribute title;
    private TranslateAttribute translate;

    /** The li element always has a closing tag. */
    @Override
    public boolean hasEndTag() {
        return true;
    }

    /** @return the literal tag name, {@code "li"} */
    @Override
    public String getTagName() {
        return "li";
    }

    public AccesskeyAttribute getAccesskeyAttribute() {
        return accesskey;
    }

    public void setAccesskeyAttribute(AccesskeyAttribute accesskey) {
        this.accesskey = accesskey;
    }

    public ClassAttribute getClassAttribute() {
        return classAttribute;
    }

    public void setClassAttribute(ClassAttribute classAttribute) {
        this.classAttribute = classAttribute;
    }

    public ContenteditableAttribute getContenteditableAttribute() {
        return contenteditable;
    }

    public void setContenteditableAttribute(ContenteditableAttribute contenteditable) {
        this.contenteditable = contenteditable;
    }

    public ContextmenuAttribute getContextmenuAttribute() {
        return contextmenu;
    }

    public void setContextmenuAttribute(ContextmenuAttribute contextmenu) {
        this.contextmenu = contextmenu;
    }

    public DirAttribute getDirAttribute() {
        return dir;
    }

    public void setDirAttribute(DirAttribute dir) {
        this.dir = dir;
    }

    public DraggableAttribute getDraggableAttribute() {
        return draggable;
    }

    public void setDraggableAttribute(DraggableAttribute draggable) {
        this.draggable = draggable;
    }

    public DropzoneAttribute getDropzoneAttribute() {
        return dropzone;
    }

    public void setDropzoneAttribute(DropzoneAttribute dropzone) {
        this.dropzone = dropzone;
    }

    public HiddenAttribute getHiddenAttribute() {
        return hidden;
    }

    public void setHiddenAttribute(HiddenAttribute hidden) {
        this.hidden = hidden;
    }

    public IdAttribute getIdAttribute() {
        return id;
    }

    public void setIdAttribute(IdAttribute id) {
        this.id = id;
    }

    public LangAttribute getLangAttribute() {
        return lang;
    }

    public void setLangAttribute(LangAttribute lang) {
        this.lang = lang;
    }

    public SpellcheckAttribute getSpellcheckAttribute() {
        return spellcheck;
    }

    public void setSpellcheckAttribute(SpellcheckAttribute spellcheck) {
        this.spellcheck = spellcheck;
    }

    public StyleAttribute getStyleAttribute() {
        return style;
    }

    public void setStyleAttribute(StyleAttribute style) {
        this.style = style;
    }

    public TabindexAttribute getTabindexAttribute() {
        return tabindex;
    }

    public void setTabindexAttribute(TabindexAttribute tabindex) {
        this.tabindex = tabindex;
    }

    public TitleAttribute getTitleAttribute() {
        return title;
    }

    public void setTitleAttribute(TitleAttribute title) {
        this.title = title;
    }

    public TranslateAttribute getTranslateAttribute() {
        return translate;
    }

    public void setTranslateAttribute(TranslateAttribute translate) {
        this.translate = translate;
    }
}
| apache-2.0 |
paplorinc/intellij-community | plugins/tasks/tasks-core/src/com/intellij/tasks/actions/OpenTaskDialog.java | 6877 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.tasks.actions;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.options.binding.BindControl;
import com.intellij.openapi.options.binding.ControlBinder;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.tasks.*;
import com.intellij.tasks.impl.LocalTaskImpl;
import com.intellij.tasks.impl.TaskManagerImpl;
import com.intellij.tasks.impl.TaskStateCombo;
import com.intellij.tasks.impl.TaskUtil;
import com.intellij.tasks.ui.TaskDialogPanel;
import com.intellij.tasks.ui.TaskDialogPanelProvider;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBTextField;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collection;
import java.util.List;
/**
 * Dialog shown when the user activates ("opens") a task: lets them edit the
 * task name, optionally update the remote issue state, clear the context,
 * and fill in any provider-specific panels (VCS branch, changelist, ...).
 *
 * @author Dmitry Avdeev
 */
public class OpenTaskDialog extends DialogWrapper {
  private final static Logger LOG = Logger.getInstance("#com.intellij.tasks.actions.SimpleOpenTaskDialog");
  // PropertiesComponent key persisting whether "update state" is checked.
  private static final String UPDATE_STATE_ENABLED = "tasks.open.task.update.state.enabled";

  private JPanel myPanel;
  // Bound to TaskManagerImpl.Config.clearContext via ControlBinder below.
  @BindControl(value = "clearContext", instant = true)
  private JCheckBox myClearContext;
  private JBCheckBox myUpdateState;
  private TaskStateCombo myTaskStateCombo;
  private JPanel myAdditionalPanel;
  private JBTextField myNameField;
  private final Project myProject;
  // Local working copy of the task; the original is not mutated by edits here.
  private final LocalTaskImpl myTask;
  private final List<TaskDialogPanel> myPanels;

  public OpenTaskDialog(@NotNull final Project project, @NotNull final Task task) {
    super(project, false);
    myProject = project;
    myTask = new LocalTaskImpl(task);
    myTaskStateCombo.setProject(myProject);
    myTaskStateCombo.setTask(myTask);
    setTitle("Open Task");
    myNameField.setText(TaskUtil.getTrimmedSummary(task));
    // Issue summaries come from the tracker and cannot be renamed here.
    myNameField.setEnabled(!task.isIssue());

    TaskManagerImpl taskManager = (TaskManagerImpl)TaskManager.getManager(myProject);
    // Reflectively binds @BindControl-annotated fields to the manager config.
    ControlBinder binder = new ControlBinder(taskManager.getState());
    binder.bindAnnotations(this);
    binder.reset();

    // Hide the state controls entirely for repositories without state support.
    if (!TaskStateCombo.stateUpdatesSupportedFor(task)) {
      myUpdateState.setVisible(false);
      myTaskStateCombo.setVisible(false);
    }
    final boolean stateUpdatesEnabled = PropertiesComponent.getInstance(project).getBoolean(UPDATE_STATE_ENABLED, false);
    myUpdateState.setSelected(stateUpdatesEnabled);
    myUpdateState.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        // Persist the checkbox and lazily load states the first time it's on.
        final boolean selected = myUpdateState.isSelected();
        PropertiesComponent.getInstance(project).setValue(UPDATE_STATE_ENABLED, String.valueOf(selected));
        updateFields();
        if (selected) {
          myTaskStateCombo.scheduleUpdateOnce();
        }
      }
    });

    TaskManagerImpl.Config state = taskManager.getState();
    myClearContext.setSelected(state.clearContext);

    updateFields();
    if (myUpdateState.isSelected()) {
      myTaskStateCombo.scheduleUpdateOnce();
    }

    // Stack provider-contributed panels (VCS, time tracking, ...) vertically.
    myAdditionalPanel.setLayout(new BoxLayout(myAdditionalPanel, BoxLayout.Y_AXIS));
    myPanels = TaskDialogPanelProvider.getOpenTaskPanels(project, myTask);
    for (TaskDialogPanel panel : myPanels) {
      myAdditionalPanel.add(panel.getPanel());
    }
    // Propagate name edits to the working copy and notify the panels
    // (e.g. so a suggested branch name can follow the task name).
    myNameField.getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        LocalTaskImpl oldTask = new LocalTaskImpl(myTask);
        myTask.setSummary(myNameField.getText());
        for (TaskDialogPanel panel : myPanels) {
          panel.taskNameChanged(oldTask, myTask);
        }
      }
    });
    init();
  }

  // State combo is only meaningful while "update state" is checked.
  private void updateFields() {
    myTaskStateCombo.setEnabled(myUpdateState.isSelected());
  }

  @Override
  protected void doOKAction() {
    createTask();
    super.doOKAction();
  }

  /**
   * Applies the dialog: pushes the chosen issue state to the repository (if
   * requested), commits all provider panels, and activates the task.
   */
  public void createTask() {
    final TaskManagerImpl taskManager = (TaskManagerImpl)TaskManager.getManager(myProject);

    if (myUpdateState.isSelected()) {
      final CustomTaskState taskState = myTaskStateCombo.getSelectedState();
      final TaskRepository repository = myTask.getRepository();
      if (repository != null && taskState != null) {
        try {
          repository.setTaskState(myTask, taskState);
          // Remember the choice as the default for the next "open task".
          repository.setPreferredOpenTaskState(taskState);
        }
        catch (Exception ex) {
          // State update failure is non-fatal: report it but still open the task.
          Messages.showErrorDialog(myProject, ex.getMessage(), "Cannot Set State For Issue");
          LOG.warn(ex);
        }
      }
    }
    for (TaskDialogPanel panel : myPanels) {
      panel.commit();
    }
    taskManager.activateTask(myTask, isClearContext(), true);
    if (myTask.getType() == TaskType.EXCEPTION && AnalyzeTaskStacktraceAction.hasTexts(myTask)) {
      AnalyzeTaskStacktraceAction.analyzeStacktrace(myTask, myProject);
    }
  }

  private boolean isClearContext() {
    return myClearContext.isSelected();
  }

  @Override
  @NonNls
  protected String getDimensionServiceKey() {
    return "SimpleOpenTaskDialog";
  }

  /**
   * Focus priority: first panel that wants focus, then an empty name field,
   * then the state combo, otherwise the default.
   */
  @Override
  public JComponent getPreferredFocusedComponent() {
    for (TaskDialogPanel panel : myPanels) {
      final JComponent component = panel.getPreferredFocusedComponent();
      if (component != null) {
        return component;
      }
    }
    if (myNameField.getText().trim().isEmpty()) {
      return myNameField;
    }
    if (myTaskStateCombo.isVisible() && myTaskStateCombo.isEnabled()){
      return myTaskStateCombo.getComboBox();
    }
    return null;
  }

  @Nullable
  @Override
  protected ValidationInfo doValidate() {
    String taskName = myNameField.getText().trim();
    if (taskName.isEmpty()) {
      return new ValidationInfo("Task name should not be empty", myNameField);
    }
    // Delegate further validation to the provider panels; first failure wins.
    for (TaskDialogPanel panel : myPanels) {
      ValidationInfo validate = panel.validate();
      if (validate != null) return validate;
    }
    return null;
  }

  @Override
  protected JComponent createCenterPanel() {
    return myPanel;
  }

  // Called by the UI designer runtime for components marked "custom create".
  private void createUIComponents() {
    myTaskStateCombo = new TaskStateCombo() {
      @Nullable
      @Override
      protected CustomTaskState getPreferredState(@NotNull TaskRepository repository, @NotNull Collection<CustomTaskState> available) {
        return repository.getPreferredOpenTaskState();
      }
    };
  }
}
| apache-2.0 |
rundeck/rundeck | rundeck-authz/rundeck-authz-core/src/main/java/org/rundeck/core/auth/access/UnauthorizedAccess.java | 638 | package org.rundeck.core.auth.access;
import lombok.Getter;
/**
 * Thrown when an authorization check fails for a given action on a resource.
 *
 * <p>The message is always of the form
 * {@code "Unauthorized for <action> access to <type> <name>"}.
 */
public class UnauthorizedAccess
        extends Exception
{
    /**
     * Action
     */
    private final String action;
    /**
     * Resource type
     */
    private final String type;
    /**
     * Resource name
     */
    private final String name;

    /**
     * @param action the attempted action (e.g. read, update)
     * @param type   the resource type
     * @param name   the resource name
     */
    public UnauthorizedAccess(final String action, final String type, final String name) {
        super(String.format("Unauthorized for %s access to %s %s", action, type, name));
        this.action = action;
        this.type = type;
        this.name = name;
    }

    // Explicit accessors replace Lombok's @Getter: same generated signatures,
    // but without relying on annotation-processor code generation.
    public String getAction() {
        return action;
    }

    public String getType() {
        return type;
    }

    public String getName() {
        return name;
    }
}
| apache-2.0 |
lmjacksoniii/hazelcast | hazelcast/src/main/java/com/hazelcast/security/permission/CachePermission.java | 1960 | /*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.security.permission;
/**
 * Instance permission covering JCache structures. Each supported action maps
 * to one bit of the permission mask; {@code ACTION_ALL} grants every bit.
 */
public class CachePermission extends InstancePermission {

    private static final int PUT = 4;
    private static final int REMOVE = 8;
    private static final int READ = 16;
    private static final int LISTEN = 32;

    /** Union of every cache action bit, including the inherited create/destroy. */
    private static final int ALL = CREATE | DESTROY | PUT | REMOVE | READ | LISTEN;

    public CachePermission(String name, String... actions) {
        super(name, actions);
    }

    @Override
    protected int initMask(String[] actions) {
        int result = NONE;
        for (String action : actions) {
            if (ActionConstants.ACTION_ALL.equals(action)) {
                // "all" short-circuits: every permission bit is granted.
                return ALL;
            }
            result |= bitFor(action);
        }
        return result;
    }

    /** Maps a single action string to its mask bit; unknown actions add nothing. */
    private int bitFor(String action) {
        if (ActionConstants.ACTION_CREATE.equals(action)) {
            return CREATE;
        } else if (ActionConstants.ACTION_DESTROY.equals(action)) {
            return DESTROY;
        } else if (ActionConstants.ACTION_PUT.equals(action)) {
            return PUT;
        } else if (ActionConstants.ACTION_REMOVE.equals(action)) {
            return REMOVE;
        } else if (ActionConstants.ACTION_READ.equals(action)) {
            return READ;
        } else if (ActionConstants.ACTION_LISTEN.equals(action)) {
            return LISTEN;
        }
        return NONE;
    }
}
| apache-2.0 |
jmimo/netty-icap | src/main/java/ch/mimo/netty/example/icap/preview/IcapClientChannelPipeline.java | 1485 | /*******************************************************************************
* Copyright 2012 Michael Mimo Moratti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package ch.mimo.netty.example.icap.preview;
import static org.jboss.netty.channel.Channels.pipeline;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import ch.mimo.netty.handler.codec.icap.IcapRequestEncoder;
import ch.mimo.netty.handler.codec.icap.IcapResponseDecoder;
/**
 * Builds the client-side channel pipeline: ICAP request encoding, ICAP
 * response decoding, and the application handler — in that order.
 */
public class IcapClientChannelPipeline implements ChannelPipelineFactory {

    @Override
    public ChannelPipeline getPipeline() throws Exception {
        final ChannelPipeline clientPipeline = pipeline();
        // Outbound: serialize IcapRequest objects to the wire format.
        clientPipeline.addLast("encoder", new IcapRequestEncoder());
        // Inbound: parse wire bytes into IcapResponse objects.
        clientPipeline.addLast("decoder", new IcapResponseDecoder());
        // Application callback handler; must sit after both codec handlers.
        clientPipeline.addLast("handler", new IcapClientHandler());
        return clientPipeline;
    }
}
| apache-2.0 |
vivdaxiang/DaXiangLibrary | src/com/daxiang/android/http/ssl/bothway/SSLClient.java | 2877 | package com.daxiang.android.http.ssl.bothway;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.UnknownHostException;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
/**
 * SSL client demonstrating two-way (mutual) certificate verification.
 *
 * <p>Before running, generate the client keystore ({@code client.keystore})
 * and trust store ({@code clienttrust.keystore}) with {@code keytool}, and
 * import the server's certificate into the client trust store.
 *
 * @author daxiang
 * @date 2016-07-18
 */
public class SSLClient {
    private String userName = "大象";
    private String password = "222222";
    private SSLSocket clientSocket;

    public SSLClient() {
        // Clear any previously set JSSE system properties.
        System.clearProperty("javax.net.ssl.keyStore");
        System.clearProperty("javax.net.ssl.keyStorePassword");
        System.clearProperty("javax.net.ssl.keyStoreType");
        System.clearProperty("javax.net.ssl.trustStore");
        System.clearProperty("javax.net.ssl.trustStorePassword");
        System.clearProperty("javax.net.ssl.trustStoreType");
        // Point JSSE at the client key material and trusted certificates.
        System.setProperty("javax.net.ssl.keyStore", "e:\\ssl\\client.keystore");
        System.setProperty("javax.net.ssl.keyStorePassword", "vivdaxiang");
        // This example uses a JKS keystore; for PKCS#12 set "PKCS12" instead.
        System.setProperty("javax.net.ssl.keyStoreType", "JKS");
        System.setProperty("javax.net.ssl.trustStore", "e:\\ssl\\clienttrust.keystore");
        System.setProperty("javax.net.ssl.trustStorePassword", "vivdaxiang");
        System.setProperty("javax.net.ssl.trustStoreType", "JKS");
        try {
            clientSocket = (SSLSocket) SSLSocketFactory.getDefault().createSocket("localhost", SSLServer.serverPort);
        } catch (UnknownHostException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Sends the credentials to the server and prints its full response.
     * Streams are closed via try-with-resources so they are released even
     * when an I/O error occurs (the original leaked them on exception).
     */
    public void requestServer() {
        if (clientSocket == null) {
            // Socket creation failed in the constructor (already reported).
            return;
        }
        try (PrintWriter bw = new PrintWriter(new OutputStreamWriter(clientSocket.getOutputStream()));
                BufferedReader br = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()))) {
            bw.println(userName);
            bw.println(password);
            bw.flush();
            StringBuilder message = new StringBuilder("服务器返回:");
            String temp;
            while ((temp = br.readLine()) != null) {
                message.append(temp);
            }
            System.out.println("---这是客户端输出的信息----");
            System.out.println(message.toString());
            clientSocket.close();
        } catch (IOException e) {
            // Demo code: report and continue. Real code should use a logger.
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        SSLClient sslClient = new SSLClient();
        sslClient.requestServer();
    }
}
| apache-2.0 |
wapalxj/Android_C2_UI | C2_UI/c5_15_webview/src/main/java/com/ui/vero1/c5_15_webview/MainActivity.java | 1983 | package com.ui.vero1.c5_15_webview;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
/**
 * Demo activity embedding a {@link WebView} that loads a page in-app
 * (instead of handing the URL off to the system browser).
 */
public class MainActivity extends AppCompatActivity {

    private WebView webView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        webView = (WebView) findViewById(R.id.webview);
        // Enable JavaScript and DOM storage; most modern pages need both.
        webView.getSettings().setJavaScriptEnabled(true);
        webView.getSettings().setDomStorageEnabled(true);
        // FIX: loadUrl requires an explicit scheme — the original passed
        // "www.baidu.com", which is not a valid URL and fails to load.
        webView.loadUrl("http://www.baidu.com");
    }
}
| apache-2.0 |
marcusbb/bag-o-util | src/main/java/provision/services/propagation/RequestIDTypeGenerator.java | 373 | package provision.services.propagation;
import provision.services.logging.Logger;
public class RequestIDTypeGenerator implements IDGenerator {
public String generateID() {
try {
return new RequestID().toString();
} catch (Exception e) {
Logger.error(RequestIDTypeGenerator.class.getName(), "generateID", null,e.getMessage(),e);
}
return null;
}
}
| apache-2.0 |
shakamunyi/beam | sdks/java/core/src/main/java/org/apache/beam/sdk/runners/inprocess/InProcessBundleOutputManager.java | 1996 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.runners.inprocess;
import org.apache.beam.sdk.runners.inprocess.InProcessPipelineRunner.CommittedBundle;
import org.apache.beam.sdk.runners.inprocess.InProcessPipelineRunner.UncommittedBundle;
import org.apache.beam.sdk.util.DoFnRunners.OutputManager;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.TupleTag;

import java.util.Map;

/**
 * An {@link OutputManager} that outputs to {@link CommittedBundle Bundles} used by the
 * {@link InProcessPipelineRunner}.
 */
public class InProcessBundleOutputManager implements OutputManager {
  // One uncommitted bundle per output tag; elements are appended as produced.
  private final Map<TupleTag<?>, UncommittedBundle<?>> bundles;

  public static InProcessBundleOutputManager create(
      Map<TupleTag<?>, UncommittedBundle<?>> outputBundles) {
    return new InProcessBundleOutputManager(outputBundles);
  }

  public InProcessBundleOutputManager(Map<TupleTag<?>, UncommittedBundle<?>> bundles) {
    this.bundles = bundles;
  }

  @Override
  public <T> void output(TupleTag<T> tag, WindowedValue<T> output) {
    // The map erases the element type; the tag's type parameter guarantees
    // the bundle stored under it accepts WindowedValue<T>.
    @SuppressWarnings("unchecked")
    UncommittedBundle<T> destination = (UncommittedBundle<T>) bundles.get(tag);
    destination.add(output);
  }
}
| apache-2.0 |
jaadds/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1.common/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/dto/APIProductDTO.java | 32240 | package org.wso2.carbon.apimgt.rest.api.publisher.v1.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APICorsConfigurationDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIMonetizationInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIProductBusinessInformationDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIScopeDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ProductAPIDTO;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
import java.util.Objects;
import javax.xml.bind.annotation.*;
import org.wso2.carbon.apimgt.rest.api.common.annotations.Scope;
import com.fasterxml.jackson.annotation.JsonCreator;
import javax.validation.Valid;
public class APIProductDTO {
private String id = null;
private String name = null;
private String context = null;
private String description = null;
private String provider = null;
private Boolean hasThumbnail = null;
/** Lifecycle state of the API product; only PUBLISHED products appear in the Developer Portal. */
@XmlType(name="StateEnum")
@XmlEnum(String.class)
public enum StateEnum {
    CREATED("CREATED"),
    PUBLISHED("PUBLISHED");
    // Serialized string form of the constant.
    private String value;
    StateEnum (String v) {
        value = v;
    }
    public String value() {
        return value;
    }
    @Override
    public String toString() {
        return String.valueOf(value);
    }
    /**
     * Deserializes a string into its enum constant.
     * Returns {@code null} (not an exception) when no constant matches —
     * generated-code convention relied on by Jackson binding.
     */
    @JsonCreator
    public static StateEnum fromValue(String v) {
        for (StateEnum b : StateEnum.values()) {
            if (String.valueOf(b.value).equals(v)) {
                return b;
            }
        }
        return null;
    }
}
private StateEnum state = null;
private Boolean enableSchemaValidation = null;
private Boolean enableStore = null;
private String testKey = null;
private Boolean responseCachingEnabled = null;
private Integer cacheTimeout = null;
/** Developer Portal visibility level of the API product: PUBLIC, PRIVATE, or role-RESTRICTED. */
@XmlType(name="VisibilityEnum")
@XmlEnum(String.class)
public enum VisibilityEnum {
    PUBLIC("PUBLIC"),
    PRIVATE("PRIVATE"),
    RESTRICTED("RESTRICTED");
    // Serialized string form of the constant.
    private String value;
    VisibilityEnum (String v) {
        value = v;
    }
    public String value() {
        return value;
    }
    @Override
    public String toString() {
        return String.valueOf(value);
    }
    /** Deserializes a string into its constant; returns {@code null} when no constant matches. */
    @JsonCreator
    public static VisibilityEnum fromValue(String v) {
        for (VisibilityEnum b : VisibilityEnum.values()) {
            if (String.valueOf(b.value).equals(v)) {
                return b;
            }
        }
        return null;
    }
}
private VisibilityEnum visibility = VisibilityEnum.PUBLIC;
private List<String> visibleRoles = new ArrayList<String>();
private List<String> visibleTenants = new ArrayList<String>();
/** Publisher-side access control: NONE (all publishers/creators) or role-RESTRICTED. */
@XmlType(name="AccessControlEnum")
@XmlEnum(String.class)
public enum AccessControlEnum {
    NONE("NONE"),
    RESTRICTED("RESTRICTED");
    // Serialized string form of the constant.
    private String value;
    AccessControlEnum (String v) {
        value = v;
    }
    public String value() {
        return value;
    }
    @Override
    public String toString() {
        return String.valueOf(value);
    }
    /** Deserializes a string into its constant; returns {@code null} when no constant matches. */
    @JsonCreator
    public static AccessControlEnum fromValue(String v) {
        for (AccessControlEnum b : AccessControlEnum.values()) {
            if (String.valueOf(b.value).equals(v)) {
                return b;
            }
        }
        return null;
    }
}
private AccessControlEnum accessControl = AccessControlEnum.NONE;
private List<String> accessControlRoles = new ArrayList<String>();
private List<String> gatewayEnvironments = new ArrayList<String>();
/** Discriminator for the artifact type: plain API or API product. */
@XmlType(name="ApiTypeEnum")
@XmlEnum(String.class)
public enum ApiTypeEnum {
    API("API"),
    APIPRODUCT("APIPRODUCT");
    // Serialized string form of the constant.
    private String value;
    ApiTypeEnum (String v) {
        value = v;
    }
    public String value() {
        return value;
    }
    @Override
    public String toString() {
        return String.valueOf(value);
    }
    /** Deserializes a string into its constant; returns {@code null} when no constant matches. */
    @JsonCreator
    public static ApiTypeEnum fromValue(String v) {
        for (ApiTypeEnum b : ApiTypeEnum.values()) {
            if (String.valueOf(b.value).equals(v)) {
                return b;
            }
        }
        return null;
    }
}
private ApiTypeEnum apiType = null;
private List<String> transport = new ArrayList<String>();
private List<String> tags = new ArrayList<String>();
private List<String> policies = new ArrayList<String>();
private String apiThrottlingPolicy = null;
private String authorizationHeader = null;
private List<String> securityScheme = new ArrayList<String>();
/** Which tenants may subscribe: the current tenant, all tenants, or a listed subset. */
@XmlType(name="SubscriptionAvailabilityEnum")
@XmlEnum(String.class)
public enum SubscriptionAvailabilityEnum {
    CURRENT_TENANT("CURRENT_TENANT"),
    ALL_TENANTS("ALL_TENANTS"),
    SPECIFIC_TENANTS("SPECIFIC_TENANTS");
    // Serialized string form of the constant.
    private String value;
    SubscriptionAvailabilityEnum (String v) {
        value = v;
    }
    public String value() {
        return value;
    }
    @Override
    public String toString() {
        return String.valueOf(value);
    }
    /** Deserializes a string into its constant; returns {@code null} when no constant matches. */
    @JsonCreator
    public static SubscriptionAvailabilityEnum fromValue(String v) {
        for (SubscriptionAvailabilityEnum b : SubscriptionAvailabilityEnum.values()) {
            if (String.valueOf(b.value).equals(v)) {
                return b;
            }
        }
        return null;
    }
}
private SubscriptionAvailabilityEnum subscriptionAvailability = SubscriptionAvailabilityEnum.ALL_TENANTS;
@Scope(name = "apim:api_publish", description="", value ="")
private List<String> subscriptionAvailableTenants = new ArrayList<String>();
private Map<String, String> additionalProperties = new HashMap<String, String>();
private APIMonetizationInfoDTO monetization = null;
private APIProductBusinessInformationDTO businessInformation = null;
private APICorsConfigurationDTO corsConfiguration = null;
private String createdTime = null;
private String lastUpdatedTime = null;
private List<ProductAPIDTO> apis = new ArrayList<ProductAPIDTO>();
private List<APIScopeDTO> scopes = new ArrayList<APIScopeDTO>();
private List<String> categories = new ArrayList<String>();
/**
* UUID of the api product
**/
public APIProductDTO id(String id) {
this.id = id;
return this;
}
@ApiModelProperty(example = "01234567-0123-0123-0123-012345678901", value = "UUID of the api product ")
@JsonProperty("id")
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
/**
* Name of the API Product
**/
public APIProductDTO name(String name) {
this.name = name;
return this;
}
@ApiModelProperty(example = "PizzaShackAPIProduct", required = true, value = "Name of the API Product")
@JsonProperty("name")
@NotNull
@Size(min=1,max=50) public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
/**
**/
public APIProductDTO context(String context) {
this.context = context;
return this;
}
@ApiModelProperty(example = "pizzaproduct", value = "")
@JsonProperty("context")
@Size(min=1,max=60) public String getContext() {
return context;
}
public void setContext(String context) {
this.context = context;
}
/**
* A brief description about the API
**/
public APIProductDTO description(String description) {
this.description = description;
return this;
}
@ApiModelProperty(example = "This is a simple API for Pizza Shack online pizza delivery store", value = "A brief description about the API")
@JsonProperty("description")
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
/**
* If the provider value is not given, the user invoking the API will be used as the provider.
**/
public APIProductDTO provider(String provider) {
this.provider = provider;
return this;
}
@ApiModelProperty(example = "admin", value = "If the provider value is not given, the user invoking the API will be used as the provider. ")
@JsonProperty("provider")
@Size(max=50) public String getProvider() {
return provider;
}
public void setProvider(String provider) {
this.provider = provider;
}
/**
**/
public APIProductDTO hasThumbnail(Boolean hasThumbnail) {
this.hasThumbnail = hasThumbnail;
return this;
}
@ApiModelProperty(example = "false", value = "")
@JsonProperty("hasThumbnail")
public Boolean isHasThumbnail() {
return hasThumbnail;
}
public void setHasThumbnail(Boolean hasThumbnail) {
this.hasThumbnail = hasThumbnail;
}
/**
* State of the API product. Only published api products are visible on the Developer Portal
**/
public APIProductDTO state(StateEnum state) {
this.state = state;
return this;
}
@ApiModelProperty(value = "State of the API product. Only published api products are visible on the Developer Portal ")
@JsonProperty("state")
public StateEnum getState() {
return state;
}
public void setState(StateEnum state) {
this.state = state;
}
/**
**/
public APIProductDTO enableSchemaValidation(Boolean enableSchemaValidation) {
this.enableSchemaValidation = enableSchemaValidation;
return this;
}
@ApiModelProperty(example = "false", value = "")
@JsonProperty("enableSchemaValidation")
public Boolean isEnableSchemaValidation() {
return enableSchemaValidation;
}
public void setEnableSchemaValidation(Boolean enableSchemaValidation) {
this.enableSchemaValidation = enableSchemaValidation;
}
/**
**/
public APIProductDTO enableStore(Boolean enableStore) {
this.enableStore = enableStore;
return this;
}
@ApiModelProperty(example = "true", value = "")
@JsonProperty("enableStore")
public Boolean isEnableStore() {
return enableStore;
}
public void setEnableStore(Boolean enableStore) {
this.enableStore = enableStore;
}
/**
**/
public APIProductDTO testKey(String testKey) {
this.testKey = testKey;
return this;
}
@ApiModelProperty(example = "8swdwj9080edejhj", value = "")
@JsonProperty("testKey")
public String getTestKey() {
return testKey;
}
public void setTestKey(String testKey) {
this.testKey = testKey;
}
/**
**/
public APIProductDTO responseCachingEnabled(Boolean responseCachingEnabled) {
this.responseCachingEnabled = responseCachingEnabled;
return this;
}
@ApiModelProperty(example = "true", value = "")
@JsonProperty("responseCachingEnabled")
public Boolean isResponseCachingEnabled() {
return responseCachingEnabled;
}
public void setResponseCachingEnabled(Boolean responseCachingEnabled) {
this.responseCachingEnabled = responseCachingEnabled;
}
/**
**/
public APIProductDTO cacheTimeout(Integer cacheTimeout) {
this.cacheTimeout = cacheTimeout;
return this;
}
@ApiModelProperty(example = "300", value = "")
@JsonProperty("cacheTimeout")
public Integer getCacheTimeout() {
return cacheTimeout;
}
public void setCacheTimeout(Integer cacheTimeout) {
this.cacheTimeout = cacheTimeout;
}
/**
* The visibility level of the API. Accepts one of the following. PUBLIC, PRIVATE, RESTRICTED.
**/
public APIProductDTO visibility(VisibilityEnum visibility) {
this.visibility = visibility;
return this;
}
@ApiModelProperty(example = "PUBLIC", value = "The visibility level of the API. Accepts one of the following. PUBLIC, PRIVATE, RESTRICTED.")
@JsonProperty("visibility")
public VisibilityEnum getVisibility() {
return visibility;
}
public void setVisibility(VisibilityEnum visibility) {
this.visibility = visibility;
}
/**
* The user roles that are able to access the API
**/
public APIProductDTO visibleRoles(List<String> visibleRoles) {
this.visibleRoles = visibleRoles;
return this;
}
@ApiModelProperty(example = "[]", value = "The user roles that are able to access the API")
@JsonProperty("visibleRoles")
public List<String> getVisibleRoles() {
return visibleRoles;
}
public void setVisibleRoles(List<String> visibleRoles) {
this.visibleRoles = visibleRoles;
}
/**
**/
public APIProductDTO visibleTenants(List<String> visibleTenants) {
this.visibleTenants = visibleTenants;
return this;
}
@ApiModelProperty(example = "[]", value = "")
@JsonProperty("visibleTenants")
public List<String> getVisibleTenants() {
return visibleTenants;
}
public void setVisibleTenants(List<String> visibleTenants) {
this.visibleTenants = visibleTenants;
}
/**
* Defines whether the API Product is restricted to certain set of publishers or creators or is it visible to all the publishers and creators. If the accessControl restriction is none, this API Product can be modified by all the publishers and creators, if not it can only be viewable/modifiable by certain set of publishers and creators, based on the restriction.
**/
public APIProductDTO accessControl(AccessControlEnum accessControl) {
this.accessControl = accessControl;
return this;
}
@ApiModelProperty(value = "Defines whether the API Product is restricted to certain set of publishers or creators or is it visible to all the publishers and creators. If the accessControl restriction is none, this API Product can be modified by all the publishers and creators, if not it can only be viewable/modifiable by certain set of publishers and creators, based on the restriction. ")
@JsonProperty("accessControl")
public AccessControlEnum getAccessControl() {
return accessControl;
}
public void setAccessControl(AccessControlEnum accessControl) {
this.accessControl = accessControl;
}
/**
* The user roles that are able to view/modify as API Product publisher or creator.
**/
public APIProductDTO accessControlRoles(List<String> accessControlRoles) {
this.accessControlRoles = accessControlRoles;
return this;
}
@ApiModelProperty(example = "[]", value = "The user roles that are able to view/modify as API Product publisher or creator.")
@JsonProperty("accessControlRoles")
public List<String> getAccessControlRoles() {
return accessControlRoles;
}
public void setAccessControlRoles(List<String> accessControlRoles) {
this.accessControlRoles = accessControlRoles;
}
/**
* List of gateway environments the API Product is available
**/
public APIProductDTO gatewayEnvironments(List<String> gatewayEnvironments) {
this.gatewayEnvironments = gatewayEnvironments;
return this;
}
@ApiModelProperty(example = "[\"Production and Sandbox\"]", value = "List of gateway environments the API Product is available ")
@JsonProperty("gatewayEnvironments")
public List<String> getGatewayEnvironments() {
return gatewayEnvironments;
}
public void setGatewayEnvironments(List<String> gatewayEnvironments) {
this.gatewayEnvironments = gatewayEnvironments;
}
/**
* The API type to be used. Accepted values are API, APIPRODUCT
**/
public APIProductDTO apiType(ApiTypeEnum apiType) {
this.apiType = apiType;
return this;
}
@ApiModelProperty(example = "APIPRODUCT", value = "The API type to be used. Accepted values are API, APIPRODUCT")
@JsonProperty("apiType")
public ApiTypeEnum getApiType() {
return apiType;
}
public void setApiType(ApiTypeEnum apiType) {
this.apiType = apiType;
}
/**
* Supported transports for the API (http and/or https).
**/
public APIProductDTO transport(List<String> transport) {
this.transport = transport;
return this;
}
@ApiModelProperty(example = "[\"http\",\"https\"]", value = "Supported transports for the API (http and/or https). ")
@JsonProperty("transport")
public List<String> getTransport() {
return transport;
}
public void setTransport(List<String> transport) {
this.transport = transport;
}
/**
**/
public APIProductDTO tags(List<String> tags) {
this.tags = tags;
return this;
}
@ApiModelProperty(example = "[\"pizza\",\"food\"]", value = "")
@JsonProperty("tags")
public List<String> getTags() {
return tags;
}
public void setTags(List<String> tags) {
this.tags = tags;
}
/**
**/
public APIProductDTO policies(List<String> policies) {
this.policies = policies;
return this;
}
@ApiModelProperty(example = "[\"Unlimited\"]", value = "")
@JsonProperty("policies")
public List<String> getPolicies() {
return policies;
}
public void setPolicies(List<String> policies) {
this.policies = policies;
}
/**
* The API level throttling policy selected for the particular API Product
**/
public APIProductDTO apiThrottlingPolicy(String apiThrottlingPolicy) {
this.apiThrottlingPolicy = apiThrottlingPolicy;
return this;
}
@ApiModelProperty(example = "Unlimited", value = "The API level throttling policy selected for the particular API Product")
@JsonProperty("apiThrottlingPolicy")
public String getApiThrottlingPolicy() {
return apiThrottlingPolicy;
}
public void setApiThrottlingPolicy(String apiThrottlingPolicy) {
this.apiThrottlingPolicy = apiThrottlingPolicy;
}
/**
* Name of the Authorization header used for invoking the API. If it is not set, Authorization header name specified in tenant or system level will be used.
**/
public APIProductDTO authorizationHeader(String authorizationHeader) {
this.authorizationHeader = authorizationHeader;
return this;
}
@ApiModelProperty(example = "Authorization", value = "Name of the Authorization header used for invoking the API. If it is not set, Authorization header name specified in tenant or system level will be used. ")
@JsonProperty("authorizationHeader")
public String getAuthorizationHeader() {
return authorizationHeader;
}
public void setAuthorizationHeader(String authorizationHeader) {
this.authorizationHeader = authorizationHeader;
}
/**
* Types of API security, the current API secured with. It can be either OAuth2 or mutual SSL or both. If it is not set OAuth2 will be set as the security for the current API.
**/
public APIProductDTO securityScheme(List<String> securityScheme) {
this.securityScheme = securityScheme;
return this;
}
@ApiModelProperty(example = "[\"oauth2\"]", value = "Types of API security, the current API secured with. It can be either OAuth2 or mutual SSL or both. If it is not set OAuth2 will be set as the security for the current API. ")
@JsonProperty("securityScheme")
public List<String> getSecurityScheme() {
return securityScheme;
}
public void setSecurityScheme(List<String> securityScheme) {
this.securityScheme = securityScheme;
}
/**
* The subscription availability. Accepts one of the following. CURRENT_TENANT, ALL_TENANTS or SPECIFIC_TENANTS.
**/
public APIProductDTO subscriptionAvailability(SubscriptionAvailabilityEnum subscriptionAvailability) {
this.subscriptionAvailability = subscriptionAvailability;
return this;
}
@ApiModelProperty(example = "CURRENT_TENANT", value = "The subscription availability. Accepts one of the following. CURRENT_TENANT, ALL_TENANTS or SPECIFIC_TENANTS.")
@JsonProperty("subscriptionAvailability")
public SubscriptionAvailabilityEnum getSubscriptionAvailability() {
return subscriptionAvailability;
}
public void setSubscriptionAvailability(SubscriptionAvailabilityEnum subscriptionAvailability) {
this.subscriptionAvailability = subscriptionAvailability;
}
/**
**/
public APIProductDTO subscriptionAvailableTenants(List<String> subscriptionAvailableTenants) {
this.subscriptionAvailableTenants = subscriptionAvailableTenants;
return this;
}
@ApiModelProperty(example = "[]", value = "")
@JsonProperty("subscriptionAvailableTenants")
public List<String> getSubscriptionAvailableTenants() {
return subscriptionAvailableTenants;
}
public void setSubscriptionAvailableTenants(List<String> subscriptionAvailableTenants) {
this.subscriptionAvailableTenants = subscriptionAvailableTenants;
}
/**
* Map of custom properties of API
**/
public APIProductDTO additionalProperties(Map<String, String> additionalProperties) {
this.additionalProperties = additionalProperties;
return this;
}
@ApiModelProperty(value = "Map of custom properties of API")
@JsonProperty("additionalProperties")
public Map<String, String> getAdditionalProperties() {
return additionalProperties;
}
public void setAdditionalProperties(Map<String, String> additionalProperties) {
this.additionalProperties = additionalProperties;
}
/**
**/
public APIProductDTO monetization(APIMonetizationInfoDTO monetization) {
this.monetization = monetization;
return this;
}
@ApiModelProperty(value = "")
@Valid
@JsonProperty("monetization")
public APIMonetizationInfoDTO getMonetization() {
return monetization;
}
public void setMonetization(APIMonetizationInfoDTO monetization) {
this.monetization = monetization;
}
/**
**/
public APIProductDTO businessInformation(APIProductBusinessInformationDTO businessInformation) {
this.businessInformation = businessInformation;
return this;
}
@ApiModelProperty(value = "")
@Valid
@JsonProperty("businessInformation")
public APIProductBusinessInformationDTO getBusinessInformation() {
return businessInformation;
}
public void setBusinessInformation(APIProductBusinessInformationDTO businessInformation) {
this.businessInformation = businessInformation;
}
/**
**/
public APIProductDTO corsConfiguration(APICorsConfigurationDTO corsConfiguration) {
this.corsConfiguration = corsConfiguration;
return this;
}
@ApiModelProperty(value = "")
@Valid
@JsonProperty("corsConfiguration")
public APICorsConfigurationDTO getCorsConfiguration() {
return corsConfiguration;
}
public void setCorsConfiguration(APICorsConfigurationDTO corsConfiguration) {
this.corsConfiguration = corsConfiguration;
}
/**
**/
public APIProductDTO createdTime(String createdTime) {
this.createdTime = createdTime;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("createdTime")
public String getCreatedTime() {
return createdTime;
}
public void setCreatedTime(String createdTime) {
this.createdTime = createdTime;
}
/**
**/
public APIProductDTO lastUpdatedTime(String lastUpdatedTime) {
this.lastUpdatedTime = lastUpdatedTime;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastUpdatedTime")
public String getLastUpdatedTime() {
return lastUpdatedTime;
}
public void setLastUpdatedTime(String lastUpdatedTime) {
this.lastUpdatedTime = lastUpdatedTime;
}
/**
* APIs and resources in the API Product.
**/
public APIProductDTO apis(List<ProductAPIDTO> apis) {
this.apis = apis;
return this;
}
@ApiModelProperty(example = "[{\"name\":\"PizzaShackAPI\",\"apiId\":\"01234567-0123-0123-0123-012345678901\",\"version\":\"1.0\",\"operations\":[{\"target\":\"/order/{orderId}\",\"verb\":\"POST\",\"authType\":\"Application & Application User\",\"throttlingPolicy\":\"Unlimited\"},{\"target\":\"/menu\",\"verb\":\"GET\",\"authType\":\"Application & Application User\",\"throttlingPolicy\":\"Unlimited\"}]}]", value = "APIs and resources in the API Product. ")
@Valid
@JsonProperty("apis")
public List<ProductAPIDTO> getApis() {
return apis;
}
public void setApis(List<ProductAPIDTO> apis) {
this.apis = apis;
}
/**
**/
public APIProductDTO scopes(List<APIScopeDTO> scopes) {
this.scopes = scopes;
return this;
}
@ApiModelProperty(example = "[]", value = "")
@Valid
@JsonProperty("scopes")
public List<APIScopeDTO> getScopes() {
return scopes;
}
public void setScopes(List<APIScopeDTO> scopes) {
this.scopes = scopes;
}
/**
* API categories
**/
public APIProductDTO categories(List<String> categories) {
this.categories = categories;
return this;
}
@ApiModelProperty(example = "[]", value = "API categories ")
@JsonProperty("categories")
public List<String> getCategories() {
return categories;
}
public void setCategories(List<String> categories) {
this.categories = categories;
}
/**
 * Value equality over every serialized field, null-safe via
 * {@link Objects#equals(Object, Object)}. Must be kept in sync with
 * {@link #hashCode()}, which hashes the same field set.
 */
@Override
public boolean equals(java.lang.Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    APIProductDTO apIProduct = (APIProductDTO) o;
    return Objects.equals(id, apIProduct.id) &&
        Objects.equals(name, apIProduct.name) &&
        Objects.equals(context, apIProduct.context) &&
        Objects.equals(description, apIProduct.description) &&
        Objects.equals(provider, apIProduct.provider) &&
        Objects.equals(hasThumbnail, apIProduct.hasThumbnail) &&
        Objects.equals(state, apIProduct.state) &&
        Objects.equals(enableSchemaValidation, apIProduct.enableSchemaValidation) &&
        Objects.equals(enableStore, apIProduct.enableStore) &&
        Objects.equals(testKey, apIProduct.testKey) &&
        Objects.equals(responseCachingEnabled, apIProduct.responseCachingEnabled) &&
        Objects.equals(cacheTimeout, apIProduct.cacheTimeout) &&
        Objects.equals(visibility, apIProduct.visibility) &&
        Objects.equals(visibleRoles, apIProduct.visibleRoles) &&
        Objects.equals(visibleTenants, apIProduct.visibleTenants) &&
        Objects.equals(accessControl, apIProduct.accessControl) &&
        Objects.equals(accessControlRoles, apIProduct.accessControlRoles) &&
        Objects.equals(gatewayEnvironments, apIProduct.gatewayEnvironments) &&
        Objects.equals(apiType, apIProduct.apiType) &&
        Objects.equals(transport, apIProduct.transport) &&
        Objects.equals(tags, apIProduct.tags) &&
        Objects.equals(policies, apIProduct.policies) &&
        Objects.equals(apiThrottlingPolicy, apIProduct.apiThrottlingPolicy) &&
        Objects.equals(authorizationHeader, apIProduct.authorizationHeader) &&
        Objects.equals(securityScheme, apIProduct.securityScheme) &&
        Objects.equals(subscriptionAvailability, apIProduct.subscriptionAvailability) &&
        Objects.equals(subscriptionAvailableTenants, apIProduct.subscriptionAvailableTenants) &&
        Objects.equals(additionalProperties, apIProduct.additionalProperties) &&
        Objects.equals(monetization, apIProduct.monetization) &&
        Objects.equals(businessInformation, apIProduct.businessInformation) &&
        Objects.equals(corsConfiguration, apIProduct.corsConfiguration) &&
        Objects.equals(createdTime, apIProduct.createdTime) &&
        Objects.equals(lastUpdatedTime, apIProduct.lastUpdatedTime) &&
        Objects.equals(apis, apIProduct.apis) &&
        Objects.equals(scopes, apIProduct.scopes) &&
        Objects.equals(categories, apIProduct.categories);
}
/** Hashes exactly the field set compared by {@link #equals(Object)}. */
@Override
public int hashCode() {
    return Objects.hash(id, name, context, description, provider, hasThumbnail, state, enableSchemaValidation, enableStore, testKey, responseCachingEnabled, cacheTimeout, visibility, visibleRoles, visibleTenants, accessControl, accessControlRoles, gatewayEnvironments, apiType, transport, tags, policies, apiThrottlingPolicy, authorizationHeader, securityScheme, subscriptionAvailability, subscriptionAvailableTenants, additionalProperties, monetization, businessInformation, corsConfiguration, createdTime, lastUpdatedTime, apis, scopes, categories);
}
/**
 * Multi-line debug dump of every field; nested objects are re-indented by
 * {@code toIndentedString} so their own multi-line output nests correctly.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class APIProductDTO {\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    context: ").append(toIndentedString(context)).append("\n");
    sb.append("    description: ").append(toIndentedString(description)).append("\n");
    sb.append("    provider: ").append(toIndentedString(provider)).append("\n");
    sb.append("    hasThumbnail: ").append(toIndentedString(hasThumbnail)).append("\n");
    sb.append("    state: ").append(toIndentedString(state)).append("\n");
    sb.append("    enableSchemaValidation: ").append(toIndentedString(enableSchemaValidation)).append("\n");
    sb.append("    enableStore: ").append(toIndentedString(enableStore)).append("\n");
    sb.append("    testKey: ").append(toIndentedString(testKey)).append("\n");
    sb.append("    responseCachingEnabled: ").append(toIndentedString(responseCachingEnabled)).append("\n");
    sb.append("    cacheTimeout: ").append(toIndentedString(cacheTimeout)).append("\n");
    sb.append("    visibility: ").append(toIndentedString(visibility)).append("\n");
    sb.append("    visibleRoles: ").append(toIndentedString(visibleRoles)).append("\n");
    sb.append("    visibleTenants: ").append(toIndentedString(visibleTenants)).append("\n");
    sb.append("    accessControl: ").append(toIndentedString(accessControl)).append("\n");
    sb.append("    accessControlRoles: ").append(toIndentedString(accessControlRoles)).append("\n");
    sb.append("    gatewayEnvironments: ").append(toIndentedString(gatewayEnvironments)).append("\n");
    sb.append("    apiType: ").append(toIndentedString(apiType)).append("\n");
    sb.append("    transport: ").append(toIndentedString(transport)).append("\n");
    sb.append("    tags: ").append(toIndentedString(tags)).append("\n");
    sb.append("    policies: ").append(toIndentedString(policies)).append("\n");
    sb.append("    apiThrottlingPolicy: ").append(toIndentedString(apiThrottlingPolicy)).append("\n");
    sb.append("    authorizationHeader: ").append(toIndentedString(authorizationHeader)).append("\n");
    sb.append("    securityScheme: ").append(toIndentedString(securityScheme)).append("\n");
    sb.append("    subscriptionAvailability: ").append(toIndentedString(subscriptionAvailability)).append("\n");
    sb.append("    subscriptionAvailableTenants: ").append(toIndentedString(subscriptionAvailableTenants)).append("\n");
    sb.append("    additionalProperties: ").append(toIndentedString(additionalProperties)).append("\n");
    sb.append("    monetization: ").append(toIndentedString(monetization)).append("\n");
    sb.append("    businessInformation: ").append(toIndentedString(businessInformation)).append("\n");
    sb.append("    corsConfiguration: ").append(toIndentedString(corsConfiguration)).append("\n");
    sb.append("    createdTime: ").append(toIndentedString(createdTime)).append("\n");
    sb.append("    lastUpdatedTime: ").append(toIndentedString(lastUpdatedTime)).append("\n");
    sb.append("    apis: ").append(toIndentedString(apis)).append("\n");
    sb.append("    scopes: ").append(toIndentedString(scopes)).append("\n");
    sb.append("    categories: ").append(toIndentedString(categories)).append("\n");
    sb.append("}");
    return sb.toString();
}
/**
 * Renders {@code o} for {@link #toString()}, indenting every line after the
 * first by four spaces so nested objects align; {@code null} becomes "null".
 */
private String toIndentedString(java.lang.Object o) {
    // Objects.toString handles the null case; "null" has no newline,
    // so applying the replacement unconditionally is equivalent.
    return Objects.toString(o, "null").replace("\n", "\n    ");
}
}
| apache-2.0 |
jimv39/qvcsos | qvcse-gui/src/main/java/com/qumasoft/guitools/qwin/dialog/GetDirectoryDialog.java | 10807 | /* Copyright 2004-2021 Jim Voris
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qumasoft.guitools.qwin.dialog;
import com.qumasoft.guitools.qwin.QWinFrame;
import com.qumasoft.guitools.qwin.operation.OperationGetDirectory;
import com.qumasoft.qvcslib.commandargs.GetDirectoryCommandArgs;
import com.qumasoft.qvcslib.Utility;
/**
* Get directory dialog.
* @author Jim Voris
*/
public class GetDirectoryDialog extends AbstractQWinCommandDialog {
private static final long serialVersionUID = -8486091819481601982L;
private final OperationGetDirectory operationGetDirectory;
/**
* Create a get directory dialog.
* @param parent the parent frame.
* @param operation the get directory operation that will do the work.
*/
public GetDirectoryDialog(java.awt.Frame parent, OperationGetDirectory operation) {
    super(parent, true); // 'true' makes the dialog modal
    initComponents();    // build the form-editor generated widgets first
    this.operationGetDirectory = operation;
    // populateComponents/setFont/center are defined outside this view —
    // presumably on AbstractQWinCommandDialog or below; verify before relying on order.
    populateComponents();
    setFont();
    center();
}
/**
* This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The content of this method is always regenerated
* by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
// NOTE: generated by the NetBeans form editor (see the GEN markers); regenerate
// through the .form file rather than editing this method by hand.
private void initComponents() {

    // Create the dialog's controls.
    recurseDirectoriesCheckBox = new javax.swing.JCheckBox();
    overwriteWorkfileComboBox = new javax.swing.JComboBox();
    timestampComboBox = new javax.swing.JComboBox();
    okButton = new javax.swing.JButton();
    cancelButton = new javax.swing.JButton();

    // Dialog-level properties and window-close handling.
    setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
    setTitle("Get Directory");
    setResizable(false);
    addWindowListener(new java.awt.event.WindowAdapter() {
        public void windowClosing(java.awt.event.WindowEvent evt) {
            formWindowClosing(evt);
        }
    });

    recurseDirectoriesCheckBox.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N
    recurseDirectoriesCheckBox.setText("Recurse Directories");
    recurseDirectoriesCheckBox.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            recurseDirectoriesCheckBoxActionPerformed(evt);
        }
    });

    overwriteWorkfileComboBox.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N
    overwriteWorkfileComboBox.setMaximumRowCount(10);
    overwriteWorkfileComboBox.setToolTipText("");

    timestampComboBox.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N

    okButton.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N
    okButton.setText("   OK   ");
    okButton.setMaximumSize(new java.awt.Dimension(80, 25));
    okButton.setMinimumSize(new java.awt.Dimension(80, 25));
    okButton.setPreferredSize(new java.awt.Dimension(80, 25));
    okButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            okButtonActionPerformed(evt);
        }
    });

    cancelButton.setFont(new java.awt.Font("Arial", 0, 12)); // NOI18N
    cancelButton.setText("Cancel");
    cancelButton.setMaximumSize(new java.awt.Dimension(80, 25));
    cancelButton.setMinimumSize(new java.awt.Dimension(80, 25));
    cancelButton.setPreferredSize(new java.awt.Dimension(80, 25));
    cancelButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            cancelButtonActionPerformed(evt);
        }
    });

    // GroupLayout generated from the form definition; do not hand-tune.
    org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
        .add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
            .addContainerGap()
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
                .add(org.jdesktop.layout.GroupLayout.LEADING, overwriteWorkfileComboBox, 0, 422, Short.MAX_VALUE)
                .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                    .add(recurseDirectoriesCheckBox)
                    .add(0, 0, Short.MAX_VALUE))
                .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                    .add(okButton, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 100, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 222, Short.MAX_VALUE)
                    .add(cancelButton, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 100, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
                .add(org.jdesktop.layout.GroupLayout.LEADING, timestampComboBox, 0, 422, Short.MAX_VALUE))
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
        .add(layout.createSequentialGroup()
            .addContainerGap()
            .add(recurseDirectoriesCheckBox)
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
            .add(overwriteWorkfileComboBox, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
            .add(timestampComboBox, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 143, Short.MAX_VALUE)
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                .add(okButton, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                .add(cancelButton, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
            .add(18, 18, 18))
    );

    pack();
}// </editor-fold>//GEN-END:initComponents
private void recurseDirectoriesCheckBoxActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_recurseDirectoriesCheckBoxActionPerformed
{//GEN-HEADEREND:event_recurseDirectoriesCheckBoxActionPerformed
    // Propagate the checkbox state to the application frame. isSelected() is
    // the idiomatic read; the original getSelectedObjects() != null test is an
    // equivalent but obscure way to ask the same question.
    QWinFrame.getQWinFrame().setRecurseFlag(recurseDirectoriesCheckBox.isSelected());
}//GEN-LAST:event_recurseDirectoriesCheckBoxActionPerformed
private void formWindowClosing(java.awt.event.WindowEvent evt)//GEN-FIRST:event_formWindowClosing
{//GEN-HEADEREND:event_formWindowClosing
    // Hide and dispose the dialog. Also invoked directly (with a null event)
    // by the OK/Cancel handlers and by dismissDialog() to close the window.
    setVisible(false);
    dispose();
}//GEN-LAST:event_formWindowClosing
private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_cancelButtonActionPerformed
{//GEN-HEADEREND:event_cancelButtonActionPerformed
    // Cancel simply closes the dialog without starting the get operation.
    formWindowClosing(null);
}//GEN-LAST:event_cancelButtonActionPerformed
private void okButtonActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_okButtonActionPerformed
{//GEN-HEADEREND:event_okButtonActionPerformed
    // Close the dialog first, then hand the user's choices to the pending
    // get-directory operation. (The previous comments here -- about a password
    // change -- were copy-paste leftovers from another dialog.)
    formWindowClosing(null);
    GetDirectoryCommandArgs commandArgs = new GetDirectoryCommandArgs();
    commandArgs.setRecurseFlag(recurseDirectoriesCheckBox.isSelected());
    commandArgs.setOverwriteBehavior(getOverwriteBehavior());
    commandArgs.setTimeStampBehavior(getTimestampBehavior());
    commandArgs.setUserName(QWinFrame.getQWinFrame().getLoggedInUserName());
    operationGetDirectory.completeOperation(commandArgs);
}//GEN-LAST:event_okButtonActionPerformed
/** Closes and disposes this dialog; overrides the parent's dismissal hook. */
@Override
public void dismissDialog() {
    formWindowClosing(null);
}
/**
 * Returns the overwrite behavior chosen in the combo box, or {@code null}
 * when the combo box has no selection.
 */
private Utility.OverwriteBehavior getOverwriteBehavior() {
    // getSelectedObjects() yields null when nothing is selected.
    if (overwriteWorkfileComboBox.getSelectedObjects() == null) {
        return null;
    }
    OverwriteWorkfileComboModel comboModel =
            (OverwriteWorkfileComboModel) overwriteWorkfileComboBox.getModel();
    return comboModel.getSelectedOverwriteBehavior();
}
/**
 * Returns the timestamp behavior chosen in the combo box, or {@code null}
 * when the combo box has no selection.
 */
private Utility.TimestampBehavior getTimestampBehavior() {
    // getSelectedObjects() yields null when nothing is selected.
    if (timestampComboBox.getSelectedObjects() == null) {
        return null;
    }
    TimestampComboModel comboModel = (TimestampComboModel) timestampComboBox.getModel();
    return comboModel.getSelectedTimeStampBehavior();
}
/** Installs the combo box models for the timestamp and overwrite-behavior choices. */
private void populateComponents() {
    timestampComboBox.setModel(new TimestampComboModel());
    overwriteWorkfileComboBox.setModel(new OverwriteWorkfileComboModel());
}
// Swing components managed by the NetBeans form editor; do not edit by hand.
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton cancelButton;
private javax.swing.JButton okButton;
private javax.swing.JComboBox overwriteWorkfileComboBox;
private javax.swing.JCheckBox recurseDirectoriesCheckBox;
private javax.swing.JComboBox timestampComboBox;
// End of variables declaration//GEN-END:variables
}
| apache-2.0 |
dfish3r/vt-password | src/test/java/edu/vt/middleware/password/RegexRuleTest.java | 1895 | /*
$Id$
Copyright (C) 2003-2013 Virginia Tech.
All rights reserved.
SEE LICENSE FOR MORE INFORMATION
Author: Middleware Services
Email: middleware@vt.edu
Version: $Revision$
Updated: $Date$
*/
package edu.vt.middleware.password;
import org.testng.AssertJUnit;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
/**
 * Unit test for {@link RegexRule}.
 *
 * @author  Middleware Services
 * @version  $Revision$
 */
public class RegexRuleTest extends AbstractRuleTest
{


  /**
   * @return  Test data.
   *
   * @throws  Exception  On test data generation failure.
   */
  @DataProvider(name = "passwords")
  public Object[][] passwords()
    throws Exception
  {
    // Each row is {rule, password, expected error codes (null == valid)}.
    final Object[][] testData = {
      // no four-digit run present: password is valid
      {
        new RegexRule("\\d\\d\\d\\d"),
        new PasswordData(new Password("p4zRcv8#n65")),
        null,
      },
      // anchored pattern matches the entire password: rule fails
      {
        new RegexRule("^[\\p{Alpha}]+\\d\\d\\d\\d$"),
        new PasswordData(new Password("pwUiNh0248")),
        codes(RegexRule.ERROR_CODE),
      },
      // unanchored pattern found inside the password: rule fails
      {
        new RegexRule("\\d\\d\\d\\d"),
        new PasswordData(new Password("pwUi0248xwK")),
        codes(RegexRule.ERROR_CODE),
      },
    };
    return testData;
  }


  /** @throws  Exception  On test failure. */
  @Test(groups = {"passtest"})
  public void resolveMessage()
    throws Exception
  {
    final Rule regexRule = new RegexRule("\\d\\d\\d\\d");
    final RuleResult ruleResult = regexRule.validate(
      new PasswordData(new Password("pwUiNh0248")));
    for (RuleResultDetail detail : ruleResult.getDetails()) {
      final String expected = String.format(
        "Password matches the illegal sequence '%s'.", "0248");
      AssertJUnit.assertEquals(expected, DEFAULT_RESOLVER.resolve(detail));
      AssertJUnit.assertNotNull(EMPTY_RESOLVER.resolve(detail));
    }
  }
}
| apache-2.0 |
Cordlesswire/BoomBox | app/src/main/java/com/example/android/boombox/SongsFragment.java | 13724 | package com.example.android.boombox;
import android.content.Context;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.os.Handler;
import java.util.ArrayList;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.TextView;
import java.util.concurrent.TimeUnit;
import org.w3c.dom.Text;
/**
* A simple {@link Fragment} subclass.
*/
public class SongsFragment extends Fragment {
//Declare Variables
private Handler songHandler = new Handler();
private ImageView previousButton, rewindButton, pauseButton, playButton, forwardButton, nextButton;
private SeekBar seekBar;
private TextView songTitle;
private double startTime = 0;
private double finalTime = 0;
private int forwardTime = 5000;
private int backwardTime = 5000;
//Keeps track of Current Song
private int currentIndex;
private String artistName;
private String songName;
private MediaPlayer mMediaPlayer;
private MediaPlayer.OnCompletionListener mCompletionListener = new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mp) {
releaseMediaPlayer();
}
};
private AudioManager mAudioManager;
AudioManager.OnAudioFocusChangeListener afListener = new AudioManager.OnAudioFocusChangeListener() {
public void onAudioFocusChange(int focusChange) {
if (focusChange == AudioManager.AUDIOFOCUS_GAIN) {
mMediaPlayer.start();
} else if (focusChange == AudioManager.AUDIOFOCUS_LOSS) {
releaseMediaPlayer();
} else if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) {
mMediaPlayer.pause();
mMediaPlayer.seekTo(0);
}
}
};
public SongsFragment() {
// Required empty public constructor
}
public static int oneTimeOnly = 0;
@Override
public View onCreateView(LayoutInflater inflater, final ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.words_list, container, false);
//Initialize Views and Buttons
previousButton = (ImageView) rootView.findViewById(R.id.previousButton);
rewindButton = (ImageView) rootView.findViewById(R.id.rewindButton);
pauseButton = (ImageView) rootView.findViewById(R.id.pauseButton);
playButton = (ImageView) rootView.findViewById(R.id.playButton);
forwardButton = (ImageView) rootView.findViewById(R.id.forwardButton);
nextButton = (ImageView) rootView.findViewById(R.id.nextButton);
//Disable buttons because we can't skip a song if it has not started playing yet
previousButton.setEnabled(false);
forwardButton.setEnabled(false);
nextButton.setEnabled(false);
rewindButton.setEnabled(false);
pauseButton.setEnabled(false);
playButton.setEnabled(false);
seekBar = (SeekBar) rootView.findViewById(R.id.seekBar);
seekBar.setClickable(false);
songTitle = (TextView) rootView.findViewById(R.id.songInformation);
mAudioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
final ArrayList<Word> songs = new ArrayList<>();
songs.add(new Word("Childish Gambino", "3005", R.drawable.childish_gambino_because, R.raw.childish_gambino_3005));
songs.add(new Word("Childish Gambino", "Freestyle", R.drawable.childish_gambino, R.raw.childish_gambino_freestyle));
songs.add(new Word("Gas Lab", "Chemistry", R.drawable.gas_lab, R.raw.gas_lab_chemistry));
songs.add(new Word("Gas Lab (feat. Natayla & Traum Diggs)", "Jazz Hop", R.drawable.gas_lab, R.raw.gas_lab_jazz_hop));
songs.add(new Word("J.Cole", "Losing my Balance", R.drawable.j_cole, R.raw.jcole_losing_my_balance));
songs.add(new Word("JABS (feat. Willow)", "Payíva (Prod. JABS)", R.drawable.jabs_willow, R.raw.jabs_payiva));
songs.add(new Word("Téo", "Enlightened Now", R.drawable.teo, R.raw.teo_enlightened_now));
songs.add(new Word("Téo", "How Low", R.drawable.teo, R.raw.teo_how_low));
songs.add(new Word("Téo", "Selfless-ish", R.drawable.teo, R.raw.teo_selflessish));
songs.add(new Word("Tyler The Creator (feat. Frank Ocean) ", "She", R.drawable.tyler_the_creator, R.raw.tyler_the_creator_she));
songs.add(new Word("Willow (feat. SZA) ", "9", R.drawable.willow, R.raw.willow_9));
songs.add(new Word("Willow ", "Female Energy", R.drawable.willow, R.raw.willow_female_energy));
songs.add(new Word("Willow ", "Marceline", R.drawable.willow, R.raw.willow_marceline));
WordAdapter itemsAdapter = new WordAdapter(getActivity(), songs, R.color.category_colors);
ListView listView = (ListView) rootView.findViewById(R.id.list);
//final LinearLayout musicControls = (LinearLayout) rootView.findViewById(R.id.musicControls);
listView.setAdapter(itemsAdapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
//Hide the music controls until a user clicks on a song for better User Experience
// musicControls.setVisibility(View.VISIBLE);
final Word word = songs.get(position);
releaseMediaPlayer();
int requestResult = mAudioManager.requestAudioFocus(afListener,
AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
if (requestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
mMediaPlayer = MediaPlayer.create(getActivity(), word.getSoundResourceId());
artistName= word.getArtistName();
songName= word.getTitle();
mMediaPlayer.start();
songTitle.setText(artistName + " - "+ songName + ".mp3");
//Enable buttons so that they are clickable
//previousButton.setEnabled(true); Still need to fix Previous track method
forwardButton.setEnabled(true);
nextButton.setEnabled(true);
rewindButton.setEnabled(true);
pauseButton.setEnabled(true);
//Method to Play Next Song
nextButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//Pause any sound that's currently playing to be able to load the next song
mMediaPlayer.pause();
//Increment our current song Index
currentIndex ++;
if (currentIndex < (songs.size() - 1)) {
mMediaPlayer = MediaPlayer.create(getActivity(), word.getSoundResourceId() + currentIndex);
mMediaPlayer.start();
songTitle.setText(artistName + " - "+ songName + ".mp3");
}
}
});
//Method to Play Previous Song
previousButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//Pause any sound that's currently playing to be able to go to the previous song
mMediaPlayer.pause();
if (currentIndex > 1) {
mMediaPlayer = MediaPlayer.create(getActivity(),word.getSoundResourceId() - 1);
mMediaPlayer.start();
songTitle.setText(artistName + " - "+ songName + ".mp3");
}
}
});
mMediaPlayer.setOnCompletionListener(mCompletionListener);
finalTime = mMediaPlayer.getDuration();
startTime = mMediaPlayer.getCurrentPosition();
if (oneTimeOnly == 0) {
seekBar.setMax((int) finalTime);
oneTimeOnly = 1;
}
//Methods to Display Song Duration
//endTimeView.setText(String.format("%d min, %d sec",
//TimeUnit.MILLISECONDS.toMinutes((long) finalTime),
//TimeUnit.MILLISECONDS.toSeconds((long) finalTime) -
// TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes((long)
// finalTime)))
//);
//startTimeView.setText(String.format("%d min, %d sec",
//TimeUnit.MILLISECONDS.toMinutes((long) startTime),
//TimeUnit.MILLISECONDS.toSeconds((long) startTime) -
//TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes((long)
//startTime)))
// );
seekBar.setProgress((int) startTime);
seekBar.setMax(mMediaPlayer.getDuration());
songHandler.postDelayed(UpdateSongTime, 100);
}
}
});
//Method to fast forward the track
forwardButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
int temp = (int) startTime;
if((temp+forwardTime)<=finalTime){
startTime = startTime + forwardTime;
mMediaPlayer.seekTo((int) startTime);
}
}
});
//Method to rewind the track
rewindButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
int temp = (int) startTime;
if ((temp-backwardTime)> 0) {
startTime = startTime - backwardTime;
mMediaPlayer.seekTo((int) startTime);
}
}
});
//Method to Pause song
pauseButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
//"Pause" is not in display so the user can see the "Play" Button to Resume music
pauseButton.setVisibility(View.GONE);
//Displays "Play" button and replaces the "Pause" button on the controls layout
playButton.setVisibility(View.VISIBLE);
playButton.setEnabled(true);
//Disable Control buttons since we don't need them....when song is not Playing
previousButton.setEnabled(false);
forwardButton.setEnabled(false);
nextButton.setEnabled(false);
rewindButton.setEnabled(false);
}
}
});
//Method to Play song if it was Paused
playButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mMediaPlayer.start();
//"Play" is not in display so the user can see the "Pause" Button to Pause the music if needed
playButton.setVisibility(View.GONE);
//Displays "Pause" button and replaces the "Play" button on the controls layout
pauseButton.setVisibility(View.VISIBLE);
//Enable buttons so that the user can user them to manage songs
previousButton.setEnabled(true);
forwardButton.setEnabled(true);
nextButton.setEnabled(true);
rewindButton.setEnabled(true);
}
});
return rootView;
}
private Runnable UpdateSongTime = new Runnable() {
public void run() {
startTime = mMediaPlayer.getCurrentPosition();
seekBar.setProgress((int) startTime);
songHandler.postDelayed(this, 100);
}
};
@Override
public void onStop() {
super.onStop();
releaseMediaPlayer();
}
private void releaseMediaPlayer() {
// If the media player is not null, then it may be currently playing a sound.
if (mMediaPlayer != null) {
// Regardless of the current state of the media player, release its resources
// because we no longer need it.
mMediaPlayer.release();
// Set the media player back to null. For our code, we've decided that
// setting the media player to null is an easy way to tell that the media player
// is not configured to play an audio file at the moment.
mMediaPlayer = null;
mAudioManager.abandonAudioFocus(afListener);
}
}
} | apache-2.0 |
geosolutions-it/jai-ext | jt-scale/src/test/java/it/geosolutions/jaiext/scale/BicubicScaleTest.java | 6201 | /* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.scale;
import static org.junit.Assert.*;
import java.awt.*;
import java.awt.image.DataBuffer;
import java.awt.image.RenderedImage;
import javax.media.jai.ImageLayout;
import javax.media.jai.Interpolation;
import javax.media.jai.JAI;
import javax.media.jai.PlanarImage;
import javax.media.jai.ROI;
import javax.media.jai.ROIShape;
import javax.media.jai.RenderedOp;
import org.junit.Test;
/**
 * This test-class extends the TestScale class and is used for testing the bicubic interpolation
 * inside the Scale operation. The scenarios cover: no ROI and no no-data range; a ROI evaluated
 * through a ROI RasterAccessor; a ROI evaluated through its bounds; ROI plus no-data range
 * together; and the same full scenario on binary images. Additional tests check no-data bleed,
 * interpolation inside holes, packed images, no-data output and ROI layout handling.
 */
public class BicubicScaleTest extends TestScale {

    /**
     * Runs the global scale test with bicubic interpolation, once magnifying and once reducing,
     * for the given combination of feature toggles.
     */
    private void runBicubicTest(boolean useROIAccessor, boolean isBinary, boolean noDataRangeUsed,
            boolean roiPresent, TestSelection testSelection) {
        // Bicubic2 interpolation is disabled in every scenario of this class.
        boolean bicubic2Disabled = true;
        testGlobal(useROIAccessor, isBinary, bicubic2Disabled, noDataRangeUsed,
                roiPresent, InterpolationType.BICUBIC_INTERP, testSelection, ScaleType.MAGNIFY);
        testGlobal(useROIAccessor, isBinary, bicubic2Disabled, noDataRangeUsed,
                roiPresent, InterpolationType.BICUBIC_INTERP, testSelection, ScaleType.REDUCTION);
    }

    /** Scale operation without a ROI or a no-data range. */
    @Test
    public void testImageScaling() {
        runBicubicTest(false, false, false, false, TestSelection.NO_ROI_ONLY_DATA);
    }

    /** Scale operation with a ROI evaluated through a ROI RasterAccessor. */
    @Test
    public void testImageScalingROIAccessor() {
        runBicubicTest(true, false, false, true, TestSelection.ROI_ACCESSOR_ONLY_DATA);
    }

    /** Scale operation with a ROI evaluated through its bounds. */
    @Test
    public void testImageScalingROIBounds() {
        runBicubicTest(false, false, false, true, TestSelection.ROI_ONLY_DATA);
    }

    /** Scale operation with a ROI accessor and a no-data range together. */
    @Test
    public void testImageScalingTotal() {
        runBicubicTest(true, false, true, true, TestSelection.ROI_ACCESSOR_NO_DATA);
    }

    /** Same as {@link #testImageScalingTotal()} but on binary images. */
    @Test
    public void testImageScalingBinary() {
        runBicubicTest(true, true, true, true, TestSelection.BINARY_ROI_ACCESSOR_NO_DATA);
    }

    @Test
    public void testInterpolationNoDataBleedByte() {
        assertNoDataBleedByte(Interpolation.getInstance(Interpolation.INTERP_BICUBIC));
    }

    @Test
    public void testInterpolationNoDataBleedShort() {
        assertNoDataBleedShort(Interpolation.getInstance(Interpolation.INTERP_BICUBIC));
    }

    @Test
    public void testInterpolationNoDataBleedFloat() {
        assertNoDataBleedFloat(Interpolation.getInstance(Interpolation.INTERP_BICUBIC));
    }

    @Test
    public void testInterpolationNoDataBleedDouble() {
        assertNoDataBleedDouble(Interpolation.getInstance(Interpolation.INTERP_BICUBIC));
    }

    @Test
    public void testInterpolateInHole() {
        assertInterpolateInHole(Interpolation.getInstance(Interpolation.INTERP_BICUBIC));
    }

    @Test
    public void testPackedImage() {
        super.testPackedImage(InterpolationType.BICUBIC_INTERP);
    }

    @Test
    public void testNoDataOutput() {
        super.testNoDataOutput(InterpolationType.BICUBIC_INTERP);
    }

    @Test
    public void testROILayout() {
        testROILayout(Interpolation.INTERP_BICUBIC);
    }
}
| apache-2.0 |
codeabovelab/haven-platform | cluster-manager/src/main/java/com/codeabovelab/dm/cluman/model/WithCluster.java | 840 | /*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.cluman.model;
/**
 * Interface for objects that expose a cluster name property. It is needed for
 * {@link com.codeabovelab.dm.cluman.cluster.filter.ClusterFilter}.
 */
public interface WithCluster {
    /** Returns the name of the cluster this object belongs to. */
    String getCluster();
}
| apache-2.0 |
hhclam/bazel | src/main/java/com/google/devtools/build/lib/rules/objc/ObjcCommon.java | 33238 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.objc;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.ASSET_CATALOG;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.BREAKPAD_FILE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.BUNDLE_FILE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.BUNDLE_IMPORT_DIR;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.CC_LIBRARY;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.DEFINE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FLAG;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FORCE_LOAD_FOR_XCODEGEN;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FORCE_LOAD_LIBRARY;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FRAMEWORK_DIR;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.FRAMEWORK_FILE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.Flag.USES_CPP;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.Flag.USES_SWIFT;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.GENERAL_RESOURCE_DIR;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.GENERAL_RESOURCE_FILE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.HEADER;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.IMPORTED_LIBRARY;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.INCLUDE_SYSTEM;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LIBRARY;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LINKED_BINARY;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.LINKOPT;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.MODULE_MAP;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_DYLIB;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SDK_FRAMEWORK;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.SOURCE;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.STORYBOARD;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.STRINGS;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.TOP_LEVEL_MODULE_MAP;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.WEAK_SDK_FRAMEWORK;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.XCASSETS_DIR;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.XCDATAMODEL;
import static com.google.devtools.build.lib.rules.objc.ObjcProvider.XIB;
import static com.google.devtools.build.lib.vfs.PathFragment.TO_PATH_FRAGMENT;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.UnmodifiableIterator;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.rules.apple.AppleToolchain;
import com.google.devtools.build.lib.rules.cpp.CcCommon;
import com.google.devtools.build.lib.rules.cpp.CcLinkParams;
import com.google.devtools.build.lib.rules.cpp.CcLinkParamsProvider;
import com.google.devtools.build.lib.rules.cpp.CppCompilationContext;
import com.google.devtools.build.lib.rules.cpp.CppModuleMap;
import com.google.devtools.build.lib.rules.cpp.CppRunfilesProvider;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.util.FileType;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Contains information common to multiple objc_* rules, and provides a unified API for extracting
* and accessing it.
*/
// TODO(bazel-team): Decompose and subsume area-specific logic and data into the various *Support
// classes. Make sure to distinguish rule output (providers, runfiles, ...) from intermediate,
// rule-internal information. Any provider created by a rule should not be read, only published.
public final class ObjcCommon {
/**
* Provides a way to access attributes that are common to all compilation rules.
*/
// TODO(bazel-team): Delete and move into support-specific attributes classes once ObjcCommon is
// gone.
static final class CompilationAttributes {
private final RuleContext ruleContext;
private final ObjcSdkFrameworks.Attributes sdkFrameworkAttributes;
/** Creates attribute accessors backed by the given non-null rule context. */
CompilationAttributes(RuleContext ruleContext) {
  this.ruleContext = Preconditions.checkNotNull(ruleContext);
  this.sdkFrameworkAttributes = new ObjcSdkFrameworks.Attributes(ruleContext);
}
/**
 * Returns the artifacts of this rule's {@code hdrs} attribute, or an empty list for rules
 * that compile but declare no such attribute.
 */
ImmutableList<Artifact> hdrs() {
  if (!ruleContext.attributes().has("hdrs", BuildType.LABEL_LIST)) {
    return ImmutableList.of();
  }
  ImmutableList.Builder<Artifact> result = ImmutableList.builder();
  for (Pair<Artifact, Label> headerAndLabel : CcCommon.getHeaders(ruleContext)) {
    result.add(headerAndLabel.first);
  }
  return result.build();
}
/**
 * Returns headers that cannot be compiled individually ({@code textual_hdrs}), or an empty
 * list for rules without that attribute.
 */
ImmutableList<Artifact> textualHdrs() {
  boolean hasAttribute = ruleContext.attributes().has("textual_hdrs", BuildType.LABEL_LIST);
  return hasAttribute
      ? ruleContext.getPrerequisiteArtifacts("textual_hdrs", Mode.TARGET).list()
      : ImmutableList.<Artifact>of();
}
/**
 * Returns the artifact of the {@code bridging_header} attribute, or absent when the
 * attribute is unset.
 */
Optional<Artifact> bridgingHeader() {
  Artifact header = ruleContext.getPrerequisiteArtifact("bridging_header", Mode.TARGET);
  return Optional.fromNullable(header);
}
/** Returns the values of the {@code includes} attribute as path fragments. */
Iterable<PathFragment> includes() {
  // Uses the statically imported TO_PATH_FRAGMENT for consistency with the
  // file's existing static import of PathFragment.TO_PATH_FRAGMENT.
  return Iterables.transform(
      ruleContext.attributes().get("includes", Type.STRING_LIST), TO_PATH_FRAGMENT);
}
/** Returns the values of the {@code sdk_includes} attribute as path fragments. */
Iterable<PathFragment> sdkIncludes() {
  // Uses the statically imported TO_PATH_FRAGMENT for consistency with the
  // file's existing static import of PathFragment.TO_PATH_FRAGMENT.
  return Iterables.transform(
      ruleContext.attributes().get("sdk_includes", Type.STRING_LIST), TO_PATH_FRAGMENT);
}
/**
 * Returns the value of the sdk_frameworks attribute plus frameworks that are included
 * automatically. Delegates to {@link ObjcSdkFrameworks.Attributes}.
 */
ImmutableSet<SdkFramework> sdkFrameworks() {
  return sdkFrameworkAttributes.sdkFrameworks();
}
/**
 * Returns the value of the weak_sdk_frameworks attribute. Delegates to
 * {@link ObjcSdkFrameworks.Attributes}.
 */
ImmutableSet<SdkFramework> weakSdkFrameworks() {
  return sdkFrameworkAttributes.weakSdkFrameworks();
}
/**
 * Returns the value of the sdk_dylibs attribute. Delegates to
 * {@link ObjcSdkFrameworks.Attributes}.
 */
ImmutableSet<String> sdkDylibs() {
  return sdkFrameworkAttributes.sdkDylibs();
}
/**
 * Returns the exec paths of all header search paths that should be added to this target and
 * dependers on this target, obtained from the {@code includes} attribute. Each
 * package-relative include is resolved against both the package directory and the
 * corresponding genfiles directory; absolute includes are skipped.
 */
ImmutableList<PathFragment> headerSearchPaths() {
  ImmutableList.Builder<PathFragment> result = new ImmutableList.Builder<>();
  PathFragment packageFragment =
      ruleContext.getLabel().getPackageIdentifier().getPathFragment();
  PathFragment genfilesFragment =
      ruleContext.getConfiguration().getGenfilesFragment().getRelative(packageFragment);
  List<PathFragment> rootFragments = ImmutableList.of(packageFragment, genfilesFragment);
  for (PathFragment include : includes()) {
    if (include.isAbsolute()) {
      continue; // absolute includes are not package-relative search paths
    }
    for (PathFragment rootFragment : rootFragments) {
      result.add(rootFragment.getRelative(include).normalize());
    }
  }
  return result.build();
}
/**
 * Returns any values specified in this rule's {@code copts} attribute or an empty list if the
 * attribute does not exist or no values are specified. The attribute value is tokenized
 * (shell-like splitting) before being returned.
 */
public Iterable<String> copts() {
  // Some rules that use these attributes do not declare "copts" at all.
  if (!ruleContext.attributes().has("copts", Type.STRING_LIST)) {
    return ImmutableList.of();
  }
  return ruleContext.getTokenizedStringListAttr("copts");
}
/**
 * Returns any {@code copts} defined on an {@code objc_options} rule that is a dependency of
 * this rule, or an empty list if the attribute is absent or the provider is missing.
 */
public Iterable<String> optionsCopts() {
  if (ruleContext.attributes().has("options", BuildType.LABEL)) {
    OptionsProvider provider =
        ruleContext.getPrerequisite("options", Mode.TARGET, OptionsProvider.class);
    if (provider != null) {
      return provider.getCopts();
    }
  }
  return ImmutableList.of();
}
/**
 * The clang module maps of direct dependencies of this rule. These are needed to generate
 * this rule's module map.
 *
 * @return module maps collected from the {@code deps} and {@code non_propagated_deps}
 *     attributes, in that order
 */
public List<CppModuleMap> moduleMapsForDirectDeps() {
  // Make sure all dependencies that have headers are included here. If a module map is missing,
  // its private headers will be treated as public!
  ArrayList<CppModuleMap> moduleMaps = new ArrayList<>();
  collectModuleMapsFromAttributeIfExists(moduleMaps, "deps");
  collectModuleMapsFromAttributeIfExists(moduleMaps, "non_propagated_deps");
  return moduleMaps;
}
/**
 * Collects all module maps from the targets in a certain attribute and adds them into
 * {@code moduleMaps}. Does nothing when the attribute is not a label list on this rule.
 *
 * @param moduleMaps an {@link ArrayList} to collect the module maps into
 * @param attribute the name of a label list attribute to collect module maps from
 */
private void collectModuleMapsFromAttributeIfExists(
    ArrayList<CppModuleMap> moduleMaps, String attribute) {
  if (!ruleContext.attributes().has(attribute, BuildType.LABEL_LIST)) {
    return;
  }
  for (ObjcProvider provider
      : ruleContext.getPrerequisites(attribute, Mode.TARGET, ObjcProvider.class)) {
    moduleMaps.addAll(provider.get(TOP_LEVEL_MODULE_MAP).toCollection());
  }
}
/**
 * Returns whether this target uses language features that require clang modules, such as
 * {@literal @}import. False when the {@code enable_modules} attribute is absent or unset.
 */
public boolean enableModules() {
  if (!ruleContext.attributes().has("enable_modules", Type.BOOLEAN)) {
    return false;
  }
  return ruleContext.attributes().get("enable_modules", Type.BOOLEAN);
}
}
/**
 * Provides a way to access attributes that are common to all resources rules.
 */
// TODO(bazel-team): Delete and move into support-specific attributes classes once ObjcCommon is
// gone.
static final class ResourceAttributes {
  private final RuleContext ruleContext;

  ResourceAttributes(RuleContext ruleContext) {
    this.ruleContext = ruleContext;
  }

  /** Lists the artifacts supplied through the given label-list attribute of this rule. */
  private ImmutableList<Artifact> attributeArtifacts(String attributeName) {
    return ruleContext.getPrerequisiteArtifacts(attributeName, Mode.TARGET).list();
  }

  ImmutableList<Artifact> strings() {
    return attributeArtifacts("strings");
  }

  ImmutableList<Artifact> xibs() {
    return attributeArtifacts("xibs");
  }

  ImmutableList<Artifact> storyboards() {
    return attributeArtifacts("storyboards");
  }

  ImmutableList<Artifact> resources() {
    return attributeArtifacts("resources");
  }

  ImmutableList<Artifact> structuredResources() {
    return attributeArtifacts("structured_resources");
  }

  ImmutableList<Artifact> datamodels() {
    return attributeArtifacts("datamodels");
  }

  ImmutableList<Artifact> assetCatalogs() {
    return attributeArtifacts("asset_catalogs");
  }
}
/**
 * Accumulates the attribute values and dependency-provided information needed to construct an
 * {@link ObjcCommon}. The {@code add*} methods accumulate across multiple calls; each
 * {@code set*} method guarded by a precondition may be called at most once.
 */
static class Builder {
  // Rule context of the rule being analyzed; never null after construction.
  private RuleContext context;
  private Optional<CompilationAttributes> compilationAttributes = Optional.absent();
  private Optional<ResourceAttributes> resourceAttributes = Optional.absent();
  private Iterable<SdkFramework> extraSdkFrameworks = ImmutableList.of();
  private Iterable<SdkFramework> extraWeakSdkFrameworks = ImmutableList.of();
  private Iterable<String> extraSdkDylibs = ImmutableList.of();
  private Iterable<Artifact> frameworkImports = ImmutableList.of();
  private Optional<CompilationArtifacts> compilationArtifacts = Optional.absent();
  private Iterable<ObjcProvider> depObjcProviders = ImmutableList.of();
  private Iterable<ObjcProvider> directDepObjcProviders = ImmutableList.of();
  private Iterable<String> defines = ImmutableList.of();
  private Iterable<PathFragment> userHeaderSearchPaths = ImmutableList.of();
  private IntermediateArtifacts intermediateArtifacts;
  private boolean alwayslink;
  private boolean hasModuleMap;
  private Iterable<Artifact> extraImportLibraries = ImmutableList.of();
  private Optional<Artifact> linkedBinary = Optional.absent();
  private Optional<Artifact> breakpadFile = Optional.absent();
  private Iterable<CppCompilationContext> depCcHeaderProviders = ImmutableList.of();
  private Iterable<CcLinkParamsProvider> depCcLinkProviders = ImmutableList.of();

  Builder(RuleContext context) {
    this.context = Preconditions.checkNotNull(context);
  }

  /** Sets the compilation attributes; may only be called once. */
  public Builder setCompilationAttributes(CompilationAttributes baseCompilationAttributes) {
    Preconditions.checkState(!this.compilationAttributes.isPresent(),
        "compilationAttributes is already set to: %s", this.compilationAttributes);
    this.compilationAttributes = Optional.of(baseCompilationAttributes);
    return this;
  }

  /** Sets the resource attributes; may only be called once. */
  public Builder setResourceAttributes(ResourceAttributes baseResourceAttributes) {
    Preconditions.checkState(!this.resourceAttributes.isPresent(),
        "resourceAttributes is already set to: %s", this.resourceAttributes);
    this.resourceAttributes = Optional.of(baseResourceAttributes);
    return this;
  }

  /** Adds SDK frameworks beyond those declared by attributes. */
  Builder addExtraSdkFrameworks(Iterable<SdkFramework> extraSdkFrameworks) {
    this.extraSdkFrameworks = Iterables.concat(this.extraSdkFrameworks, extraSdkFrameworks);
    return this;
  }

  /** Adds weakly-linked SDK frameworks beyond those declared by attributes. */
  Builder addExtraWeakSdkFrameworks(Iterable<SdkFramework> extraWeakSdkFrameworks) {
    this.extraWeakSdkFrameworks =
        Iterables.concat(this.extraWeakSdkFrameworks, extraWeakSdkFrameworks);
    return this;
  }

  /** Adds SDK dylib names beyond those declared by attributes. */
  Builder addExtraSdkDylibs(Iterable<String> extraSdkDylibs) {
    this.extraSdkDylibs = Iterables.concat(this.extraSdkDylibs, extraSdkDylibs);
    return this;
  }

  /** Adds artifacts belonging to imported {@code .framework} bundles. */
  Builder addFrameworkImports(Iterable<Artifact> frameworkImports) {
    this.frameworkImports = Iterables.concat(this.frameworkImports, frameworkImports);
    return this;
  }

  /** Sets the compilation inputs/outputs of this target; may only be called once. */
  Builder setCompilationArtifacts(CompilationArtifacts compilationArtifacts) {
    Preconditions.checkState(!this.compilationArtifacts.isPresent(),
        "compilationArtifacts is already set to: %s", this.compilationArtifacts);
    this.compilationArtifacts = Optional.of(compilationArtifacts);
    return this;
  }

  /**
   * Add providers which will be exposed both to the declaring rule and to any dependers on the
   * declaring rule.
   */
  Builder addDepObjcProviders(Iterable<ObjcProvider> depObjcProviders) {
    this.depObjcProviders = Iterables.concat(this.depObjcProviders, depObjcProviders);
    return this;
  }

  /**
   * Add providers which will only be used by the declaring rule, and won't be propagated to any
   * dependers on the declaring rule.
   */
  Builder addNonPropagatedDepObjcProviders(Iterable<ObjcProvider> directDepObjcProviders) {
    this.directDepObjcProviders = Iterables.concat(
        this.directDepObjcProviders, directDepObjcProviders);
    return this;
  }

  /** Adds header search paths to be exposed through the built provider. */
  public Builder addUserHeaderSearchPaths(Iterable<PathFragment> userHeaderSearchPaths) {
    this.userHeaderSearchPaths =
        Iterables.concat(this.userHeaderSearchPaths, userHeaderSearchPaths);
    return this;
  }

  /** Adds preprocessor defines to be exposed through the built provider. */
  public Builder addDefines(Iterable<String> defines) {
    this.defines = Iterables.concat(this.defines, defines);
    return this;
  }

  /** Sets the intermediate-artifact factory used to locate the module map, if any. */
  Builder setIntermediateArtifacts(IntermediateArtifacts intermediateArtifacts) {
    this.intermediateArtifacts = intermediateArtifacts;
    return this;
  }

  /** Sets whether the produced archive should always be force-loaded by the final link. */
  Builder setAlwayslink(boolean alwayslink) {
    this.alwayslink = alwayslink;
    return this;
  }

  /**
   * Specifies that this target has a clang module map. This should be called if this target
   * compiles sources or exposes headers for other targets to use. Note that this does not add
   * the action to generate the module map. It simply indicates that it should be added to the
   * provider.
   */
  Builder setHasModuleMap() {
    this.hasModuleMap = true;
    return this;
  }

  /**
   * Adds additional static libraries to be linked into the final ObjC application bundle.
   */
  Builder addExtraImportLibraries(Iterable<Artifact> extraImportLibraries) {
    this.extraImportLibraries = Iterables.concat(this.extraImportLibraries, extraImportLibraries);
    return this;
  }

  /**
   * Sets a linked binary generated by this rule to be propagated to dependers.
   */
  Builder setLinkedBinary(Artifact linkedBinary) {
    this.linkedBinary = Optional.of(linkedBinary);
    return this;
  }

  /**
   * Sets a breakpad file (used by the breakpad crash reporting system) generated by this rule to
   * be propagated to dependers.
   */
  Builder setBreakpadFile(Artifact breakpadFile) {
    this.breakpadFile = Optional.of(breakpadFile);
    return this;
  }

  /**
   * Sets information from {@code cc_library} dependencies to be used during compilation.
   */
  public Builder addDepCcHeaderProviders(Iterable<CppCompilationContext> depCcHeaderProviders) {
    this.depCcHeaderProviders = Iterables.concat(this.depCcHeaderProviders, depCcHeaderProviders);
    return this;
  }

  /**
   * Sets information from {@code cc_library} dependencies to be used during linking.
   */
  public Builder addDepCcLinkProviders(RuleContext ruleContext) {
    for (TransitiveInfoCollection dep : ruleContext.getPrerequisites("deps", Mode.TARGET)) {
      // Hack to determine if dep is a cc target. Required so objc_library archives packed in
      // CcLinkParamsProvider do not get consumed as cc targets.
      if (dep.getProvider(CppRunfilesProvider.class) != null) {
        CcLinkParamsProvider ccLinkParamsProvider = dep.getProvider(CcLinkParamsProvider.class);
        this.depCcLinkProviders =
            Iterables.concat(
                this.depCcLinkProviders,
                ImmutableList.<CcLinkParamsProvider>of(ccLinkParamsProvider));
      }
    }
    return this;
  }

  /**
   * Assembles everything accumulated so far into an {@link ObjcCommon}, building the
   * {@link ObjcProvider} that dependers will consume.
   */
  ObjcCommon build() {
    // Direct inputs of this rule: imported libraries, bundle imports, frameworks, defines, etc.
    Iterable<BundleableFile> bundleImports = BundleableFile.bundleImportsFromRule(context);
    ObjcProvider.Builder objcProvider = new ObjcProvider.Builder()
        .addAll(IMPORTED_LIBRARY, extraImportLibraries)
        .addAll(BUNDLE_FILE, bundleImports)
        .addAll(BUNDLE_IMPORT_DIR,
            uniqueContainers(BundleableFile.toArtifacts(bundleImports), BUNDLE_CONTAINER_TYPE))
        .addAll(SDK_FRAMEWORK, extraSdkFrameworks)
        .addAll(WEAK_SDK_FRAMEWORK, extraWeakSdkFrameworks)
        .addAll(SDK_DYLIB, extraSdkDylibs)
        .addAll(FRAMEWORK_FILE, frameworkImports)
        .addAll(FRAMEWORK_DIR, uniqueContainers(frameworkImports, FRAMEWORK_CONTAINER_TYPE))
        .addAll(INCLUDE, userHeaderSearchPaths)
        .addAll(DEFINE, defines)
        .addTransitiveAndPropagate(depObjcProviders)
        .addTransitiveWithoutPropagating(directDepObjcProviders);
    // Merge headers, include dirs, and defines contributed by cc_library dependencies.
    for (CppCompilationContext headerProvider : depCcHeaderProviders) {
      objcProvider.addTransitiveAndPropagate(HEADER, headerProvider.getDeclaredIncludeSrcs());
      objcProvider.addAll(INCLUDE, headerProvider.getIncludeDirs());
      // TODO(bazel-team): This pulls in stl via CppHelper.mergeToolchainDependentContext but
      // probably shouldn't.
      objcProvider.addAll(INCLUDE_SYSTEM, headerProvider.getSystemIncludeDirs());
      objcProvider.addAll(DEFINE, headerProvider.getDefines());
    }
    // Merge link information contributed by cc_library dependencies.
    for (CcLinkParamsProvider linkProvider : depCcLinkProviders) {
      CcLinkParams params = linkProvider.getCcLinkParams(true, false);
      ImmutableList<String> linkOpts = params.flattenedLinkopts();
      ImmutableSet.Builder<SdkFramework> frameworkLinkOpts = new ImmutableSet.Builder<>();
      ImmutableList.Builder<String> nonFrameworkLinkOpts = new ImmutableList.Builder<>();
      // Add any framework flags as frameworks directly, rather than as linkopts.
      for (UnmodifiableIterator<String> iterator = linkOpts.iterator(); iterator.hasNext(); ) {
        String arg = iterator.next();
        if (arg.equals("-framework") && iterator.hasNext()) {
          String framework = iterator.next();
          frameworkLinkOpts.add(new SdkFramework(framework));
        } else {
          nonFrameworkLinkOpts.add(arg);
        }
      }
      objcProvider
          .addAll(SDK_FRAMEWORK, frameworkLinkOpts.build())
          .addAll(LINKOPT, nonFrameworkLinkOpts.build())
          .addTransitiveAndPropagate(CC_LIBRARY, params.getLibraries());
    }
    // Contributions from this rule's compilation attributes (headers, search paths, SDKs).
    if (compilationAttributes.isPresent()) {
      CompilationAttributes attributes = compilationAttributes.get();
      Iterable<PathFragment> sdkIncludes = Iterables.transform(
          Interspersing.prependEach(
              AppleToolchain.sdkDir() + "/usr/include/",
              PathFragment.safePathStrings(attributes.sdkIncludes())),
          TO_PATH_FRAGMENT);
      objcProvider
          .addAll(HEADER, attributes.hdrs())
          .addAll(HEADER, attributes.textualHdrs())
          .addAll(INCLUDE, attributes.headerSearchPaths())
          .addAll(INCLUDE, sdkIncludes)
          .addAll(SDK_FRAMEWORK, attributes.sdkFrameworks())
          .addAll(WEAK_SDK_FRAMEWORK, attributes.weakSdkFrameworks())
          .addAll(SDK_DYLIB, attributes.sdkDylibs());
    }
    // Contributions from this rule's resource attributes.
    if (resourceAttributes.isPresent()) {
      ResourceAttributes attributes = resourceAttributes.get();
      objcProvider
          .addAll(GENERAL_RESOURCE_FILE, attributes.storyboards())
          .addAll(GENERAL_RESOURCE_FILE, attributes.resources())
          .addAll(GENERAL_RESOURCE_FILE, attributes.strings())
          .addAll(GENERAL_RESOURCE_FILE, attributes.xibs())
          .addAll(
              GENERAL_RESOURCE_DIR, xcodeStructuredResourceDirs(attributes.structuredResources()))
          .addAll(BUNDLE_FILE, BundleableFile.flattenedRawResourceFiles(attributes.resources()))
          .addAll(
              BUNDLE_FILE,
              BundleableFile.structuredRawResourceFiles(attributes.structuredResources()))
          .addAll(
              XCASSETS_DIR,
              uniqueContainers(attributes.assetCatalogs(), ASSET_CATALOG_CONTAINER_TYPE))
          .addAll(ASSET_CATALOG, attributes.assetCatalogs())
          .addAll(XCDATAMODEL, attributes.datamodels())
          .addAll(XIB, attributes.xibs())
          .addAll(STRINGS, attributes.strings())
          .addAll(STORYBOARD, attributes.storyboards());
    }
    // A launch storyboard is registered as either a storyboard or a xib based on its extension.
    if (ObjcRuleClasses.useLaunchStoryboard(context)) {
      Artifact launchStoryboard =
          context.getPrerequisiteArtifact("launch_storyboard", Mode.TARGET);
      objcProvider.add(GENERAL_RESOURCE_FILE, launchStoryboard);
      if (ObjcRuleClasses.STORYBOARD_TYPE.matches(launchStoryboard.getPath())) {
        objcProvider.add(STORYBOARD, launchStoryboard);
      } else {
        objcProvider.add(XIB, launchStoryboard);
      }
    }
    // Compilation outputs: headers, archive, sources, and language-usage flags.
    for (CompilationArtifacts artifacts : compilationArtifacts.asSet()) {
      Iterable<Artifact> allSources =
          Iterables.concat(artifacts.getSrcs(), artifacts.getNonArcSrcs());
      // TODO(bazel-team): Add private headers to the provider when we have module maps to enforce
      // them.
      objcProvider
          .addAll(HEADER, artifacts.getAdditionalHdrs())
          .addAll(LIBRARY, artifacts.getArchive().asSet())
          .addAll(SOURCE, allSources);
      boolean usesCpp = false;
      boolean usesSwift = false;
      for (Artifact sourceFile :
          Iterables.concat(artifacts.getSrcs(), artifacts.getNonArcSrcs())) {
        usesCpp = usesCpp || ObjcRuleClasses.CPP_SOURCES.matches(sourceFile.getExecPath());
        usesSwift = usesSwift || ObjcRuleClasses.SWIFT_SOURCES.matches(sourceFile.getExecPath());
      }
      if (usesCpp) {
        objcProvider.add(FLAG, USES_CPP);
      }
      if (usesSwift) {
        objcProvider.add(FLAG, USES_SWIFT);
      }
    }
    // alwayslink forces both the produced archive and extra import libraries to be fully loaded.
    if (alwayslink) {
      for (CompilationArtifacts artifacts : compilationArtifacts.asSet()) {
        for (Artifact archive : artifacts.getArchive().asSet()) {
          objcProvider.add(FORCE_LOAD_LIBRARY, archive);
          objcProvider.add(FORCE_LOAD_FOR_XCODEGEN, String.format(
              "$(BUILT_PRODUCTS_DIR)/lib%s.a",
              XcodeProvider.xcodeTargetName(context.getLabel())));
        }
      }
      for (Artifact archive : extraImportLibraries) {
        objcProvider.add(FORCE_LOAD_LIBRARY, archive);
        objcProvider.add(FORCE_LOAD_FOR_XCODEGEN,
            "$(WORKSPACE_ROOT)/" + archive.getExecPath().getSafePathString());
      }
    }
    // Register the module map only when the configuration has module maps enabled.
    if (hasModuleMap && ObjcRuleClasses.objcConfiguration(context).moduleMapsEnabled()) {
      CppModuleMap moduleMap = intermediateArtifacts.moduleMap();
      objcProvider.add(MODULE_MAP, moduleMap.getArtifact());
      objcProvider.add(TOP_LEVEL_MODULE_MAP, moduleMap);
    }
    objcProvider.addAll(LINKED_BINARY, linkedBinary.asSet())
        .addAll(BREAKPAD_FILE, breakpadFile.asSet());
    return new ObjcCommon(objcProvider.build(), compilationArtifacts);
  }
}
// Directory-container file types used to group artifacts into their enclosing bundles.
static final FileType BUNDLE_CONTAINER_TYPE = FileType.of(".bundle");
static final FileType ASSET_CATALOG_CONTAINER_TYPE = FileType.of(".xcassets");
public static final FileType FRAMEWORK_CONTAINER_TYPE = FileType.of(".framework");

// Aggregated transitive information exposed to dependers.
private final ObjcProvider objcProvider;
// Compilation inputs/outputs of this target; absent for rules that do not compile.
private final Optional<CompilationArtifacts> compilationArtifacts;

/**
 * Instances are created only through {@link Builder#build()}; both arguments must be non-null.
 */
private ObjcCommon(
    ObjcProvider objcProvider,
    Optional<CompilationArtifacts> compilationArtifacts) {
  this.objcProvider = Preconditions.checkNotNull(objcProvider);
  this.compilationArtifacts = Preconditions.checkNotNull(compilationArtifacts);
}
/** Returns the provider carrying this target's transitive ObjC information. */
public ObjcProvider getObjcProvider() {
  return objcProvider;
}
/** Returns this target's compilation inputs/outputs, if it compiles anything. */
public Optional<CompilationArtifacts> getCompilationArtifacts() {
  return compilationArtifacts;
}
/**
 * Returns an {@link Optional} containing the compiled {@code .a} file, or
 * {@link Optional#absent()} if this object contains no {@link CompilationArtifacts} or the
 * compilation information has no sources.
 */
public Optional<Artifact> getCompiledArchive() {
  if (!compilationArtifacts.isPresent()) {
    return Optional.absent();
  }
  return compilationArtifacts.get().getArchive();
}
/**
 * Returns the user header search paths common to all objc targets: the workspace root
 * ({@code "."}) plus the genfiles root of {@code configuration}.
 */
static ImmutableList<PathFragment> userHeaderSearchPaths(BuildConfiguration configuration) {
  return ImmutableList.of(
      new PathFragment("."),
      configuration.getGenfilesFragment());
}
/**
 * Returns the first directory in the sequence of parents of the exec path of the given artifact
 * that matches {@code type}. For instance, if {@code type} is FileType.of(".foo") and the exec
 * path of {@code artifact} is {@code a/b/c/bar.foo/d/e}, then the return value is
 * {@code a/b/c/bar.foo}.
 */
static Optional<PathFragment> nearestContainerMatching(FileType type, Artifact artifact) {
  // Walk upward from the artifact's own exec path until the filesystem root.
  for (PathFragment dir = artifact.getExecPath();
      dir != null;
      dir = dir.getParentDirectory()) {
    if (type.matches(dir)) {
      return Optional.of(dir);
    }
  }
  return Optional.absent();
}
/**
 * Similar to {@link #nearestContainerMatching(FileType, Artifact)}, but tries matching several
 * file types in {@code types}, and returns a path for the first match in the sequence.
 */
static Optional<PathFragment> nearestContainerMatching(
    Iterable<FileType> types, Artifact artifact) {
  for (FileType type : types) {
    Optional<PathFragment> container = nearestContainerMatching(type, artifact);
    if (container.isPresent()) {
      return container;
    }
  }
  return Optional.absent();
}
/**
 * Returns all directories matching {@code containerType} that contain the items in
 * {@code artifacts}. This function ignores artifacts that are not in any directory matching
 * {@code containerType}. Duplicate containers are collapsed via the ImmutableSet builder.
 */
static Iterable<PathFragment> uniqueContainers(
    Iterable<Artifact> artifacts, FileType containerType) {
  ImmutableSet.Builder<PathFragment> containers = new ImmutableSet.Builder<>();
  for (Artifact artifact : artifacts) {
    // asSet() yields zero or one element, so artifacts without a container are skipped.
    containers.addAll(ObjcCommon.nearestContainerMatching(containerType, artifact).asSet());
  }
  return containers.build();
}
/**
 * Returns the Xcode structured resource directory paths.
 *
 * <p>For a checked-in source artifact "//a/b/res/sub_dir/d" included by objc rule "//a/b:c",
 * "a/b/res" will be returned. For a generated source artifact "res/sub_dir/d" owned by genrule
 * "//a/b:c", "bazel-out/.../genfiles/a/b/res" will be returned.
 *
 * <p>When XCode sees a included resource directory of "a/b/res", the entire directory structure
 * up to "res" will be copied into the app bundle.
 */
private static Iterable<PathFragment> xcodeStructuredResourceDirs(Iterable<Artifact> artifacts) {
  ImmutableSet.Builder<PathFragment> containers = new ImmutableSet.Builder<>();
  for (Artifact artifact : artifacts) {
    // Package directory of the rule that owns (declared) this artifact.
    PathFragment ownerRuleDirectory = artifact.getArtifactOwner().getLabel().getPackageFragment();
    // First path segment below the owning package, e.g. "res" in "a/b/res/sub_dir/d".
    // NOTE(review): assumes the artifact lies under its owner's package directory — if not,
    // relativeTo presumably throws; confirm against callers.
    String containerName =
        artifact.getRootRelativePath().relativeTo(ownerRuleDirectory).getSegment(0);
    // Prefix with the root's exec path so generated files resolve under genfiles.
    PathFragment rootExecPath = artifact.getRoot().getExecPath();
    containers.add(rootExecPath.getRelative(ownerRuleDirectory.getRelative(containerName)));
  }
  return containers.build();
}
/**
 * Similar to {@link #nearestContainerMatching(FileType, Artifact)}, but returns the container
 * closest to the root that matches the given type.
 */
static Optional<PathFragment> farthestContainerMatching(FileType type, Artifact artifact) {
  Optional<PathFragment> result = Optional.absent();
  // Walk upward, remembering the last (outermost) match seen.
  for (PathFragment dir = artifact.getExecPath();
      dir != null;
      dir = dir.getParentDirectory()) {
    if (type.matches(dir)) {
      result = Optional.of(dir);
    }
  }
  return result;
}
/** Convenience overload of {@link #notInContainerErrors(Iterable, Iterable)} for one type. */
static Iterable<String> notInContainerErrors(
    Iterable<Artifact> artifacts, FileType containerType) {
  return notInContainerErrors(artifacts, ImmutableList.of(containerType));
}

// Format for the per-file error; exposed only for test assertions.
@VisibleForTesting
static final String NOT_IN_CONTAINER_ERROR_FORMAT =
    "File '%s' is not in a directory of one of these type(s): %s";

/**
 * Returns one error message per artifact that is not located inside a directory matching any
 * of {@code containerTypes}. Duplicate messages are collapsed by the HashSet.
 */
static Iterable<String> notInContainerErrors(
    Iterable<Artifact> artifacts, Iterable<FileType> containerTypes) {
  Set<String> errors = new HashSet<>();
  for (Artifact artifact : artifacts) {
    boolean inContainer = nearestContainerMatching(containerTypes, artifact).isPresent();
    if (!inContainer) {
      errors.add(String.format(NOT_IN_CONTAINER_ERROR_FORMAT,
          artifact.getExecPath(), Iterables.toString(containerTypes)));
    }
  }
  return errors;
}
}
| apache-2.0 |
OpenBEL/openbel-framework | org.openbel.framework.common/src/main/java/org/openbel/framework/common/lang/ComplexAbundance.java | 3057 | /**
* Copyright (C) 2012-2013 Selventa, Inc.
*
* This file is part of the OpenBEL Framework.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The OpenBEL Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the OpenBEL Framework. If not, see <http://www.gnu.org/licenses/>.
*
* Additional Terms under LGPL v3:
*
* This license does not authorize you and you are prohibited from using the
* name, trademarks, service marks, logos or similar indicia of Selventa, Inc.,
* or, in the discretion of other licensors or authors of the program, the
* name, trademarks, service marks, logos or similar indicia of such authors or
* licensors, in any marketing or advertising materials relating to your
* distribution of the program or any covered product. This restriction does
* not waive or limit your obligation to keep intact all copyright notices set
* forth in the program as delivered to you.
*
* If you distribute the program in whole or in part, or any modified version
* of the program, and you assume contractual liability to the recipient with
* respect to the program or modified version, then you will indemnify the
* authors and licensors of the program for any liabilities that these
* contractual assumptions directly impose on those licensors and authors.
*/
package org.openbel.framework.common.lang;
import org.openbel.framework.common.Strings;
/**
* Denotes the abundance of a molecular complex.
* <p>
* Function {@link Signature signature(s)}:
*
* <pre>
* complexAbundance(E:abundance)abundance
* complexAbundance(F:abundance...)abundance
* </pre>
*
* </p>
*
* @see Signature
*/
public class ComplexAbundance extends Function {

    /**
     * {@link Strings#COMPLEX_ABUNDANCE}
     */
    public final static String NAME;

    /**
     * {@link Strings#COMPLEX_ABUNDANCE_ABBREV}
     */
    public final static String ABBREVIATION;

    /**
     * Function description.
     */
    public final static String DESC;

    static {
        NAME = Strings.COMPLEX_ABUNDANCE;
        ABBREVIATION = Strings.COMPLEX_ABUNDANCE_ABBREV;
        DESC = "Denotes the abundance of a molecular complex";
    }

    /**
     * Creates the {@code complexAbundance} function with its two signatures:
     * one taking a single abundance encodable and one taking a variadic list
     * of abundances.
     */
    public ComplexAbundance() {
        super(NAME, ABBREVIATION, DESC,
                "complexAbundance(E:abundance)complexAbundance",
                "complexAbundance(F:abundance...)complexAbundance");
    }

    /**
     * {@inheritDoc}
     *
     * <p>A complex abundance requires at least one argument.</p>
     */
    @Override
    public boolean validArgumentCount(int count) {
        // Fixed non-idiomatic "if (cond) return true; return false;" pattern.
        return count > 0;
    }
}
| apache-2.0 |
winval/druid | extensions-core/hdfs-storage/src/main/java/org/apache/hadoop/fs/HadoopFsWrapper.java | 2241 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.fs;
import io.druid.java.util.common.logger.Logger;
import java.io.IOException;
/**
* This wrapper class is created to be able to access some of the the "protected" methods inside Hadoop's
* FileSystem class. Those are supposed to become public eventually or more appropriate alternatives would be
* provided.
* This is a hack and should be removed when no longer necessary.
*/
public class HadoopFsWrapper
{
  private static final Logger log = new Logger(HadoopFsWrapper.class);

  // Static utility holder; never instantiated.
  private HadoopFsWrapper() {}

  /**
   * Same as FileSystem.rename(from, to, Options.Rename). It is different from
   * FileSystem.rename(from, to) which moves "from" directory inside "to" directory if it already
   * exists.
   *
   * @param replaceExisting if existing files should be overwritten
   *
   * @return true if operation succeeded, false if replaceExisting == false and destination
   *         already exists
   *
   * @throws IOException if trying to overwrite a non-empty directory
   */
  public static boolean rename(FileSystem fs, Path from, Path to, boolean replaceExisting) throws IOException
  {
    final Options.Rename mode = replaceExisting ? Options.Rename.OVERWRITE : Options.Rename.NONE;
    try {
      fs.rename(from, to, mode);
      return true;
    }
    catch (FileAlreadyExistsException ex) {
      log.info(ex, "Destination exists while renaming [%s] to [%s]", from, to);
      return false;
    }
  }
}
| apache-2.0 |
userKarl/sctd | src/main/java/com/sc/td/common/persistence/BaseEntity.java | 1560 | package com.sc.td.common.persistence;
import javax.persistence.MappedSuperclass;
import org.joda.time.DateTime;
import com.sc.td.common.utils.StringUtils;
import com.sc.td.common.utils.datetime.TimeUtil;
@MappedSuperclass
public abstract class BaseEntity {

    private String createBy;   // id of the user that created the record
    private String createDate; // creation timestamp, kept as a string
    private String updateBy;   // id of the user that last updated the record
    private String updateDate; // last-update timestamp, kept as a string

    public String getCreateBy() {
        return createBy;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public String getCreateDate() {
        if (StringUtils.isBlank(createDate)) {
            return createDate;
        }
        // Drops the ".0" fraction JDBC timestamps carry (note: removes every occurrence).
        return createDate.replace(".0", "");
    }

    public void setCreateDate(String createDate) {
        this.createDate = createDate;
    }

    public String getUpdateBy() {
        return updateBy;
    }

    public void setUpdateBy(String updateBy) {
        this.updateBy = updateBy;
    }

    public String getUpdateDate() {
        if (StringUtils.isBlank(updateDate)) {
            return updateDate;
        }
        // Drops the ".0" fraction JDBC timestamps carry (note: removes every occurrence).
        return updateDate.replace(".0", "");
    }

    public void setUpdateDate(String updateDate) {
        this.updateDate = updateDate;
    }

    /** Stamps creation and update audit fields on {@code t} with the current time. */
    public void setInitValue(BaseEntity t, String userId) {
        String now = TimeUtil.dateTime2Str(DateTime.now(), TimeUtil.DSPdaytimeFormat);
        t.setCreateBy(userId);
        t.setCreateDate(now);
        t.setUpdateBy(userId);
        t.setUpdateDate(now);
    }

    /** Stamps only the update audit fields on {@code t} with the current time. */
    public void setUpdateValue(BaseEntity t, String userId) {
        t.setUpdateBy(userId);
        t.setUpdateDate(TimeUtil.dateTime2Str(DateTime.now(), TimeUtil.DSPdaytimeFormat));
    }
}
| apache-2.0 |
StarMade/SMEditClassic | JoFileLibrary/src/jo/vecmath/Point3s.java | 2697 | /*
* $RCSfile: Point3i.java,v $
*
* Copyright 1999-2008 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*
* $Revision: 1.6 $
* $Date: 2008/02/28 20:18:50 $
* $State: Exp $
*/
package jo.vecmath;
/**
* A 3 element point represented by signed integer x,y,z
* coordinates.
*
* @since vecmath 1.2
*/
public class Point3s extends Tuple3s implements java.io.Serializable {

    // Compatible with 1.2
    static final long serialVersionUID = 6149289077348153921L;

    /**
     * Constructs and initializes a Point3s from the specified
     * x, y, and z coordinates.
     * @param x the x coordinate
     * @param y the y coordinate
     * @param z the z coordinate
     */
    public Point3s(short x, short y, short z) {
        super(x, y, z);
    }

    /**
     * Constructs and initializes a Point3s from the array of length 3.
     * @param t the array of length 3 containing x, y, and z in order.
     */
    public Point3s(short[] t) {
        super(t);
    }

    /**
     * Constructs and initializes a Point3s from the specified Tuple3s.
     * @param t1 the Tuple3s containing the initialization x, y, and z
     * data.
     */
    public Point3s(Tuple3s t1) {
        super(t1);
    }

    /**
     * Constructs and initializes a Point3s from the specified Tuple3i.
     * @param t1 the Tuple3i containing the initialization x, y, and z
     * data.
     */
    public Point3s(Tuple3i t1) {
        super(t1);
    }

    /**
     * Constructs and initializes a Point3s to (0,0,0).
     */
    public Point3s() {
        super();
    }
}
gerardnico/java-demo | src/test/java/Java/Generic/Column/ColumnUtilities.java | 309 | package Java.Generic.Column;
public class ColumnUtilities {

    /**
     * Demo of generics with runtime type inspection: produces a value whose
     * concrete type matches the column's declared type.
     *
     * <p>NOTE(review): despite the name, this does not examine any column data;
     * it always returns {@code "3"} for String columns and the boxed integer
     * {@code 3} for any other type. Presumably placeholder/demo logic from the
     * java-demo project — confirm before using it as a real maximum. The casts
     * to {@code T} are unchecked and will fail at use sites for non-String,
     * non-Integer columns.</p>
     */
    static public <T> T getMax(Column<T> column) {
        T value;
        if (column.getMyType().equals(String.class)){
            value = (T) "3";
        } else {
            value = (T) ((Integer) 3);
        }
        return value;
    }
}
leonardvandriel/Jorn-Sound | src/peen/jornsound/main/Main.java | 185 | package peen.jornsound.main;
import peen.jornsound.graphics.Frame;
public class Main {
    /** Application entry point: creates and shows the main window. */
    public static void main(String[] args) throws Exception {
        new Frame().setVisible(true);
    }
}
| apache-2.0 |
ldesgrange/pwad | src/test/java/net/desgrange/pwad/utils/BlockingAnswer.java | 1039 | /**
*
* Copyright 2010-2012 Laurent Desgrange
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package net.desgrange.pwad.utils;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
public class BlockingAnswer implements Answer<Void> {
    /**
     * Flag polled by the answering thread. Declared volatile so that a call to
     * {@link #unblock()} from another thread is guaranteed to become visible to
     * the polling loop; without it the JVM may legally never observe the write
     * and {@link #answer} could spin forever.
     */
    private volatile boolean blocking = true;

    /**
     * Blocks the calling (stubbed) invocation in 50&nbsp;ms polling steps until
     * {@link #unblock()} is invoked from another thread.
     */
    @Override
    public Void answer(final InvocationOnMock invocation) throws Throwable {
        while (blocking) {
            Thread.sleep(50);
        }
        return null;
    }

    /** Releases any thread currently blocked inside {@link #answer}. */
    public void unblock() {
        blocking = false;
    }
}
| apache-2.0 |
orientechnologies/orientdb | jdbc/src/test/java/com/orientechnologies/orient/jdbc/OrientJdbcDatabaseMetaDataTest.java | 9240 | /**
* Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
*
* <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
* <p>For more information: http://orientdb.com
*/
package com.orientechnologies.orient.jdbc;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import com.orientechnologies.orient.core.OConstants;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
public class OrientJdbcDatabaseMetaDataTest extends OrientJdbcDbPerClassTemplateTest {

  // JDBC metadata of the shared per-class connection; refreshed before each test.
  private DatabaseMetaData metaData;

  @Before
  public void setup() throws SQLException {
    metaData = conn.getMetaData();
  }

  @Test
  public void verifyDriverAndDatabaseVersions() throws SQLException {
    // assertEquals("memory:" + name.getMethodName(), metaData.getURL());
    assertEquals("admin", metaData.getUserName());
    assertEquals("OrientDB", metaData.getDatabaseProductName());
    assertEquals(OConstants.getVersion(), metaData.getDatabaseProductVersion());
    // NOTE(review): major/minor numbers are hard-coded to 3.2 and must be
    // bumped whenever the product version changes.
    assertEquals(3, metaData.getDatabaseMajorVersion());
    assertEquals(2, metaData.getDatabaseMinorVersion());
    assertEquals("OrientDB JDBC Driver", metaData.getDriverName());
    assertEquals(
        "OrientDB " + OConstants.getVersion() + " JDBC Driver", metaData.getDriverVersion());
    assertEquals(3, metaData.getDriverMajorVersion());
    assertEquals(2, metaData.getDriverMinorVersion());
  }

  // Item has a composite key (intKey, stringKey); rows are expected in that order.
  @Test
  public void shouldRetrievePrimaryKeysMetadata() throws SQLException {
    ResultSet primaryKeys = metaData.getPrimaryKeys(null, null, "Item");
    assertTrue(primaryKeys.next());
    assertEquals("intKey", primaryKeys.getString(4));
    assertEquals("Item.intKey", primaryKeys.getString(6));
    assertEquals(1, primaryKeys.getInt(5));
    assertTrue(primaryKeys.next());
    assertEquals("stringKey", primaryKeys.getString("COLUMN_NAME"));
    assertEquals("Item.stringKey", primaryKeys.getString("PK_NAME"));
    assertEquals(1, primaryKeys.getInt("KEY_SEQ"));
  }

  // The driver reports exactly two table types, in this fixed order.
  @Test
  public void shouldRetrieveTableTypes() throws SQLException {
    ResultSet tableTypes = metaData.getTableTypes();
    // Assertions.
    assertTrue(tableTypes.next());
    assertEquals("TABLE", tableTypes.getString(1));
    assertTrue(tableTypes.next());
    assertEquals("SYSTEM TABLE", tableTypes.getString(1));
    assertFalse(tableTypes.next());
  }

  @Test
  public void shouldRetrieveKeywords() throws SQLException {
    final String keywordsStr = metaData.getSQLKeywords();
    assertNotNull(keywordsStr);
    // TRAVERSE is an OrientDB-specific keyword, so its presence proves the
    // driver exposes its own keyword list rather than a generic one.
    assertThat(Arrays.asList(keywordsStr.toUpperCase(Locale.ENGLISH).split(",\\s*")))
        .contains("TRAVERSE");
  }

  @Test
  public void shouldRetrieveUniqueIndexInfoForTable() throws Exception {
    ResultSet indexInfo =
        metaData.getIndexInfo(
            "OrientJdbcDatabaseMetaDataTest",
            "OrientJdbcDatabaseMetaDataTest",
            "Item",
            true,
            false);
    indexInfo.next();
    assertThat(indexInfo.getString("INDEX_NAME")).isEqualTo("Item.intKey");
    assertThat(indexInfo.getBoolean("NON_UNIQUE")).isFalse();
    indexInfo.next();
    assertThat(indexInfo.getString("INDEX_NAME")).isEqualTo("Item.stringKey");
    assertThat(indexInfo.getBoolean("NON_UNIQUE")).isFalse();
  }

  // Walks OUser's result-set metadata and looks up each column's REMARKS via
  // DatabaseMetaData.getColumns; output goes to stdout (exploratory test).
  @Test
  public void getFields() throws SQLException {
    ResultSet rs = conn.createStatement().executeQuery("select from OUser");
    ResultSetMetaData rsMetaData = rs.getMetaData();
    int cc = rsMetaData.getColumnCount();
    // NOTE(review): colset is populated but never read (dedup check is
    // commented out below).
    Set<String> colset = new HashSet<>();
    List<Map<String, Object>> columns = new ArrayList<>(cc);
    for (int i = 1; i <= cc; i++) {
      String name = rsMetaData.getColumnLabel(i);
      //      if (colset.contains(name))
      //        continue;
      colset.add(name);
      Map<String, Object> field = new HashMap<>();
      field.put("name", name);
      try {
        String catalog = rsMetaData.getCatalogName(i);
        String schema = rsMetaData.getSchemaName(i);
        String table = rsMetaData.getTableName(i);
        ResultSet rsmc = conn.getMetaData().getColumns(catalog, schema, table, name);
        while (rsmc.next()) {
          field.put("description", rsmc.getString("REMARKS"));
          break;
        }
      } catch (SQLException se) {
        se.printStackTrace();
      }
      columns.add(field);
    }
    for (Map<String, Object> c : columns) {
      System.out.println(c);
    }
  }

  @Test
  public void shouldFetchAllTables() throws SQLException {
    // A null filter must return every class in the schema.
    ResultSet rs = metaData.getTables(null, null, null, null);
    int tableCount = sizeOf(rs);
    assertThat(tableCount)
        .isEqualTo(conn.getDatabase().getMetadata().getSchema().getClasses().size());
  }

  @Test
  public void shouldFillSchemaAndCatalogWithDatabaseName() throws SQLException {
    // OrientDB has no real schema/catalog, so both are set to the db name.
    ResultSet rs = metaData.getTables(null, null, null, null);
    while (rs.next()) {
      assertThat(rs.getString("TABLE_SCHEM")).isEqualTo("perClassTestDatabase");
      assertThat(rs.getString("TABLE_CAT")).isEqualTo("perClassTestDatabase");
    }
  }

  @Test
  public void shouldGetAllTablesFilteredByAllTypes() throws SQLException {
    // Filtering by the complete set of table types must equal no filter at all.
    ResultSet rs = metaData.getTableTypes();
    List<String> tableTypes = new ArrayList<>(2);
    while (rs.next()) {
      tableTypes.add(rs.getString(1));
    }
    rs = metaData.getTables(null, null, null, tableTypes.toArray(new String[2]));
    int tableCount = sizeOf(rs);
    assertThat(tableCount)
        .isEqualTo(conn.getDatabase().getMetadata().getSchema().getClasses().size());
  }

  @Test
  public void getNoTablesFilteredByEmptySetOfTypes() throws SQLException {
    // An empty (non-null) type filter matches nothing.
    final ResultSet rs = metaData.getTables(null, null, null, new String[0]);
    int tableCount = sizeOf(rs);
    assertThat(tableCount).isEqualTo(0);
  }

  @Test
  public void getSingleTable() throws SQLException {
    // Lookup is case-insensitive: "ouser" resolves to class "OUser".
    ResultSet rs = metaData.getTables(null, null, "ouser", null);
    rs.next();
    assertThat(rs.getString("TABLE_NAME")).isEqualTo("OUser");
    assertThat(rs.getString("TABLE_CAT")).isEqualTo("perClassTestDatabase");
    assertThat(rs.getString("TABLE_SCHEM")).isEqualTo("perClassTestDatabase");
    assertThat(rs.getString("REMARKS")).isNull();
    assertThat(rs.getString("REF_GENERATION")).isNull();
    assertThat(rs.getString("TYPE_NAME")).isNull();
    assertThat(rs.next()).isFalse();
  }

  @Test
  public void shouldGetSingleColumnOfArticle() throws SQLException {
    ResultSet rs = metaData.getColumns(null, null, "Article", "uuid");
    rs.next();
    assertThat(rs.getString("TABLE_NAME")).isEqualTo("Article");
    assertThat(rs.getString("COLUMN_NAME")).isEqualTo("uuid");
    assertThat(rs.getString("TYPE_NAME")).isEqualTo("LONG");
    // -5 is java.sql.Types.BIGINT, the JDBC mapping of OrientDB LONG.
    assertThat(rs.getInt("DATA_TYPE")).isEqualTo(-5);
    assertThat(rs.next()).isFalse();
  }

  @Test
  public void shouldGetAllColumnsOfArticle() throws SQLException {
    ResultSet rs = metaData.getColumns(null, null, "Article", null);
    while (rs.next()) {
      assertThat(rs.getString("TABLE_NAME")).isEqualTo("Article");
      assertThat(rs.getString("COLUMN_NAME")).isIn("date", "uuid", "author", "title", "content");
      //      System.out.println("rs = " + rs.getInt("DATA_TYPE"));
      // Allowed java.sql.Types values: BIGINT, VARCHAR, DATE, JAVA_OBJECT.
      assertThat(rs.getInt("DATA_TYPE")).isIn(-5, 12, 91, 2000);
      assertThat(rs.getString("TYPE_NAME")).isIn("LONG", "LINK", "DATE", "STRING", "INTEGER");
    }
  }

  @Test
  public void shouldGetAllIndexesOnArticle() throws Exception {
    ResultSet rs = metaData.getIndexInfo(null, null, "Article", true, true);
    rs.next();
    assertThat(rs.getString("COLUMN_NAME")).isEqualTo("uuid");
    assertThat(rs.getString("INDEX_NAME")).isEqualTo("Article.uuid");
    assertThat(rs.getBoolean("NON_UNIQUE")).isFalse();
  }

  @Test
  public void shouldGetPrimaryKeyOfArticle() throws Exception {
    ResultSet rs = metaData.getPrimaryKeys(null, null, "Article");
    rs.next();
    assertThat(rs.getString("TABLE_NAME")).isEqualTo("Article");
    assertThat(rs.getString("COLUMN_NAME")).isEqualTo("uuid");
    assertThat(rs.getString("PK_NAME")).isEqualTo("Article.uuid");
    assertThat(rs.getInt("KEY_SEQ")).isEqualTo(1);
  }

  /** Counts the remaining rows of the given result set by exhausting it. */
  private int sizeOf(ResultSet rs) throws SQLException {
    int tableCount = 0;
    while (rs.next()) {
      tableCount++;
    }
    return tableCount;
  }
}
| apache-2.0 |
sjbutler/jim | src/uk/ac/open/crc/jim/parser/java14/ASTFormalParameters.java | 916 | /* Generated By:JJTree: Do not edit this line. ASTFormalParameters.java Version 6.0 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=true,VISITOR=true,TRACK_TOKENS=true,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=*,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package uk.ac.open.crc.jim.parser.java14;
// NOTE(review): generated by JJTree (see file header) - do not hand-edit;
// regenerate from the grammar instead.
public class ASTFormalParameters extends SimpleNode {
    /** Creates a node with the given JJTree node id. */
    public ASTFormalParameters(int id) {
        super(id);
    }
    /** Creates a node with the given node id, bound to the given parser. */
    public ASTFormalParameters(Java14Parser p, int id) {
        super(p, id);
    }
    /** Node factory used by the generated parser. */
    public static Node jjtCreate(int id) {
        return new ASTFormalParameters(id);
    }
    /** Node factory used by the generated parser (parser-bound variant). */
    public static Node jjtCreate(Java14Parser p, int id) {
        return new ASTFormalParameters(p, id);
    }
    /** Accept the visitor. **/
    public Object jjtAccept(Java14ParserVisitor visitor, Object data) {
        return visitor.visit(this, data);
    }
}
/* JavaCC - OriginalChecksum=e09cd2ace1c922b0e7dbc9d53400e866 (do not edit this line) */
| apache-2.0 |
doanduyhai/Achilles | integration-test-2_1/src/main/java/info/archinnov/achilles/internals/entities/EntityWithComplexCounters.java | 2033 | /*
* Copyright (C) 2012-2021 DuyHai DOAN
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internals.entities;
import java.util.UUID;
import info.archinnov.achilles.annotations.*;
import info.archinnov.achilles.internals.codecs.StringToLongCodec;
/**
 * Test entity exercising Cassandra counter columns in several flavours:
 * a plain counter, a static counter, and a counter exposed through a codec.
 */
@Table(table = "entity_complex_counters")
public class EntityWithComplexCounters {

    // Partition key.
    @PartitionKey
    private Long id;

    // Static counter: one value shared by all rows of a partition.
    @Column("static_count")
    @Static
    @Counter
    private Long staticCounter;

    // Clustering column distinguishing rows within a partition.
    @ClusteringColumn
    private UUID uuid;

    // Plain per-row counter column.
    @Column("count")
    @Counter
    private long simpleCounter;

    // Counter stored as a long in Cassandra but exposed as a String in Java
    // via StringToLongCodec.
    @Column("codec_count")
    @Counter
    @Codec(StringToLongCodec.class)
    private String counterWithCodec;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Long getStaticCounter() {
        return staticCounter;
    }

    public void setStaticCounter(Long staticCounter) {
        this.staticCounter = staticCounter;
    }

    public UUID getUuid() {
        return uuid;
    }

    public void setUuid(UUID uuid) {
        this.uuid = uuid;
    }

    public long getSimpleCounter() {
        return simpleCounter;
    }

    public void setSimpleCounter(long simpleCounter) {
        this.simpleCounter = simpleCounter;
    }

    public String getCounterWithCodec() {
        return counterWithCodec;
    }

    public void setCounterWithCodec(String counterWithCodec) {
        this.counterWithCodec = counterWithCodec;
    }
}
| apache-2.0 |
gosu-lang/old-gosu-repo | gosu-xml/src/main/java/gw/internal/schema/gw/xsd/w3c/xmlschema/anonymous/attributes/Appinfo_Source.java | 2289 | package gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.attributes;
/***************************************************************************/
/* THIS IS AUTOGENERATED CODE - DO NOT MODIFY OR YOUR CHANGES WILL BE LOST */
/* THIS CODE CAN BE REGENERATED USING 'xsd-codegen' */
/***************************************************************************/
public class Appinfo_Source implements gw.internal.xml.IXmlGeneratedClass {
public static final javax.xml.namespace.QName $QNAME = new javax.xml.namespace.QName( "", "source", "" );
public static final gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType> TYPE = new gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType>( gw.lang.reflect.TypeSystem.getGlobalLock() ) {
@Override
protected gw.lang.reflect.IType init() {
return gw.lang.reflect.TypeSystem.getByFullName( "gw.xsd.w3c.xmlschema.anonymous.attributes.Appinfo_Source" );
}
};
private Appinfo_Source() {
}
public static gw.xml.XmlSimpleValue createSimpleValue( java.net.URI value ) {
//noinspection RedundantArrayCreation
return (gw.xml.XmlSimpleValue) TYPE.get().getTypeInfo().getMethod( "createSimpleValue", gw.lang.reflect.TypeSystem.get( java.net.URI.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { value } );
}
public static void set( gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AnyType anyType, java.net.URI value ) {
//noinspection RedundantArrayCreation
TYPE.get().getTypeInfo().getMethod( "set", gw.lang.reflect.TypeSystem.get( gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.AnyType.class ), gw.lang.reflect.TypeSystem.get( java.net.URI.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { anyType, value } );
}
public static void set( gw.xml.XmlElement element, java.net.URI value ) {
//noinspection RedundantArrayCreation
TYPE.get().getTypeInfo().getMethod( "set", gw.lang.reflect.TypeSystem.get( gw.xml.XmlElement.class ), gw.lang.reflect.TypeSystem.get( java.net.URI.class ) ).getCallHandler().handleCall( null, new java.lang.Object[] { element, value } );
}
@SuppressWarnings( {"UnusedDeclaration"} )
private static final long FINGERPRINT = -3788403261967307401L;
}
| apache-2.0 |
sebastiansemmle/acio | src/test/java/org/apache/commons/io/IOExceptionWithCauseTestCase.java | 2093 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io;
import junit.framework.TestCase;
/**
 * Tests IOExceptionWithCause
 *
 * @author <a href="mailto:ggregory@seagullsw.com">Gary Gregory</a>
 * @version $Id$
 */
public class IOExceptionWithCauseTestCase extends TestCase {

    /**
     * Tests the {@link IOExceptionWithCause#IOExceptionWithCause(String,Throwable)} constructor.
     */
    public void testIOExceptionStringThrowable() {
        Throwable cause = new IllegalArgumentException("cause");
        IOExceptionWithCause exception = new IOExceptionWithCause("message", cause);
        this.validate(exception, cause, "message");
    }

    /**
     * Tests the {@link IOExceptionWithCause#IOExceptionWithCause(Throwable)} constructor.
     */
    public void testIOExceptionThrowable() {
        Throwable cause = new IllegalArgumentException("cause");
        IOExceptionWithCause exception = new IOExceptionWithCause(cause);
        // The single-arg constructor derives the message from the cause's
        // toString(), hence the "<class>: <message>" form expected here.
        this.validate(exception, cause, "java.lang.IllegalArgumentException: cause");
    }

    /**
     * Asserts that the throwable carries the expected message and that its
     * cause is both equal to and the same instance as the expected cause.
     */
    void validate(Throwable throwable, Throwable expectedCause, String expectedMessage) {
        assertEquals(expectedMessage, throwable.getMessage());
        assertEquals(expectedCause, throwable.getCause());
        assertSame(expectedCause, throwable.getCause());
    }
}
| apache-2.0 |
cjellick/cattle | modules/caas/backend/src/main/java/io/cattle/platform/inator/launchconfig/LaunchConfig.java | 954 | package io.cattle.platform.inator.launchconfig;
import io.cattle.platform.inator.InatorContext;
import io.cattle.platform.inator.Unit;
import io.cattle.platform.inator.UnitRef;
import io.cattle.platform.inator.wrapper.DeploymentUnitWrapper;
import io.cattle.platform.inator.wrapper.InstanceWrapper;
import io.cattle.platform.inator.wrapper.StackWrapper;
import java.util.Map;
/**
 * One launch configuration of a service: the template from which container
 * instances are created and validated within a deployment unit.
 */
public interface LaunchConfig {

    /** Name of this launch config. */
    String getName();

    /** Units this launch config depends on, keyed by their reference. */
    Map<UnitRef, Unit> getDependencies();

    /** Creates a new instance of this config in the given stack and deployment unit. */
    InstanceWrapper create(InatorContext context, StackWrapper stack, DeploymentUnitWrapper unit);

    // Presumably returns false when the instance's dependencies are stale and
    // the instance must be recreated - TODO confirm against implementations.
    boolean validateDeps(InatorContext context, InstanceWrapper instanceWrapper);

    /** Revision identifier of this config (used to detect config changes). */
    String getRevision();

    /** Whether the healthcheck action for this config is "none". */
    boolean isHealthcheckActionNone();

    /** Whether new instances are started before old ones are stopped during upgrade. */
    boolean isStartFirst();

    /** Image UUID of the container image to launch. */
    String getImageUuid();

    /** Labels applied to created instances. */
    Map<String, Object> getLabels();

    /** Image pull mode (semantics defined by implementations). */
    String getPullMode();

    /** Applies dynamic (runtime-computed) settings to an existing instance. */
    void applyDynamic(InstanceWrapper instance, InatorContext context);

    /** Name of the service owning this launch config. */
    String getServiceName();
}
| apache-2.0 |
keepsl/keepsmis | model/src/main/java/com/keeps/model/TUser.java | 2944 | package com.keeps.model;
// default package
// Generated 2017-1-11 10:08:53 by Hibernate Tools 3.2.2.GA
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;

import static javax.persistence.GenerationType.IDENTITY;

import com.keeps.core.model.AbstractModelInteger;
/**
 * Persistent user account, mapped to table {@code t_user}.
 * Originally generated by hbm2java.
 */
@Entity
@Table(name = "t_user")
public class TUser extends AbstractModelInteger implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    private Integer id;         // surrogate primary key
    private String nickname;
    private String email;
    private Integer phone;
    private String password;
    private Integer status;     // account status flag (values defined by callers)
    private Date createtime;
    private Date updatetime;

    /** No-arg constructor required by JPA/Hibernate. */
    public TUser() {
    }

    public TUser(String password, Integer status) {
        this.password = password;
        this.status = status;
    }

    public TUser(String nickname, String email, Integer phone, String password, Integer status, Date createtime,
            Date updatetime) {
        this.nickname = nickname;
        this.email = email;
        this.phone = phone;
        this.password = password;
        this.status = status;
        this.createtime = createtime;
        this.updatetime = updatetime;
    }

    @Id
    @GeneratedValue(strategy = IDENTITY)
    @Column(name = "id", unique = true, nullable = false)
    public Integer getId() {
        return this.id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @Column(name = "nickname", length = 80)
    public String getNickname() {
        return this.nickname;
    }

    public void setNickname(String nickname) {
        this.nickname = nickname;
    }

    @Column(name = "email", length = 50)
    public String getEmail() {
        return this.email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    @Column(name = "phone")
    public Integer getPhone() {
        return this.phone;
    }

    public void setPhone(Integer phone) {
        this.phone = phone;
    }

    @Column(name = "password", nullable = false, length = 50)
    public String getPassword() {
        return this.password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Account status flag.
     * <p>
     * Fix: this property was generated as {@code isStatus()}, but the
     * JavaBeans {@code is}-prefix is only valid for {@code boolean}
     * properties, so with JPA property access the annotated accessor was not
     * a recognized getter for {@code status}. The column mapping now lives
     * on a conventional {@code getStatus()}.
     */
    @Column(name = "status", nullable = false)
    public Integer getStatus() {
        return this.status;
    }

    /**
     * @deprecated kept for source compatibility with existing callers;
     *             use {@link #getStatus()} instead.
     */
    @Deprecated
    @Transient
    public Integer isStatus() {
        return this.status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "createtime", length = 19)
    public Date getCreatetime() {
        return this.createtime;
    }

    public void setCreatetime(Date createtime) {
        this.createtime = createtime;
    }

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "updatetime", length = 19)
    public Date getUpdatetime() {
        return this.updatetime;
    }

    public void setUpdatetime(Date updatetime) {
        this.updatetime = updatetime;
    }
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p368/Test7370.java | 2111 | package org.gradle.test.performance.mediummonolithicjavaproject.p368;
import org.junit.Test;
import static org.junit.Assert.*;
// Synthetic test generated for Gradle build-performance benchmarking: each
// method round-trips one String property of Production7370 through its
// setter/getter pair. Not hand-maintained.
public class Test7370 {

    Production7370 objectUnderTest = new Production7370();

    @Test
    public void testProperty0() {
        String value = "value";
        objectUnderTest.setProperty0(value);
        assertEquals(value, objectUnderTest.getProperty0());
    }

    @Test
    public void testProperty1() {
        String value = "value";
        objectUnderTest.setProperty1(value);
        assertEquals(value, objectUnderTest.getProperty1());
    }

    @Test
    public void testProperty2() {
        String value = "value";
        objectUnderTest.setProperty2(value);
        assertEquals(value, objectUnderTest.getProperty2());
    }

    @Test
    public void testProperty3() {
        String value = "value";
        objectUnderTest.setProperty3(value);
        assertEquals(value, objectUnderTest.getProperty3());
    }

    @Test
    public void testProperty4() {
        String value = "value";
        objectUnderTest.setProperty4(value);
        assertEquals(value, objectUnderTest.getProperty4());
    }

    @Test
    public void testProperty5() {
        String value = "value";
        objectUnderTest.setProperty5(value);
        assertEquals(value, objectUnderTest.getProperty5());
    }

    @Test
    public void testProperty6() {
        String value = "value";
        objectUnderTest.setProperty6(value);
        assertEquals(value, objectUnderTest.getProperty6());
    }

    @Test
    public void testProperty7() {
        String value = "value";
        objectUnderTest.setProperty7(value);
        assertEquals(value, objectUnderTest.getProperty7());
    }

    @Test
    public void testProperty8() {
        String value = "value";
        objectUnderTest.setProperty8(value);
        assertEquals(value, objectUnderTest.getProperty8());
    }

    @Test
    public void testProperty9() {
        String value = "value";
        objectUnderTest.setProperty9(value);
        assertEquals(value, objectUnderTest.getProperty9());
    }
}
h4ck3rm1k3/javascript-closure-compiler-git | src/com/google/javascript/rhino/jstype/UnionType.java | 17563 | /*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino.jstype;
import static com.google.javascript.rhino.jstype.TernaryValue.UNKNOWN;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.javascript.rhino.ErrorReporter;
import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* The {@code UnionType} implements a common JavaScript idiom in which the
* code is specifically designed to work with multiple input types. Because
* JavaScript always knows the run-time type of an object value, this is safer
* than a C union.<p>
*
* For instance, values of the union type {@code (String,boolean)} can be of
* type {@code String} or of type {@code boolean}. The commutativity of the
* statement is captured by making {@code (String,boolean)} and
* {@code (boolean,String)} equal.<p>
*
* The implementation of this class prevents the creation of nested
* unions.<p>
*/
public class UnionType extends JSType {
  private static final long serialVersionUID = 1L;

  // The member types of this union. Package-visible and non-final.
  Collection<JSType> alternates;
  // Hash code computed once from the alternates at construction time.
  // NOTE(review): if code elsewhere in this package rewrites `alternates`
  // after construction, this cached value goes stale - confirm unions are
  // effectively immutable once built.
  private final int hashcode;

  /**
   * Creates a union type.
   *
   * @param alternates the alternates of the union
   */
  UnionType(JSTypeRegistry registry, Collection<JSType> alternates) {
    super(registry);
    this.alternates = alternates;
    this.hashcode = this.alternates.hashCode();
  }

  /**
   * Gets the alternate types of this union type.
   * @return The alternate types of this union type. This is the internal
   *     collection, not a defensive copy - callers must not modify it.
   */
  public Iterable<JSType> getAlternates() {
    return alternates;
  }
  /**
   * This predicate is used to test whether a given type can appear in a
   * numeric context, such as an operand of a multiply operator.
   *
   * @return true if the type can appear in a numeric context.
   */
  @Override
  public boolean matchesNumberContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    // Currently true if ANY alternate matches, not all of them.
    for (JSType t : alternates) {
      if (t.matchesNumberContext()) {
        return true;
      }
    }
    return false;
  }

  /**
   * This predicate is used to test whether a given type can appear in a
   * {@code String} context, such as an operand of a string concat ({@code +})
   * operator.<p>
   *
   * All types have at least the potential for converting to {@code String}.
   * When we add externally defined types, such as a browser OM, we may choose
   * to add types that do not automatically convert to {@code String}.
   *
   * @return {@code true} if not {@link VoidType}
   */
  @Override
  public boolean matchesStringContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    // Currently true if ANY alternate matches, not all of them.
    for (JSType t : alternates) {
      if (t.matchesStringContext()) {
        return true;
      }
    }
    return false;
  }

  /**
   * This predicate is used to test whether a given type can appear in an
   * {@code Object} context, such as the expression in a {@code with}
   * statement.<p>
   *
   * Most types we will encounter, except notably {@code null}, have at least
   * the potential for converting to {@code Object}. Host defined objects can
   * get peculiar.<p>
   *
   * VOID type is included here because while it is not part of the JavaScript
   * language, functions returning 'void' type can't be used as operands of
   * any operator or statement.<p>
   *
   * @return {@code true} if the type is not {@link NullType} or
   *         {@link VoidType}
   */
  @Override
  public boolean matchesObjectContext() {
    // TODO(user): Reverse this logic to make it correct instead of generous.
    // Currently true if ANY alternate matches, not all of them.
    for (JSType t : alternates) {
      if (t.matchesObjectContext()) {
        return true;
      }
    }
    return false;
  }
  /**
   * Returns the type of the named property as seen through this union:
   * the least supertype of the property's type on each alternate that
   * declares it, skipping the null and undefined alternates. Returns
   * {@code null} if no alternate declares the property.
   */
  @Override
  public JSType findPropertyType(String propertyName) {
    JSType propertyType = null;
    for (JSType alternate : getAlternates()) {
      // Filter out the null/undefined type.
      if (alternate.isNullType() || alternate.isVoidType()) {
        continue;
      }
      JSType altPropertyType = alternate.findPropertyType(propertyName);
      if (altPropertyType == null) {
        continue;
      }
      if (propertyType == null) {
        propertyType = altPropertyType;
      } else {
        // Widen: the property may be any of the alternates' property types.
        propertyType = propertyType.getLeastSupertype(altPropertyType);
      }
    }
    return propertyType;
  }
  /**
   * A union is assignable to {@code that} iff every alternate is - except
   * that encountering any unknown alternate makes the whole union
   * assignable immediately (note the early return fires even if an earlier
   * alternate already failed).
   */
  @Override
  public boolean canAssignTo(JSType that) {
    boolean canAssign = true;
    for (JSType t : alternates) {
      if (t.isUnknownType()) {
        return true;
      }
      canAssign &= t.canAssignTo(that);
    }
    return canAssign;
  }

  /** A union is callable only if every alternate is callable. */
  @Override
  public boolean canBeCalled() {
    for (JSType t : alternates) {
      if (!t.canBeCalled()) {
        return false;
      }
    }
    return true;
  }

  /** Returns a union in which each alternate has been autoboxed. */
  @Override
  public JSType autobox() {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      restricted.addAlternate(t.autobox());
    }
    return restricted.build();
  }
  /** Returns a union with null/undefined restricted out of each alternate. */
  @Override
  public JSType restrictByNotNullOrUndefined() {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      restricted.addAlternate(t.restrictByNotNullOrUndefined());
    }
    return restricted.build();
  }

  /**
   * Tests loose equality against {@code that}: if every alternate agrees on
   * the answer, that common answer is returned; any disagreement yields
   * UNKNOWN. (Returns null only for an empty union, which does not occur.)
   */
  @Override
  public TernaryValue testForEquality(JSType that) {
    TernaryValue result = null;
    for (JSType t : alternates) {
      TernaryValue test = t.testForEquality(that);
      if (result == null) {
        result = test;
      } else if (!result.equals(test)) {
        return UNKNOWN;
      }
    }
    return result;
  }
  /**
   * This predicate determines whether objects of this type can have the
   * {@code null} value, and therefore can appear in contexts where
   * {@code null} is expected.
   *
   * @return {@code true} for everything but {@code Number} and
   *         {@code Boolean} types.
   */
  @Override
  public boolean isNullable() {
    // Nullable if any alternate is nullable.
    for (JSType t : alternates) {
      if (t.isNullable()) {
        return true;
      }
    }
    return false;
  }

  /** A union is unknown if any alternate is unknown. */
  @Override
  public boolean isUnknownType() {
    for (JSType t : alternates) {
      if (t.isUnknownType()) {
        return true;
      }
    }
    return false;
  }

  /** A union is a struct if any alternate is a struct. */
  @Override
  public boolean isStruct() {
    for (JSType typ : getAlternates()) {
      if (typ.isStruct()) {
        return true;
      }
    }
    return false;
  }

  /** A union is a dict if any alternate is a dict. */
  @Override
  public boolean isDict() {
    for (JSType typ : getAlternates()) {
      if (typ.isDict()) {
        return true;
      }
    }
    return false;
  }
  /**
   * Least supertype of this union and {@code that}. Fast path: if
   * {@code that} is a plain (non-union, non-unknown) subtype of one of the
   * known alternates, the union itself is already the least supertype.
   * Otherwise defers to the generic implementation on JSType.
   */
  @Override
  public JSType getLeastSupertype(JSType that) {
    if (!that.isUnknownType() && !that.isUnionType()) {
      for (JSType alternate : alternates) {
        if (!alternate.isUnknownType() && that.isSubtype(alternate)) {
          return this;
        }
      }
    }
    return getLeastSupertype(this, that);
  }

  /**
   * Greatest lower bound (meet) of this union and {@code that}: keeps each
   * alternate that is a subtype of {@code that}, plus (for a union
   * {@code that}) each of its alternates that is a subtype of this. Falls
   * back to NO_OBJECT_TYPE / NO_TYPE when the meet is empty.
   */
  JSType meet(JSType that) {
    UnionTypeBuilder builder = new UnionTypeBuilder(registry);
    for (JSType alternate : alternates) {
      if (alternate.isSubtype(that)) {
        builder.addAlternate(alternate);
      }
    }
    if (that.isUnionType()) {
      for (JSType otherAlternate : that.toMaybeUnionType().alternates) {
        if (otherAlternate.isSubtype(this)) {
          builder.addAlternate(otherAlternate);
        }
      }
    } else if (that.isSubtype(this)) {
      builder.addAlternate(that);
    }
    JSType result = builder.build();
    if (!result.isNoType()) {
      return result;
    } else if (this.isObject() && that.isObject()) {
      return getNativeType(JSTypeNative.NO_OBJECT_TYPE);
    } else {
      return getNativeType(JSTypeNative.NO_TYPE);
    }
  }
  /**
   * Two union types are equal if they have the same number of alternates
   * and all alternates are equal.
   */
  @Override
  public boolean isEquivalentTo(JSType object) {
    if (object == null) {
      return false;
    }
    if (object.isUnionType()) {
      UnionType that = object.toMaybeUnionType();
      if (alternates.size() != that.alternates.size()) {
        return false;
      }
      // Order-insensitive comparison; O(n^2) but alternate counts are small.
      for (JSType alternate : that.alternates) {
        if (!hasAlternate(alternate)) {
          return false;
        }
      }
      return true;
    } else {
      return false;
    }
  }

  /** Whether some alternate of this union is equivalent to {@code type}. */
  private boolean hasAlternate(JSType type) {
    for (JSType alternate : alternates) {
      if (alternate.isEquivalentTo(type)) {
        return true;
      }
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Precomputed in the constructor from the alternates collection.
    return this.hashcode;
  }
  /** Downcast helper: this IS a union, so return self. */
  @Override
  public UnionType toMaybeUnionType() {
    return this;
  }

  /** A union is an object type only if every alternate is an object type. */
  @Override
  public boolean isObject() {
    for (JSType alternate : alternates) {
      if (!alternate.isObject()) {
        return false;
      }
    }
    return true;
  }
/**
* A {@link UnionType} contains a given type (alternate) iff the member
* vector contains it.
*
* @param type The alternate which might be in this union.
*
* @return {@code true} if the alternate is in the union
*/
public boolean contains(JSType type) {
for (JSType alt : alternates) {
if (alt.isEquivalentTo(type)) {
return true;
}
}
return false;
}
  /**
   * Returns a more restricted union type than {@code this} one, in which all
   * subtypes of {@code type} have been removed.<p>
   *
   * Examples:
   * <ul>
   * <li>{@code (number,string)} restricted by {@code number} is
   *     {@code string}</li>
   * <li>{@code (null, EvalError, URIError)} restricted by
   *     {@code Error} is {@code null}</li>
   * </ul>
   *
   * @param type the supertype of the types to remove from this union type
   */
  public JSType getRestrictedUnion(JSType type) {
    UnionTypeBuilder restricted = new UnionTypeBuilder(registry);
    for (JSType t : alternates) {
      // Unknown alternates are always preserved: they might not be subtypes.
      if (t.isUnknownType() || !t.isSubtype(type)) {
        restricted.addAlternate(t);
      }
    }
    return restricted.build();
  }
// Renders the union as "(a|b|c)" with alternates ordered by the ALPHA comparator
// so the output is deterministic regardless of internal ordering.
@Override String toStringHelper(boolean forAnnotations) {
  SortedSet<JSType> ordered = new TreeSet<JSType>(ALPHA);
  ordered.addAll(alternates);
  StringBuilder sb = new StringBuilder();
  sb.append("(");
  boolean first = true;
  for (JSType alternate : ordered) {
    if (!first) {
      sb.append("|");
    }
    sb.append(alternate.toStringHelper(forAnnotations));
    first = false;
  }
  sb.append(")");
  return sb.toString();
}
@Override
public boolean isSubtype(JSType that) {
  // Everything is a subtype of the unknown and all types.
  if (that.isUnknownType() || that.isAllType()) {
    return true;
  }
  // Otherwise the union is a subtype only if each alternate individually is.
  for (JSType alternate : alternates) {
    if (!alternate.isSubtype(that)) {
      return false;
    }
  }
  return true;
}
@Override
public JSType getRestrictedTypeGivenToBooleanOutcome(boolean outcome) {
  // Restrict every alternate independently, then union the restrictions.
  UnionTypeBuilder builder = new UnionTypeBuilder(registry);
  for (JSType alternate : alternates) {
    JSType restrictedAlternate = alternate.getRestrictedTypeGivenToBooleanOutcome(outcome);
    builder.addAlternate(restrictedAlternate);
  }
  return builder.build();
}
@Override
public BooleanLiteralSet getPossibleToBooleanOutcomes() {
  // Union the outcomes of all alternates; stop as soon as the set saturates.
  BooleanLiteralSet outcomes = BooleanLiteralSet.EMPTY;
  for (JSType alternate : alternates) {
    outcomes = outcomes.union(alternate.getPossibleToBooleanOutcomes());
    if (outcomes == BooleanLiteralSet.BOTH) {
      break; // Cannot grow beyond BOTH.
    }
  }
  return outcomes;
}
@Override
public TypePair getTypesUnderEquality(JSType that) {
  // Collect, per alternate, what each side may be when the two compare equal.
  UnionTypeBuilder thisSide = new UnionTypeBuilder(registry);
  UnionTypeBuilder thatSide = new UnionTypeBuilder(registry);
  for (JSType alternate : alternates) {
    TypePair pair = alternate.getTypesUnderEquality(that);
    if (pair.typeA != null) {
      thisSide.addAlternate(pair.typeA);
    }
    if (pair.typeB != null) {
      thatSide.addAlternate(pair.typeB);
    }
  }
  return new TypePair(thisSide.build(), thatSide.build());
}
@Override
public TypePair getTypesUnderInequality(JSType that) {
  // Collect, per alternate, what each side may be when the two compare unequal.
  UnionTypeBuilder thisSide = new UnionTypeBuilder(registry);
  UnionTypeBuilder thatSide = new UnionTypeBuilder(registry);
  for (JSType alternate : alternates) {
    TypePair pair = alternate.getTypesUnderInequality(that);
    if (pair.typeA != null) {
      thisSide.addAlternate(pair.typeA);
    }
    if (pair.typeB != null) {
      thatSide.addAlternate(pair.typeB);
    }
  }
  return new TypePair(thisSide.build(), thatSide.build());
}
@Override
public TypePair getTypesUnderShallowInequality(JSType that) {
  // Collect, per alternate, what each side may be under shallow (!==) inequality.
  UnionTypeBuilder thisSide = new UnionTypeBuilder(registry);
  UnionTypeBuilder thatSide = new UnionTypeBuilder(registry);
  for (JSType alternate : alternates) {
    TypePair pair = alternate.getTypesUnderShallowInequality(that);
    if (pair.typeA != null) {
      thisSide.addAlternate(pair.typeA);
    }
    if (pair.typeB != null) {
      thatSide.addAlternate(pair.typeB);
    }
  }
  return new TypePair(thisSide.build(), thatSide.build());
}
@Override
public <T> T visit(Visitor<T> visitor) {
  // Visitor-pattern dispatch: route to the union-type case.
  return visitor.caseUnionType(this);
}
/**
 * Resolves every alternate of this union. If any alternate changed during
 * resolution, the alternates list is rebuilt from the resolved types
 * (resolution is required to preserve the hash code, which is asserted).
 */
@Override
JSType resolveInternal(ErrorReporter t, StaticScope<JSType> scope) {
  setResolvedTypeInternal(this); // for circularly defined types.
  boolean changed = false;
  ImmutableList.Builder<JSType> resolvedTypes = ImmutableList.builder();
  for (JSType alternate : alternates) {
    JSType newAlternate = alternate.resolve(t, scope);
    changed |= (alternate != newAlternate);
    // BUG FIX: previously this added the unresolved `alternate`, so the
    // `changed` branch replaced the list with an identical copy and the
    // resolved alternates were silently discarded.
    resolvedTypes.add(newAlternate);
  }
  if (changed) {
    Collection<JSType> newAlternates = resolvedTypes.build();
    Preconditions.checkState(
        newAlternates.hashCode() == this.hashcode);
    alternates = newAlternates;
  }
  return this;
}
@Override
public String toDebugHashCodeString() {
  // Join the debug hash strings of all alternates as "{(h1,h2,...)}".
  List<String> codes = Lists.newArrayList();
  for (JSType alternate : alternates) {
    codes.add(alternate.toDebugHashCodeString());
  }
  return "{(" + Joiner.on(",").join(codes) + ")}";
}
@Override
public boolean setValidator(Predicate<JSType> validator) {
  // Propagate the validator to every alternate; the individual results are
  // ignored and this method always reports success.
  for (JSType a : alternates) {
    a.setValidator(validator);
  }
  return true;
}
@Override
public JSType collapseUnion() {
  // Collapses this union into a single type:
  // - any unknown alternate        -> UNKNOWN_TYPE
  // - values mixed with objects,
  //   or more than one value type  -> ALL_TYPE
  // - only object alternates       -> their common super object
  JSType currentValue = null;
  ObjectType currentCommonSuper = null;
  for (JSType a : alternates) {
    if (a.isUnknownType()) {
      return getNativeType(JSTypeNative.UNKNOWN_TYPE);
    }
    ObjectType obj = a.toObjectType();
    if (obj == null) {
      if (currentValue == null && currentCommonSuper == null) {
        // If obj is not an object, then it must be a value.
        currentValue = a;
      } else {
        // Multiple values and objects will always collapse to the ALL_TYPE.
        return getNativeType(JSTypeNative.ALL_TYPE);
      }
    } else if (currentValue != null) {
      // Values and objects will always collapse to the ALL_TYPE.
      return getNativeType(JSTypeNative.ALL_TYPE);
    } else if (currentCommonSuper == null) {
      // First object seen becomes the initial common supertype candidate.
      currentCommonSuper = obj;
    } else {
      currentCommonSuper =
          registry.findCommonSuperObject(currentCommonSuper, obj);
    }
  }
  return currentCommonSuper;
}
@Override
public void matchConstraint(JSType constraint) {
  // Forward the constraint to every alternate.
  for (JSType alternate : alternates) {
    alternate.matchConstraint(constraint);
  }
}
@Override
public boolean hasAnyTemplateInternal() {
  // The union is templatized as soon as any single alternate is.
  for (JSType member : alternates) {
    if (member.hasAnyTemplate()) {
      return true;
    }
  }
  return false;
}
}
| apache-2.0 |
consulo/consulo | modules/base/platform-impl/src/main/java/com/intellij/ide/actions/QuickChangeSchemesAction.java | 1606 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.DumbAware;
import javax.annotation.Nonnull;
/**
* @author max
*/
public class QuickChangeSchemesAction extends QuickSwitchSchemeAction implements DumbAware {
protected void fillActions(Project project, @Nonnull DefaultActionGroup group, @Nonnull DataContext dataContext) {
final AnAction[] actions = getGroup().getChildren(null);
for (AnAction action : actions) {
group.add(action);
}
}
public void actionPerformed(AnActionEvent e) {
super.actionPerformed(e);
FeatureUsageTracker.getInstance().triggerFeatureUsed("ui.scheme.quickswitch");
}
protected boolean isEnabled() {
return true;
}
private DefaultActionGroup getGroup() {
return (DefaultActionGroup)ActionManager.getInstance().getAction(IdeActions.GROUP_CHANGE_SCHEME);
}
}
| apache-2.0 |
sbt/ivy | src/java/org/apache/ivy/plugins/circular/CircularDependencyHelper.java | 3023 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivy.plugins.circular;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.module.id.ModuleRevisionId;
public final class CircularDependencyHelper {
    /** CircularDependencyHelper is not designed to be an instance */
    private CircularDependencyHelper() {
    }

    /**
     * Returns a string representation of this circular dependency graph,
     * e.g. {@code A->B->A}, ending with {@code "..."} at the first repeated id.
     *
     * @param mrids
     *            in order of circular dependency
     * @return a string representation of this circular dependency graph
     */
    public static String formatMessage(final ModuleRevisionId[] mrids) {
        // Typed set instead of a raw one; Set.add doubles as the "seen before" test.
        Set<ModuleRevisionId> alreadyAdded = new HashSet<ModuleRevisionId>();
        // StringBuilder: the buffer is method-local, so StringBuffer's
        // synchronization was pure overhead.
        StringBuilder buff = new StringBuilder();
        buff.append(mrids[0]);
        alreadyAdded.add(mrids[0]);
        for (int i = 1; i < mrids.length; i++) {
            buff.append("->");
            if (alreadyAdded.add(mrids[i])) {
                buff.append(mrids[i]);
            } else {
                buff.append("...");
                break;
            }
        }
        return buff.toString();
    }

    /** Formats the circular dependency message from module descriptors. */
    public static String formatMessage(final ModuleDescriptor[] descriptors) {
        return formatMessage(toMrids(descriptors));
    }

    /**
     * @param loopElements
     *            a List&lt;ModuleDescriptor&gt;
     */
    public static String formatMessageFromDescriptors(List loopElements) {
        ModuleRevisionId[] mrids = new ModuleRevisionId[loopElements.size()];
        int pos = 0;
        for (Iterator it = loopElements.iterator(); it.hasNext();) {
            ModuleDescriptor descriptor = (ModuleDescriptor) it.next();
            mrids[pos] = descriptor.getModuleRevisionId();
            pos++;
        }
        return formatMessage(mrids);
    }

    /** Extracts each descriptor's revision id, preserving array order. */
    public static ModuleRevisionId[] toMrids(ModuleDescriptor[] descriptors) {
        ModuleRevisionId[] mrids = new ModuleRevisionId[descriptors.length];
        for (int i = 0; i < descriptors.length; i++) {
            mrids[i] = descriptors[i].getModuleRevisionId();
        }
        return mrids;
    }
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/StateMerger.java | 30563 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.value.*;
import com.intellij.codeInspection.dataFlow.value.DfaRelationValue.RelationType;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.UnorderedPair;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.MultiMap;
import one.util.streamex.LongStreamEx;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static com.intellij.codeInspection.dataFlow.DfaFactType.CAN_BE_NULL;
import static com.intellij.codeInspection.dataFlow.DfaFactType.RANGE;
/**
* @author peter
*/
class StateMerger {
// Threshold above which range information starts being dropped (see dropExcessRangeInfo).
public static final int MAX_RANGE_STATES = 100;
// Upper bound on the fact-grouping work done in mergeByFacts before giving up.
private static final int COMPLEXITY_LIMIT = 250000;
// Per-state cache of derived facts; identity-keyed because memory states are mutable.
private final Map<DfaMemoryStateImpl, Set<Fact>> myFacts = ContainerUtil.newIdentityHashMap();
// Cache of "state with one variable flushed" copies; see copyWithoutVar().
private final Map<DfaMemoryState, Map<DfaVariableValue, DfaMemoryStateImpl>> myCopyCache = ContainerUtil.newIdentityHashMap();
/**
 * Tries to merge states that differ only in some fact and its negation
 * (e.g. {@code var == x} vs {@code var != x}). For each negative fact, states
 * holding it and states holding its positive counterpart are grouped by their
 * remaining unrelated facts; groups that match on both sides are merged with
 * the fact removed.
 *
 * @return the merged state list, or null if nothing was merged or the
 *         complexity limit was exceeded
 */
@Nullable
List<DfaMemoryStateImpl> mergeByFacts(@NotNull List<DfaMemoryStateImpl> states) {
  MultiMap<Fact, DfaMemoryStateImpl> statesByFact = createFactToStateMap(states);
  Set<Fact> facts = statesByFact.keySet();
  int complexity = 0;
  for (final Fact fact : facts) {
    // Iterate only negative facts; the positive side is looked up explicitly.
    if (fact.myPositive) continue;
    Collection<DfaMemoryStateImpl> negativeStates = statesByFact.get(fact);
    if (negativeStates.size() == states.size()) continue;
    Collection<DfaMemoryStateImpl> positiveStates = statesByFact.get(fact.getPositiveCounterpart());
    if (positiveStates.isEmpty()) continue;
    ProgressManager.checkCanceled();
    MultiMap<CompactFactSet, DfaMemoryStateImpl> statesByUnrelatedFacts1 = mapByUnrelatedFacts(fact, negativeStates, facts);
    MultiMap<CompactFactSet, DfaMemoryStateImpl> statesByUnrelatedFacts2 = mapByUnrelatedFacts(fact, positiveStates, facts);
    // Bail out when the accumulated grouping work grows too large.
    complexity += StreamEx.of(statesByUnrelatedFacts1, statesByUnrelatedFacts2).flatCollection(MultiMap::keySet)
      .mapToInt(CompactFactSet::size).sum();
    if (complexity > COMPLEXITY_LIMIT) return null;
    Replacements replacements = new Replacements(states);
    for (Map.Entry<CompactFactSet, Collection<DfaMemoryStateImpl>> entry : statesByUnrelatedFacts1.entrySet()) {
      final Collection<DfaMemoryStateImpl> group1 = entry.getValue();
      final Collection<DfaMemoryStateImpl> group2 = statesByUnrelatedFacts2.get(entry.getKey());
      // Merge only when both polarities contribute states with the same unrelated facts.
      if (group1.isEmpty() || group2.isEmpty()) continue;
      final Collection<DfaMemoryStateImpl> group = ContainerUtil.newArrayList(ContainerUtil.concat(group1, group2));
      final Set<DfaVariableValue> unknowns = getAllUnknownVariables(group);
      replacements.stripAndMerge(group, original -> {
        DfaMemoryStateImpl copy = withUnknownVariables(original, unknowns);
        fact.removeFromState(copy);
        if (fact instanceof EqualityFact) {
          // Keep var != const facts that held in every merged member.
          restoreOtherInequalities((EqualityFact)fact, group, copy);
        }
        return copy;
      });
    }
    if (replacements.hasMerges()) return replacements.getMergeResult();
  }
  return null;
}
/**
 * Maps every fact to the states in which it holds. Variable-variable equality
 * facts that are implied by both variables being compared with the same
 * constant are removed (both polarities), so they are not processed separately.
 */
@NotNull
private MultiMap<Fact, DfaMemoryStateImpl> createFactToStateMap(@NotNull List<DfaMemoryStateImpl> states) {
  MultiMap<Fact, DfaMemoryStateImpl> statesByFact = MultiMap.createLinked();
  // constant -> (variable compared with it -> states where that comparison holds)
  Map<DfaConstValue, Map<DfaVariableValue, Set<DfaMemoryStateImpl>>> constantVars = new HashMap<>();
  for (DfaMemoryStateImpl state : states) {
    ProgressManager.checkCanceled();
    for (Fact fact : getFacts(state)) {
      statesByFact.putValue(fact, state);
      DfaConstValue value = fact.comparedToConstant();
      if (value != null) {
        constantVars.computeIfAbsent(value, k -> new HashMap<>())
      .computeIfAbsent(fact.myVar, k -> ContainerUtil.newIdentityTroveSet()).add(state);
      }
    }
  }
  // Copy the key set: we mutate statesByFact while iterating.
  for (final Fact fact : new ArrayList<>(statesByFact.keySet())) {
    if (fact.myPositive) continue;
    Collection<DfaMemoryStateImpl> negativeStates = statesByFact.get(fact);
    Collection<DfaMemoryStateImpl> positiveStates = statesByFact.get(fact.getPositiveCounterpart());
    if (isComparisonOfVariablesComparedWithConstant(fact, constantVars, positiveStates, negativeStates)) {
      statesByFact.remove(fact);
      statesByFact.remove(fact.getPositiveCounterpart());
    }
  }
  return statesByFact;
}
/**
 * Returns true if fact is {@link EqualityFact} which compares two variables, which both are known to be compared with
 * the same constant for all states. In this case the fact looks implied (like "a == null && b == null" implies "a == b")
 * and it's unnecessary to process it separately (processing "a == null" and "b == null" would be enough).
 *
 * @param fact fact to check
 * @param constantVars constant vars map
 * @param positiveStates states for which fact is positive
 * @param negativeStates states for which fact is negative
 * @return true if fact is {@link EqualityFact} which compares two variables which were compared with some constant
 */
private static boolean isComparisonOfVariablesComparedWithConstant(Fact fact,
                       Map<DfaConstValue, Map<DfaVariableValue, Set<DfaMemoryStateImpl>>> constantVars,
                       Collection<DfaMemoryStateImpl> positiveStates,
                       Collection<DfaMemoryStateImpl> negativeStates) {
  // Only variable-to-variable equality facts qualify.
  if (!(fact instanceof EqualityFact) || !(((EqualityFact)fact).myArg instanceof DfaVariableValue)) return false;
  DfaVariableValue var1 = fact.myVar;
  DfaVariableValue var2 = (DfaVariableValue)((EqualityFact)fact).myArg;
  for (Map<DfaVariableValue, Set<DfaMemoryStateImpl>> map : constantVars.values()) {
    Set<DfaMemoryStateImpl> states1 = map.get(var1);
    Set<DfaMemoryStateImpl> states2 = map.get(var2);
    // Both variables must be compared with this constant in every state of both polarities.
    if (states1 != null && states2 != null &&
        states1.containsAll(negativeStates) && states1.containsAll(positiveStates) &&
        states2.containsAll(negativeStates) && states2.containsAll(positiveStates)) {
      return true;
    }
  }
  return false;
}
/** Buckets the given states by the set of facts unrelated to {@code fact}. */
@NotNull
private MultiMap<CompactFactSet, DfaMemoryStateImpl> mapByUnrelatedFacts(@NotNull Fact fact,
                                                                         @NotNull Collection<DfaMemoryStateImpl> states,
                                                                         @NotNull Set<Fact> interestingFacts) {
  MultiMap<CompactFactSet, DfaMemoryStateImpl> result = MultiMap.createLinked();
  for (DfaMemoryStateImpl memState : states) {
    CompactFactSet key = getUnrelatedFacts(fact, memState, interestingFacts);
    result.putValue(key, memState);
  }
  return result;
}
/** Collects the state's facts that are interesting and not invalidated by {@code fact}. */
@NotNull
private CompactFactSet getUnrelatedFacts(@NotNull final Fact fact,
                                         @NotNull DfaMemoryStateImpl state,
                                         @NotNull Set<Fact> interestingFacts) {
  final ArrayList<Fact> unrelated = new ArrayList<>();
  for (Fact candidate : getFacts(state)) {
    if (interestingFacts.contains(candidate) && !fact.invalidatesFact(candidate)) {
      unrelated.add(candidate);
    }
  }
  return new CompactFactSet(state.getFactory(), unrelated);
}
/**
 * After an equality fact {@code var == arg} was removed from a merged state,
 * re-applies the {@code var != const} inequalities that held in every merged
 * member which contained the removed fact.
 */
private void restoreOtherInequalities(@NotNull EqualityFact removedFact,
                                      @NotNull Collection<DfaMemoryStateImpl> mergedGroup,
                                      @NotNull DfaMemoryStateImpl state) {
  Set<DfaConstValue> inequalitiesToRestore = null;
  for (DfaMemoryStateImpl member : mergedGroup) {
    Set<Fact> memberFacts = getFacts(member);
    if (memberFacts.contains(removedFact)) {
      Set<DfaConstValue> otherInequalities = getOtherInequalities(removedFact, memberFacts, member);
      if (inequalitiesToRestore == null) {
        inequalitiesToRestore = otherInequalities;
      } else {
        // Keep only inequalities common to all relevant members.
        inequalitiesToRestore.retainAll(otherInequalities);
      }
    }
  }
  if (inequalitiesToRestore != null) {
    DfaRelationValue.Factory relationFactory = state.getFactory().getRelationFactory();
    for (DfaConstValue toRestore : inequalitiesToRestore) {
      state.applyCondition(relationFactory.createRelation(removedFact.myVar, RelationType.NE, toRestore));
    }
  }
}
/**
 * Finds constants that {@code removedFact.myVar} is known to differ from,
 * excluding anything equivalent to the removed fact's argument.
 */
@NotNull
private static Set<DfaConstValue> getOtherInequalities(@NotNull EqualityFact removedFact,
                                                       @NotNull Set<Fact> memberFacts,
                                                       @NotNull DfaMemoryStateImpl state) {
  Set<DfaConstValue> result = ContainerUtil.newLinkedHashSet();
  Set<DfaValue> equivalentToArg = ContainerUtil.newHashSet(state.getEquivalentValues(removedFact.myArg));
  for (Fact fact : memberFacts) {
    if (!(fact instanceof EqualityFact) || fact.myPositive) continue;
    EqualityFact inequality = (EqualityFact)fact;
    if (inequality.myVar == removedFact.myVar &&
        inequality.myArg instanceof DfaConstValue &&
        !equivalentToArg.contains(inequality.myArg)) {
      result.add((DfaConstValue)inequality.myArg);
    }
  }
  return result;
}
/** Unions the unknown-variable sets of all given states. */
@NotNull
private static Set<DfaVariableValue> getAllUnknownVariables(@NotNull Collection<DfaMemoryStateImpl> complementary) {
  final Set<DfaVariableValue> unknowns = ContainerUtil.newLinkedHashSet();
  for (DfaMemoryStateImpl state : complementary) {
    unknowns.addAll(state.getUnknownVariables());
  }
  return unknowns;
}
/** Returns a copy of {@code original} with every variable in {@code toFlush} flushed. */
@NotNull
private static DfaMemoryStateImpl withUnknownVariables(@NotNull DfaMemoryStateImpl original, @NotNull Set<DfaVariableValue> toFlush) {
  DfaMemoryStateImpl result = original.createCopy();
  for (DfaVariableValue variable : toFlush) {
    result.doFlush(variable, true);
  }
  return result;
}
/**
 * Merges states that are equal in relations and variable states, differing
 * only in their sets of unknown variables.
 *
 * @return the merged state list (possibly unchanged)
 */
@Nullable
List<DfaMemoryStateImpl> mergeByUnknowns(@NotNull List<DfaMemoryStateImpl> states) {
  // Pre-bucket by a partial hash to avoid quadratic comparison over all states.
  MultiMap<Integer, DfaMemoryStateImpl> byHash = new MultiMap<>();
  for (DfaMemoryStateImpl state : states) {
    ProgressManager.checkCanceled();
    byHash.putValue(state.getPartialHashCode(false, true), state);
  }
  Replacements replacements = new Replacements(states);
  for (Integer key : byHash.keySet()) {
    Collection<DfaMemoryStateImpl> similarStates = byHash.get(key);
    if (similarStates.size() < 2) continue;
    for (final DfaMemoryStateImpl state1 : similarStates) {
      ProgressManager.checkCanceled();
      List<DfaMemoryStateImpl> complementary = ContainerUtil.filter(similarStates, state2 -> state1.equalsByRelations(state2) && state1.equalsByVariableStates(state2));
      if (mergeUnknowns(replacements, complementary)) break;
    }
  }
  return replacements.getMergeResult();
}
/**
 * Merges states that differ only in the nullability-related state of a single
 * nullable variable (everything else equal modulo that variable).
 */
@Nullable
List<DfaMemoryStateImpl> mergeByNullability(List<DfaMemoryStateImpl> states) {
  // Pre-bucket by a partial hash to limit candidate comparisons.
  MultiMap<Integer, DfaMemoryStateImpl> byHash = new MultiMap<>();
  for (DfaMemoryStateImpl state : states) {
    ProgressManager.checkCanceled();
    byHash.putValue(state.getPartialHashCode(false, false), state);
  }
  Replacements replacements = new Replacements(states);
  for (Integer key : byHash.keySet()) {
    Collection<DfaMemoryStateImpl> similarStates = byHash.get(key);
    if (similarStates.size() < 2) continue;
    groupLoop:
    for (final DfaMemoryStateImpl state1 : similarStates) {
      ProgressManager.checkCanceled();
      for (final DfaVariableValue var : state1.getChangedVariables()) {
        // Only nullable variables are candidates for this merge.
        if (state1.getVariableState(var).getNullability() != Nullness.NULLABLE) {
          continue;
        }
        List<DfaMemoryStateImpl> complementary = ContainerUtil.filter(similarStates, state2 -> state1.equalsByRelations(state2) &&
                                                 areEquivalentModuloVar(state1, state2, var) &&
                                                 areVarStatesEqualModuloNullability(state1, state2, var));
        if (mergeUnknowns(replacements, complementary)) break groupLoop;
      }
    }
  }
  return replacements.getMergeResult();
}
/**
 * Reduces the state set using range facts: first widens contained ranges to
 * their containing ranges, then merges states whose only difference is the
 * range of one variable, and finally drops range info entirely if there are
 * still too many states.
 */
@Nullable
List<DfaMemoryStateImpl> mergeByRanges(List<DfaMemoryStateImpl> states) {
  // If the same variable has different range A and B in different memState and range A contains range B
  // then range A is replaced with range B
  Map<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> ranges = createRangeMap(states);
  boolean changed = false;
  for (Map<LongRangeSet, LongRangeSet> map : ranges.values()) {
    for (Map.Entry<LongRangeSet, LongRangeSet> entry : map.entrySet()) {
      for(LongRangeSet candidate : map.values()) {
        if(!entry.getValue().equals(candidate) && candidate.contains(entry.getValue())) {
          entry.setValue(candidate);
          changed = true;
        }
      }
    }
  }
  if(changed) {
    changed = false;
    for (DfaMemoryStateImpl state : states) {
      for (Map.Entry<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> entry : ranges.entrySet()) {
        DfaVariableState variableState = state.getVariableState(entry.getKey());
        LongRangeSet range = variableState.getFact(RANGE);
        // Keep exact single-value ranges: widening them would lose a known constant.
        if (range != null && !range.isEmpty() && range.max() == range.min()) continue;
        LongRangeSet boundingRange = entry.getValue().get(range);
        if (boundingRange != null && !boundingRange.equals(range)) {
          state.setFact(entry.getKey(), RANGE, boundingRange);
          changed = true;
        }
      }
    }
    if(changed) {
      // Widening may have made states equal; deduplicate preserving order.
      return new ArrayList<>(new LinkedHashSet<>(states));
    }
  }
  List<DfaMemoryStateImpl> merged = mergeIndependentRanges(states, ranges);
  if(merged != null) return merged;
  return dropExcessRangeInfo(states, ranges.keySet());
}
/**
 * Builds a map: variable -> (range -> range) over all range facts present in
 * the given states. The value may later be replaced with a wider range.
 */
@NotNull
private static Map<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> createRangeMap(List<DfaMemoryStateImpl> states) {
  Map<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> result = new LinkedHashMap<>();
  for (DfaMemoryStateImpl state : states) {
    ProgressManager.checkCanceled();
    state.forVariableStates((varValue, varState) -> {
      LongRangeSet range = varState.getFact(RANGE);
      if (range == null) return;
      result.computeIfAbsent(varValue, unused -> new HashMap<>()).put(range, range);
    });
  }
  return result;
}
/**
 * For every variable that carries more than one distinct range across the
 * states, tries the per-variable merge; returns the updated list only if some
 * merge succeeded.
 */
@Nullable
private List<DfaMemoryStateImpl> mergeIndependentRanges(List<DfaMemoryStateImpl> states, Map<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> ranges) {
  boolean changed = false;
  // For every variable with more than one range, try to union range info and see if some states could be merged after that
  for (Map.Entry<DfaVariableValue, Map<LongRangeSet, LongRangeSet>> entry : ranges.entrySet()) {
    if (entry.getValue().size() > 1) {
      List<DfaMemoryStateImpl> updated = mergeIndependentRanges(states, entry.getKey());
      if (updated != null) {
        states = updated;
        changed = true;
      }
    }
  }
  return changed ? states : null;
}
/**
 * Merges states that are identical once {@code var} is removed: their ranges
 * for {@code var} are unioned and only the equalities involving {@code var}
 * common to all merged states are reapplied.
 *
 * @return the merged list, or null if no two states could be merged
 */
@Nullable
private List<DfaMemoryStateImpl> mergeIndependentRanges(List<DfaMemoryStateImpl> states, DfaVariableValue var) {
  // Accumulator for states sharing the same "state without var".
  class Record {
    final DfaMemoryStateImpl myState;
    final LongRangeSet myRange;
    final Set<EqualityFact> myCommonEqualities; // null until a second state is merged in
    Record(DfaMemoryStateImpl state, LongRangeSet range, Set<EqualityFact> commonEqualities) {
      myState = state;
      myRange = range;
      myCommonEqualities = commonEqualities;
    }
    Set<EqualityFact> getEqualityFacts() {
      return StreamEx.of(getFacts(myState)).select(EqualityFact.class)
        .filter(fact -> fact.myVar == var || fact.myArg == var).toSet();
    }
    Record union(Record other) {
      // Intersect the var-related equalities of all merged states.
      Set<EqualityFact> equalities = myCommonEqualities == null ? getEqualityFacts() : myCommonEqualities;
      equalities.retainAll(other.getEqualityFacts());
      return new Record(myState, myRange.union(other.myRange), equalities);
    }
    DfaMemoryStateImpl getState() {
      // myCommonEqualities != null means at least one merge happened: rebuild
      // the variable's state from the unioned range and surviving equalities.
      if(myCommonEqualities != null) {
        myState.flushVariable(var);
        myState.setFact(var, RANGE, myRange);
        for (EqualityFact equality : myCommonEqualities) {
          equality.applyTo(myState);
        }
      }
      return myState;
    }
  }
  ProgressManager.checkCanceled();
  Map<DfaMemoryStateImpl, Record> merged = new LinkedHashMap<>();
  for (DfaMemoryStateImpl state : states) {
    DfaVariableState variableState = state.getVariableState(var);
    LongRangeSet range = variableState.getFact(RANGE);
    if (range == null) {
      // No explicit range: fall back to the full range of the variable's type.
      range = LongRangeSet.fromType(var.getVariableType());
      if (range == null) return null;
    }
    merged.merge(copyWithoutVar(state, var), new Record(state, range, null), Record::union);
  }
  return merged.size() == states.size() ? null : StreamEx.ofValues(merged).map(Record::getState).toList();
}
/**
 * When the state count exceeds {@link #MAX_RANGE_STATES}, forgets range info
 * for one variable (the one with the highest ID) and deduplicates.
 */
@Nullable
private static List<DfaMemoryStateImpl> dropExcessRangeInfo(List<DfaMemoryStateImpl> states, Set<DfaVariableValue> rangeVariables) {
  if (rangeVariables.isEmpty() || states.size() <= MAX_RANGE_STATES) return null;
  DfaVariableValue victim = Collections.max(rangeVariables, Comparator.comparingInt(DfaVariableValue::getID));
  for (DfaMemoryStateImpl state : states) {
    state.setFact(victim, RANGE, null);
  }
  return new ArrayList<>(new HashSet<>(states));
}
/** Merges the given equivalent states after flushing their combined unknowns. */
private static boolean mergeUnknowns(@NotNull Replacements replacements, @NotNull List<DfaMemoryStateImpl> complementary) {
  if (complementary.size() < 2) return false;
  final Set<DfaVariableValue> unknowns = getAllUnknownVariables(complementary);
  return !unknowns.isEmpty() &&
         replacements.stripAndMerge(complementary, state -> withUnknownVariables(state, unknowns));
}
/** True when the two states coincide once {@code var} is flushed from both. */
private boolean areEquivalentModuloVar(@NotNull DfaMemoryStateImpl state1, @NotNull DfaMemoryStateImpl state2, @NotNull DfaVariableValue var) {
  DfaMemoryStateImpl stripped1 = copyWithoutVar(state1, var);
  DfaMemoryStateImpl stripped2 = copyWithoutVar(state2, var);
  return stripped2.equalsByRelations(stripped1) && stripped2.equalsByVariableStates(stripped1);
}
/** Returns (and caches) a copy of {@code state} with {@code var} flushed. */
@NotNull
private DfaMemoryStateImpl copyWithoutVar(@NotNull DfaMemoryStateImpl state, @NotNull DfaVariableValue var) {
  Map<DfaVariableValue, DfaMemoryStateImpl> cache =
    myCopyCache.computeIfAbsent(state, unused -> ContainerUtil.newIdentityHashMap());
  DfaMemoryStateImpl stripped = cache.get(var);
  if (stripped == null) {
    stripped = state.createCopy();
    stripped.flushVariable(var);
    cache.put(var, stripped);
  }
  return stripped;
}
/** Compares the two variable states with the CAN_BE_NULL fact removed from both. */
private static boolean areVarStatesEqualModuloNullability(@NotNull DfaMemoryStateImpl state1,
                                                          @NotNull DfaMemoryStateImpl state2,
                                                          @NotNull DfaVariableValue var) {
  DfaVariableState withoutNull1 = state1.getVariableState(var).withoutFact(CAN_BE_NULL);
  DfaVariableState withoutNull2 = state2.getVariableState(var).withoutFact(CAN_BE_NULL);
  return withoutNull1.equals(withoutNull2);
}
/** Returns the facts derived from the state, caching the result in {@link #myFacts}. */
@NotNull
private Set<Fact> getFacts(@NotNull DfaMemoryStateImpl state) {
  return myFacts.computeIfAbsent(state, StateMerger::doGetFacts);
}
/**
 * Derives the equality, inequality and instanceof facts implied by a state:
 * equalities within each equivalence class, inequalities across distinct
 * classes, and type facts from each variable's type constraint.
 */
@NotNull
private static Set<Fact> doGetFacts(DfaMemoryStateImpl state) {
  Set<Fact> result = ContainerUtil.newLinkedHashSet();
  IdentityHashMap<EqClass, EqClassInfo> classInfo = new IdentityHashMap<>();
  // Equalities: each variable equals its class constant (if any) and every
  // other variable of the same equivalence class.
  for (EqClass eqClass : state.getNonTrivialEqClasses()) {
    EqClassInfo info = classInfo.computeIfAbsent(eqClass, EqClassInfo::new);
    DfaValue constant = info.constant;
    List<DfaVariableValue> vars = info.vars;
    int size = vars.size();
    for (int i = 0; i < size; i++) {
      DfaVariableValue var = vars.get(i);
      if (constant != null) {
        result.add(Fact.createEqualityFact(var, constant));
      }
      for (int j = i + 1; j < size; j++) {
        DfaVariableValue eqVar = vars.get(j);
        result.add(Fact.createEqualityFact(var, eqVar));
      }
    }
  }
  // Inequalities: members of distinct classes differ from each other and from
  // the other class's constant; both orientations are recorded for variables.
  for (UnorderedPair<EqClass> classPair : state.getDistinctClassPairs()) {
    EqClassInfo info1 = classInfo.computeIfAbsent(classPair.first, EqClassInfo::new);
    EqClassInfo info2 = classInfo.computeIfAbsent(classPair.second, EqClassInfo::new);
    for (DfaVariableValue var1 : info1.vars) {
      for (DfaVariableValue var2 : info2.vars) {
        result.add(new EqualityFact(var1, false, var2));
        result.add(new EqualityFact(var2, false, var1));
      }
    }
    if(info1.constant != null) {
      for (DfaVariableValue var2 : info2.vars) {
        result.add(new EqualityFact(var2, false, info1.constant));
      }
    }
    if(info2.constant != null) {
      for (DfaVariableValue var1 : info1.vars) {
        result.add(new EqualityFact(var1, false, info2.constant));
      }
    }
  }
  // Instanceof facts from the type constraints.
  state.forVariableStates((var, variableState) -> {
    TypeConstraint typeConstraint = variableState.getTypeConstraint();
    for (DfaPsiType type : typeConstraint.getInstanceofValues()) {
      result.add(new InstanceofFact(var, true, type));
    }
    for (DfaPsiType type : typeConstraint.getNotInstanceofValues()) {
      result.add(new InstanceofFact(var, false, type));
    }
  });
  return result;
}
/**
 * Immutable, hashable snapshot of a set of facts: each fact is packed into a
 * long, the array is sorted and the hash precomputed, making it cheap to use
 * as a grouping key.
 */
static final class CompactFactSet {
  private final long[] myData;      // sorted packed facts
  private final int myHashCode;     // precomputed over myData
  private final DfaValueFactory myFactory; // needed only to unpack for toString()
  CompactFactSet(DfaValueFactory factory, Collection<Fact> facts) {
    myData = facts.stream().mapToLong(Fact::pack).toArray();
    Arrays.sort(myData);
    myHashCode = Arrays.hashCode(myData);
    myFactory = factory;
  }
  public int size() {
    return myData.length;
  }
  @Override
  public int hashCode() {
    return myHashCode;
  }
  @Override
  public boolean equals(Object obj) {
    if (obj == this) return true;
    if (!(obj instanceof CompactFactSet)) return false;
    CompactFactSet other = (CompactFactSet)obj;
    // Cheap hash comparison first; arrays are sorted, so equality is positional.
    return this.myHashCode == other.myHashCode && Arrays.equals(this.myData, other.myData);
  }
  @Override
  public String toString() {
    return LongStreamEx.of(myData).mapToObj(f -> Fact.unpack(myFactory, f)).joining(", ", "{", "}");
  }
}
/**
 * A single piece of knowledge about a variable: an (in)equality with another
 * value ({@link EqualityFact}) or an instanceof relation ({@link InstanceofFact}).
 * A fact packs into one long: the low 32 bits hold the variable ID, negated
 * when the fact is negative; the high 32 bits hold the value ID (EqualityFact)
 * or the negated type ID (InstanceofFact) — see {@link #unpack}.
 */
static abstract class Fact {
  final boolean myPositive;
  @NotNull final DfaVariableValue myVar;
  private final int myHash; // precomputed by subclasses
  protected Fact(boolean positive, @NotNull DfaVariableValue var, int hash) {
    myPositive = positive;
    myVar = var;
    myHash = hash;
  }
  // Low word of the packed form: signed variable ID, sign encodes polarity.
  private int packLow() {
    return myPositive ? myVar.getID() : -myVar.getID();
  }
  // High word of the packed form; sign distinguishes the fact kind (see unpack).
  abstract int packHigh();
  long pack() {
    int lo = packLow();
    int hi = packHigh();
    return ((long)hi << 32) | (lo & 0xFFFF_FFFFL);
  }
  @Override
  public final int hashCode() {
    return myHash;
  }
  /** Returns the same fact with positive polarity. */
  @NotNull
  abstract Fact getPositiveCounterpart();
  /** Constant this fact compares against, or null when not a constant comparison. */
  DfaConstValue comparedToConstant() {
    return null;
  }
  /** Whether removing this fact would also affect {@code another}. */
  abstract boolean invalidatesFact(@NotNull Fact another);
  /** Removes the knowledge represented by this fact from the state. */
  abstract void removeFromState(@NotNull DfaMemoryStateImpl state);
  @NotNull
  static EqualityFact createEqualityFact(@NotNull DfaVariableValue var, @NotNull DfaValue val) {
    // Canonical order: the operand with the smaller ID becomes myVar.
    if (val instanceof DfaVariableValue && val.getID() < var.getID()) {
      return new EqualityFact((DfaVariableValue)val, true, var);
    }
    return new EqualityFact(var, true, val);
  }
  // Canonicalizes negated variables to their non-negated counterpart.
  static DfaValue normalize(DfaValue value) {
    if (value instanceof DfaVariableValue && ((DfaVariableValue)value).isNegated()) {
      return ((DfaVariableValue)value).createNegated();
    }
    return value;
  }
  /** Inverse of {@link #pack()}: reconstructs the fact from its packed long. */
  static Fact unpack(DfaValueFactory factory, long packed) {
    int lo = (int)(packed & 0xFFFF_FFFFL);
    int hi = (int)(packed >> 32);
    boolean positive = lo >= 0;
    DfaVariableValue var = (DfaVariableValue)factory.getValue(Math.abs(lo));
    if (hi >= 0) {
      return new EqualityFact(var, positive, factory.getValue(hi));
    } else {
      return new InstanceofFact(var, positive, factory.getType(-hi));
    }
  }
}
/**
 * Fact stating that a variable is equal ({@code myPositive}) or not equal to
 * another value — a variable or a constant.
 */
static final class EqualityFact extends Fact {
  @NotNull private final DfaValue myArg;
  private EqualityFact(@NotNull DfaVariableValue var, boolean positive, @NotNull DfaValue arg) {
    super(positive, var, (var.hashCode() * 31 + arg.hashCode()) * 31 + (positive ? 1 : 0));
    myArg = arg;
  }
  @Override
  int packHigh() {
    // Value IDs are used directly; a non-negative high word marks an EqualityFact.
    return myArg.getID();
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof EqualityFact)) return false;
    EqualityFact fact = (EqualityFact)o;
    return myArg == fact.myArg && myVar == fact.myVar && myPositive == fact.myPositive;
  }
  @Override
  public String toString() {
    return myVar + (myPositive ? " EQ " : " NE ") + myArg;
  }
  @Override
  DfaConstValue comparedToConstant() {
    return myArg instanceof DfaConstValue ? (DfaConstValue)myArg : null;
  }
  @Override
  @NotNull
  EqualityFact getPositiveCounterpart() {
    return new EqualityFact(myVar, true, myArg);
  }
  /** Re-applies this (in)equality to the given state as a condition. */
  void applyTo(DfaMemoryStateImpl state) {
    state.applyCondition(state.getFactory().createCondition(myVar, myPositive ? RelationType.EQ : RelationType.NE, myArg));
  }
  @Override
  boolean invalidatesFact(@NotNull Fact another) {
    // Another equality is related iff it mentions this fact's variable
    // (after normalizing negated variables).
    if (!(another instanceof EqualityFact)) return false;
    DfaValue normalizedVar = normalize(myVar);
    return normalizedVar == normalize(another.myVar) || normalizedVar == normalize(((EqualityFact)another).myArg);
  }
  @Override
  void removeFromState(@NotNull DfaMemoryStateImpl state) {
    state.removeEquivalenceRelations(myVar);
  }
}
/**
 * A fact stating that a variable is (or is not) an instance of a given type.
 */
static final class InstanceofFact extends Fact {
// The type the variable is tested against.
@NotNull private final DfaPsiType myType;
private InstanceofFact(@NotNull DfaVariableValue var, boolean positive, @NotNull DfaPsiType type) {
// Hash mixes variable, type and polarity; computed once since facts are immutable.
super(positive, var, (var.hashCode() * 31 + type.hashCode()) * 31 + (positive ? 1 : 0));
myType = type;
}
@Override
int packHigh() {
// Negated type ID marks the packed long as an instanceof fact (see Fact.unpack).
return -myType.getID();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof InstanceofFact)) return false;
InstanceofFact fact = (InstanceofFact)o;
// NOTE(review): identity comparison assumes DfaPsiType/DfaVariableValue
// instances are canonicalized -- confirm before relying on ==.
return myPositive == fact.myPositive && myType == fact.myType && myVar == fact.myVar;
}
@Override
public String toString() {
return myVar + (myPositive ? " IS " : " IS NOT ") + myType;
}
@Override
@NotNull
Fact getPositiveCounterpart() {
// Same variable and type, forced positive polarity.
return new InstanceofFact(myVar, true, myType);
}
@Override
boolean invalidatesFact(@NotNull Fact another) {
// Only an instanceof fact about the same type and (normalized) variable conflicts.
return another instanceof InstanceofFact &&
myType == ((InstanceofFact)another).myType &&
normalize(myVar) == normalize(another.myVar);
}
@Override
void removeFromState(@NotNull DfaMemoryStateImpl state) {
// Strip this one type constraint from the variable's state.
DfaVariableState varState = state.getVariableState(myVar);
state.setVariableState(myVar, varState.withoutType(myType));
}
}
/**
 * Tracks the outcome of collapsing groups of memory states: the states that
 * were replaced and the merged representatives that replace them.
 */
private static class Replacements {
    @NotNull private final List<DfaMemoryStateImpl> myAllStates;
    private final Set<DfaMemoryStateImpl> myRemovedStates = ContainerUtil.newIdentityTroveSet();
    private final List<DfaMemoryStateImpl> myMerged = ContainerUtil.newArrayList();

    private Replacements(@NotNull List<DfaMemoryStateImpl> allStates) {
        myAllStates = allStates;
    }

    private boolean hasMerges() {
        return !myMerged.isEmpty();
    }

    /**
     * Returns the merged representatives followed by every original state that
     * survived, or {@code null} when nothing was merged.
     */
    @Nullable
    private List<DfaMemoryStateImpl> getMergeResult() {
        if (!hasMerges()) return null;
        List<DfaMemoryStateImpl> result = ContainerUtil.newArrayList(myMerged);
        for (DfaMemoryStateImpl state : myAllStates) {
            if (myRemovedStates.contains(state)) continue;
            result.add(state);
        }
        return result;
    }

    /**
     * Buckets the states of {@code group} by their stripped form; any bucket
     * with more than one member is collapsed into its stripped representative.
     */
    private boolean stripAndMerge(@NotNull Collection<DfaMemoryStateImpl> group,
                                  @NotNull Function<DfaMemoryStateImpl, DfaMemoryStateImpl> stripper) {
        if (group.size() <= 1) return false;
        MultiMap<DfaMemoryStateImpl, DfaMemoryStateImpl> strippedToOriginals = MultiMap.create();
        for (DfaMemoryStateImpl original : group) {
            strippedToOriginals.putValue(stripper.fun(original), original);
        }
        boolean mergedAny = false;
        for (Map.Entry<DfaMemoryStateImpl, Collection<DfaMemoryStateImpl>> entry : strippedToOriginals.entrySet()) {
            Collection<DfaMemoryStateImpl> originals = entry.getValue();
            if (originals.size() > 1) {
                myRemovedStates.addAll(originals);
                myMerged.add(entry.getKey());
                mergedAny = true;
            }
        }
        return mergedAny;
    }
}
/**
 * Immutable snapshot of an equivalence class: its non-negated variables and
 * its constant member, if any.
 */
static final class EqClassInfo {
    final List<DfaVariableValue> vars;
    final DfaValue constant;

    EqClassInfo(EqClass eqClass) {
        // The two lookups are independent; capture both from the class.
        constant = eqClass.findConstant(true);
        vars = eqClass.getVariables(false);
    }
}
}
| apache-2.0 |
treeform/orekit | src/test/java/org/orekit/attitudes/BodyCenterPointingTest.java | 7563 | /* Copyright 2002-2014 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.attitudes;
import org.apache.commons.math3.geometry.euclidean.threed.Line;
import org.apache.commons.math3.geometry.euclidean.threed.Rotation;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.apache.commons.math3.util.FastMath;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.orekit.Utils;
import org.orekit.errors.OrekitException;
import org.orekit.frames.Frame;
import org.orekit.frames.FramesFactory;
import org.orekit.frames.Transform;
import org.orekit.orbits.CircularOrbit;
import org.orekit.orbits.PositionAngle;
import org.orekit.propagation.Propagator;
import org.orekit.propagation.SpacecraftState;
import org.orekit.propagation.analytical.KeplerianPropagator;
import org.orekit.time.AbsoluteDate;
import org.orekit.time.DateComponents;
import org.orekit.time.TimeComponents;
import org.orekit.time.TimeScalesFactory;
import org.orekit.utils.AngularCoordinates;
import org.orekit.utils.Constants;
import org.orekit.utils.IERSConventions;
import org.orekit.utils.PVCoordinates;
public class BodyCenterPointingTest {

    // Computation date
    private AbsoluteDate date;

    // Orbit
    private CircularOrbit circ;

    // Reference frame = ITRF 2008
    private Frame itrf;

    // Transform from EME2000 to ITRF2008
    private Transform eme2000ToItrf;

    // Earth center pointing attitude provider
    private BodyCenterPointing earthCenterAttitudeLaw;

    /** Test if target is body center. */
    @Test
    public void testTarget() throws OrekitException {
        // Call get target method
        Vector3D target = earthCenterAttitudeLaw.getTargetPoint(circ, date, circ.getFrame());
        // Check that target is body center (position norm is ~0)
        Assert.assertEquals(0.0, target.getNorm(), Utils.epsilonTest);
    }

    /** Test if body center belongs to the direction pointed by the satellite. */
    @Test
    public void testBodyCenterInPointingDirection() throws OrekitException {
        // Transform satellite position to position/velocity parameters in EME2000 frame
        PVCoordinates pvSatEME2000 = circ.getPVCoordinates();

        // Pointing direction
        // ********************
        // Get satellite attitude rotation, i.e rotation from EME2000 frame to satellite frame
        Rotation rotSatEME2000 = earthCenterAttitudeLaw.getAttitude(circ, date, circ.getFrame()).getRotation();

        // Transform Z axis from satellite frame to EME2000
        Vector3D zSatEME2000 = rotSatEME2000.applyInverseTo(Vector3D.PLUS_K);

        // Transform Z axis from EME2000 to ITRF2008
        Vector3D zSatITRF2008C = eme2000ToItrf.transformVector(zSatEME2000);

        // Transform satellite position/velocity from EME2000 to ITRF2008
        PVCoordinates pvSatITRF2008C = eme2000ToItrf.transformPVCoordinates(pvSatEME2000);

        // Line containing satellite point and following pointing direction
        Line pointingLine = new Line(pvSatITRF2008C.getPosition(),
                                     pvSatITRF2008C.getPosition().add(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                                                                      zSatITRF2008C),
                                     1.0e-10);

        // Check that the line contains earth center (distance from line to point less than 1.e-8 m)
        double distance = pointingLine.distance(Vector3D.ZERO);
        Assert.assertTrue(distance < 1.e-8);
    }

    /** Check that the propagated spin is consistent with a finite-difference attitude rate. */
    @Test
    public void testSpin() throws OrekitException {
        Propagator propagator = new KeplerianPropagator(circ, earthCenterAttitudeLaw);
        double h = 0.01;
        SpacecraftState sMinus = propagator.propagate(date.shiftedBy(-h));
        SpacecraftState s0 = propagator.propagate(date);
        SpacecraftState sPlus = propagator.propagate(date.shiftedBy(h));

        // check spin is consistent with attitude evolution
        double errorAngleMinus = Rotation.distance(sMinus.shiftedBy(h).getAttitude().getRotation(),
                                                   s0.getAttitude().getRotation());
        double evolutionAngleMinus = Rotation.distance(sMinus.getAttitude().getRotation(),
                                                       s0.getAttitude().getRotation());
        Assert.assertEquals(0.0, errorAngleMinus, 1.0e-6 * evolutionAngleMinus);
        double errorAnglePlus = Rotation.distance(s0.getAttitude().getRotation(),
                                                  sPlus.shiftedBy(-h).getAttitude().getRotation());
        double evolutionAnglePlus = Rotation.distance(s0.getAttitude().getRotation(),
                                                      sPlus.getAttitude().getRotation());
        Assert.assertEquals(0.0, errorAnglePlus, 1.0e-6 * evolutionAnglePlus);

        // compare the propagated spin against a centered finite difference of the rotation
        Vector3D spin0 = s0.getAttitude().getSpin();
        Vector3D reference = AngularCoordinates.estimateRate(sMinus.getAttitude().getRotation(),
                                                             sPlus.getAttitude().getRotation(),
                                                             2 * h);
        Assert.assertTrue(spin0.getNorm() > 1.0e-3);
        Assert.assertEquals(0.0, spin0.subtract(reference).getNorm(), 1.0e-13);
    }

    /** Build the orbit, frames and attitude law shared by all tests. */
    @Before
    public void setUp() {
        try {
            Utils.setDataRoot("regular-data");

            // Computation date: 2008-04-07T00:00:00 UTC.
            // Fix: the original used the literals 04 and 07, which Java parses
            // as octal (they happen to equal 4 and 7 here, but 08/09 would not
            // even compile); plain decimal literals avoid the trap.
            date = new AbsoluteDate(new DateComponents(2008, 4, 7),
                                    TimeComponents.H00,
                                    TimeScalesFactory.getUTC());

            // Satellite position as circular parameters
            final double mu = 3.9860047e14;
            final double raan = 270.;
            circ =
                new CircularOrbit(7178000.0, 0.5e-4, -0.5e-4, FastMath.toRadians(50.), FastMath.toRadians(raan),
                                  FastMath.toRadians(5.300 - raan), PositionAngle.MEAN,
                                  FramesFactory.getEME2000(), date, mu);

            // Reference frame = ITRF 2008
            itrf = FramesFactory.getITRF(IERSConventions.IERS_2010, true);

            // Transform from EME2000 to ITRF2008
            eme2000ToItrf = FramesFactory.getEME2000().getTransformTo(itrf, date);

            // Create earth center pointing attitude provider
            earthCenterAttitudeLaw = new BodyCenterPointing(itrf);
        } catch (OrekitException oe) {
            Assert.fail(oe.getMessage());
        }
    }

    /** Drop fixture references so each test starts clean. */
    @After
    public void tearDown() {
        date = null;
        itrf = null;
        eme2000ToItrf = null;
        earthCenterAttitudeLaw = null;
        circ = null;
    }
}
| apache-2.0 |
AU3904/iTrack | iTrack/src/main/java/com/sports/iTrack/utils/NetworkUtils.java | 2200 | package com.sports.iTrack.utils;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.location.LocationManager;
import android.net.Uri;
import android.provider.Settings;
/**
* Created by aaron_lu on 2/12/15.
*/
public class NetworkUtils {
/**
* 强制帮用户打开GPS
*
* @param context
*/
public static final void openGPS(Context context) {
//只对部分手机可行
/*Intent GPSIntent = new Intent();
GPSIntent.setClassName("com.android.settings",
"com.android.settings.widget.SettingsAppWidgetProvider");
GPSIntent.addCategory("android.intent.category.ALTERNATIVE");
GPSIntent.setData(Uri.parse("custom:3"));
try {
PendingIntent.getBroadcast(context, 0, GPSIntent, 0).send();
} catch (PendingIntent.CanceledException e) {
e.printStackTrace();
}*/
try {
Intent myIntent = new Intent("android.settings.LOCATION_SOURCE_SETTINGS");
myIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(myIntent);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* 判断GPS是否开启,GPS或者AGPS开启一个就认为是开启的
*
* @param context
* @return true 表示开启
*/
public static final boolean isOpenGPS(final Context context) {
LocationManager locationManager
= (LocationManager) context.getSystemService(Context.LOCATION_SERVICE);
// 通过GPS卫星定位,定位级别可以精确到街(通过24颗卫星定位,在室外和空旷的地方定位准确、速度快)
boolean gps = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
// 通过WLAN或移动网络(3G/2G)确定的位置(也称作AGPS,辅助GPS定位。主要用于在室内或遮盖物(建筑群或茂密的深林等)密集的地方定位)
boolean network = locationManager.isProviderEnabled(LocationManager.NETWORK_PROVIDER);
if (gps) {
return true;
}
return false;
}
}
| apache-2.0 |
rapito/market-trade-processor | message-processor/src/main/java/com/github/rapito/mtp/processor/comms/basic/BasicTradeProcessHome.java | 338 | package com.github.rapito.mtp.processor.comms.basic;
import javax.ejb.CreateException;
import javax.ejb.EJBHome;
import java.rmi.RemoteException;
/**
 * EJB 2.x remote home interface for the {@code BasicTradeProcess} bean.
 *
 * Created by Robert Peralta on 3/24/2015.
 */
public interface BasicTradeProcessHome extends EJBHome
{
// Factory method: obtains a remote reference to a new BasicTradeProcess instance.
public BasicTradeProcess create() throws RemoteException, CreateException;
}
| apache-2.0 |
JoshCode/SEM | src/test/java/nl/joshuaslik/tudelft/SEM/control/gameObjects/pickup/powerup/player/playerMods/ProjectileSpikeDelayUpTest.java | 1197 | package nl.joshuaslik.tudelft.SEM.control.gameObjects.pickup.powerup.player.playerMods;
import static org.junit.Assert.*;
import nl.joshuaslik.tudelft.SEM.control.gameObjects.pickup.powerup.player.IPlayerModifier;
import nl.joshuaslik.tudelft.SEM.control.gameObjects.pickup.powerup.player.PlayerBaseModifier;
import org.junit.Test;
/**
 * Test the projectile spike delay up class.
 *
 * @author Faris
 */
public class ProjectileSpikeDelayUpTest {

    // Base modifier with all properties at their default values.
    private final IPlayerModifier ipm = new PlayerBaseModifier();
    // Base modifier decorated with the power-up under test.
    // (Consistency fix: made final like the sibling field; it is assigned once.)
    private final IPlayerModifier delay = new ProjectileSpikeDelayUp().decorate(ipm);

    /**
     * Test the getProjectileSpikeDelay method.
     */
    @Test
    public void testGetProjectileSpikeDelay() {
        assertEquals(1.0, delay.getProjectileSpikeDelay(), 0.001);
    }

    /**
     * Test the getMoveSpeedMultiplier method.
     */
    @Test
    public void testGetMoveSpeedMultiplier() {
        assertEquals(1.0, delay.getMoveSpeedMultiplier(), 0.001);
    }

    /**
     * Test the getProjectileSpeedMultiplier method.
     */
    @Test
    public void testGetProjectileSpeedMultiplier() {
        assertEquals(1.0, delay.getProjectileSpeedMultiplier(), 0.001);
    }
}
| apache-2.0 |
mxzs1314/Aviations | app/src/main/java/com/example/administrator/aviation/ui/activity/intjcgywl/IntExportCarrierActivity.java | 6941 | package com.example.administrator.aviation.ui.activity.intjcgywl;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Spinner;
import com.example.administrator.aviation.R;
import com.example.administrator.aviation.http.HttpCommons;
import com.example.administrator.aviation.http.HttpRoot;
import com.example.administrator.aviation.tool.DateUtils;
import com.example.administrator.aviation.ui.base.NavBar;
import com.example.administrator.aviation.util.AviationNoteConvert;
import com.example.administrator.aviation.util.ChoseTimeMethod;
import org.ksoap2.serialization.SoapObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Outbound (export) cargo traffic query screen: the user picks a date range
 * and a report type, the query is sent to the SOAP backend, and the XML
 * result is handed to {@code IntExportCarrierDetailActivity} for display.
 */
public class IntExportCarrierActivity extends Activity implements View.OnClickListener {
@BindView(R.id.declare_info_begin_time_et)
EditText declareInfoBeginTimeEt;
@BindView(R.id.declare_info_begin_time_btn)
ImageView declareInfoBeginTimeBtn;
@BindView(R.id.declare_info_end_time_et)
EditText declareInfoEndTimeEt;
@BindView(R.id.declare_info_end_time_btn)
ImageView declareInfoEndTimeBtn;
@BindView(R.id.declare_info_search_btn)
Button declareInfoSearchBtn;
@BindView(R.id.declare_info_pb)
ProgressBar declareInfoPb;
@BindView(R.id.export_carrier_et)
EditText exportCarrierEt;
@BindView(R.id.export_carrier_sp)
Spinner exportCarrierSp;
// Query parameters; currentTime holds today's date/time and prefills both date fields.
private String reportType;
private String begainTime;
private String endTime;
private String currentTime;
private String xml;
// Report type choices shown in the spinner.
private ArrayAdapter<String> reportTypeSpAdapter;
private List<String> reportTypeSpList;
// Helper that shows a date/time picker and writes the result into an EditText.
ChoseTimeMethod choseTimeMethod = new ChoseTimeMethod();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_intexportcarrier_search);
ButterKnife.bind(this);
initView();
}
// Wires up the nav bar, default dates, click listeners and the report-type spinner.
private void initView() {
NavBar navBar = new NavBar(this);
navBar.setTitle("出港业务量查询");
navBar.hideRight();
// Prefill both ends of the date range with the current date/time.
currentTime = DateUtils.getTodayDateTime();
declareInfoBeginTimeEt.setText(currentTime);
declareInfoEndTimeEt.setText(currentTime);
declareInfoBeginTimeBtn.setOnClickListener(this);
declareInfoEndTimeBtn.setOnClickListener(this);
declareInfoSearchBtn.setOnClickListener(this);
reportTypeSpList = new ArrayList<>();
reportTypeSpList.add("业务量");
reportTypeSpList.add("目的港");
reportTypeSpList.add("航班号");
reportTypeSpList.add("日");
reportTypeSpAdapter = new ArrayAdapter<>(this, android.R.layout.simple_spinner_item, reportTypeSpList);
reportTypeSpAdapter.setDropDownViewResource(android.R.layout.simple_list_item_single_choice);
exportCarrierSp.setAdapter(reportTypeSpAdapter);
// Selecting a spinner entry copies it into the report-type edit field.
exportCarrierSp.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
exportCarrierEt.setText(reportTypeSpAdapter.getItem(i));
}
@Override
public void onNothingSelected(AdapterView<?> adapterView) {
}
});
}
@Override
public void onClick(View view) {
switch (view.getId()) {
// Date-picker buttons: open the time chooser for the matching field.
case R.id.declare_info_begin_time_btn:
choseTimeMethod.getCurrentTime(IntExportCarrierActivity.this, declareInfoBeginTimeEt);
break;
case R.id.declare_info_end_time_btn:
choseTimeMethod.getCurrentTime(IntExportCarrierActivity.this, declareInfoEndTimeEt);
break;
// Search: build the request XML and fire the async SOAP call.
case R.id.declare_info_search_btn:
declareInfoPb.setVisibility(View.VISIBLE);
getEditText();
xml = getXml(begainTime, endTime, reportType);
Map<String, String> params = new HashMap<>();
params.put("awbXml", xml);
params.put("ErrString", "");
HttpRoot.getInstance().requstAync(IntExportCarrierActivity.this, HttpCommons.CGO_GET_INT_EXPORT_REPORT_NAME,
HttpCommons.CGO_GET_INT_EXPORT_REPORT_ACTION, params,
new HttpRoot.CallBack() {
@Override
public void onSucess(Object result) {
// Extract the result XML and forward it to the detail screen.
SoapObject object = (SoapObject) result;
String xmls = object.getProperty(0).toString();
// Toast.makeText(IntExportCarrierActivity.this, xml, Toast.LENGTH_SHORT).show();
Intent intent = new Intent(IntExportCarrierActivity.this, IntExportCarrierDetailActivity.class);
Bundle bundle = new Bundle();
bundle.putString("intexportdayxml", xmls);
bundle.putString("type", reportType);
intent.putExtras(bundle);
intent.putExtra("xml", xml);
startActivity(intent);
declareInfoPb.setVisibility(View.GONE);
}
@Override
public void onFailed(String message) {
// Failure/error paths only hide the progress bar; no user message is shown.
declareInfoPb.setVisibility(View.GONE);
}
@Override
public void onError() {
declareInfoPb.setVisibility(View.GONE);
}
});
break;
default:
break;
}
}
// Reads the query fields; the report type is converted from Chinese to its
// English code via AviationNoteConvert.cNtoEn.
private void getEditText() {
reportType = exportCarrierEt.getText().toString().trim();
reportType = AviationNoteConvert.cNtoEn(reportType);
begainTime = declareInfoBeginTimeEt.getText().toString().trim();
endTime = declareInfoEndTimeEt.getText().toString().trim();
}
// Builds the GJCCarrierReport request XML sent to the SOAP service.
private String getXml(String begainTime, String endTime, String reportType) {
return "<GJCCarrierReport>"
+ "<ReportType>" + reportType + "</ReportType>"
+ "<StartDay>" + begainTime + "</StartDay>"
+ "<EndDay>" + endTime + "</EndDay>"
+ "</GJCCarrierReport>";
}
}
| apache-2.0 |
flapdoodle-oss/de.flapdoodle.codedoc | maven-plugin/src/main/java/de/flapdoodle/codedoc/maven/CodeDocMojo.java | 2492 | /**
* Copyright (C) 2016
* Michael Mosmann <michael@mosmann.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.flapdoodle.codedoc.maven;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
/*
* @see https://maven.apache.org/guides/plugin/guide-java-plugin-development.html
* @see https://books.sonatype.com/mvnref-book/reference/writing-plugins-sect-custom-plugin.html
* @author mosmann
*
*/
@Mojo( name = "touch", defaultPhase = LifecyclePhase.PREPARE_PACKAGE )
@Execute(goal = "touch", phase = LifecyclePhase.PREPARE_PACKAGE)
public class CodeDocMojo extends AbstractMojo {

    /**
     * Location of the directory into which the marker file is written.
     */
    @Parameter( defaultValue = "${project.build.directory}", property = "outputDir", required = true )
    private File outputDirectory;

    /**
     * Creates the output directory when missing and writes a
     * {@code touch.txt} marker file into it.
     *
     * @throws MojoExecutionException when the marker file cannot be written
     */
    public void execute()
        throws MojoExecutionException
    {
        File f = outputDirectory;
        if ( !f.exists() )
        {
            f.mkdirs();
        }
        File touch = new File( f, "touch.txt" );
        // try-with-resources replaces the original manual close() in a
        // finally block, which silently swallowed any IOException raised
        // while closing the writer.
        try ( FileWriter w = new FileWriter( touch ) )
        {
            w.write( "touch.txt" );
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error creating file " + touch, e );
        }
    }
}
| apache-2.0 |
manub/dropwizard-cassandra | src/test/java/org/stuartgunter/dropwizard/cassandra/smoke/SmokeIntegrationTest.java | 3152 | /*
* Copyright 2014 Stuart Gunter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.stuartgunter.dropwizard.cassandra.smoke;
import com.google.common.io.Resources;
import io.dropwizard.testing.junit.DropwizardAppRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * A series of smoke tests that ensure the application can load and initialise the Cassandra cluster under
 * different configuration variants.
 *
 * This doesn't test that it works correctly - just that the configuration can be successfully loaded.
 */
@RunWith(Parameterized.class)
public class SmokeIntegrationTest {
@Rule
public final DropwizardAppRule<SmokeTestConfiguration> app;
// Each parameterized run boots the app with one of the YAML configs listed in data().
public SmokeIntegrationTest(String configPath) {
this.app = new DropwizardAppRule<>(SmokeTestApp.class, Resources.getResource(configPath).getPath());
}
// Classpath-relative configuration files, one per Cassandra option group under test.
@Parameterized.Parameters(name = "Config: {0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{ "smoke/basic.yml" },
{ "smoke/poolingOptions.yml" },
{ "smoke/queryOptions.yml" },
{ "smoke/socketOptions.yml" },
{ "smoke/auth/authProvider-plainText.yml" },
{ "smoke/loadbalancing/loadBalancingPolicy-dcAwareRoundRobin.yml" },
{ "smoke/loadbalancing/loadBalancingPolicy-latencyAware.yml" },
{ "smoke/loadbalancing/loadBalancingPolicy-roundRobin.yml" },
{ "smoke/loadbalancing/loadBalancingPolicy-tokenAware.yml" },
{ "smoke/loadbalancing/loadBalancingPolicy-whiteList.yml" },
{ "smoke/reconnection/reconnectionPolicy-constant.yml" },
{ "smoke/reconnection/reconnectionPolicy-exponential.yml" },
{ "smoke/retry/retryPolicy-default.yml" },
{ "smoke/retry/retryPolicy-downgradingConsistency.yml" },
{ "smoke/retry/retryPolicy-fallthrough.yml" },
{ "smoke/retry/retryPolicy-log.yml" },
{ "smoke/speculativeexecution/speculativeExecutionPolicy-constant.yml" },
{ "smoke/speculativeexecution/speculativeExecutionPolicy-none.yml" }
});
}
// If startup succeeded, the cluster's health check must have been registered.
@Test
public void supportsConfiguration() throws Exception {
assertThat(app.getEnvironment().healthChecks().getNames())
.contains("cassandra.smoke-cluster");
}
}
| apache-2.0 |
kwin/jackrabbit-oak | oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/tool/Restore.java | 2921 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.segment.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.File;
import org.apache.jackrabbit.oak.backup.FileStoreRestore;
/**
 * Restore a backup of a segment store into an existing segment store.
 */
public class Restore implements Runnable {

    private final File source;

    private final File target;

    private Restore(Builder builder) {
        source = builder.backupSource;
        target = builder.restoreTarget;
    }

    /**
     * Create a builder for the {@link Restore} command.
     *
     * @return an instance of {@link Builder}.
     */
    public static Builder builder() {
        return new Builder();
    }

    @Override
    public void run() {
        try {
            FileStoreRestore.restore(source, target);
        } catch (Exception e) {
            // NOTE(review): failures are only reported to stderr; callers
            // cannot observe them. Confirm this best-effort behaviour is intended.
            e.printStackTrace();
        }
    }

    /**
     * Collect options for the {@link Restore} command.
     */
    public static class Builder {

        private File backupSource;

        private File restoreTarget;

        private Builder() {
            // Prevent external instantiation.
        }

        /**
         * The source path of the restore. This parameter is mandatory.
         *
         * @param source the source path of the restore.
         * @return this builder.
         */
        public Builder withSource(File source) {
            backupSource = checkNotNull(source);
            return this;
        }

        /**
         * The target of the restore. This parameter is mandatory.
         *
         * @param target the target of the restore.
         * @return this builder.
         */
        public Builder withTarget(File target) {
            restoreTarget = checkNotNull(target);
            return this;
        }

        /**
         * Create an executable version of the {@link Restore} command.
         *
         * @return an instance of {@link Runnable}.
         */
        public Runnable build() {
            checkNotNull(backupSource);
            checkNotNull(restoreTarget);
            return new Restore(this);
        }
    }
}
| apache-2.0 |
moyq5/weixin-popular | src/main/java/weixin/popular/bean/user/tag/get/TagGet.java | 1011 | /**
*
*/
package weixin.popular.bean.user.tag.get;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Request parameters for fetching the followers under a user tag
 * (WeChat user-tag API).
 * @author Moyq5
 * @date 2017-02-16
 */
public class TagGet {
/**
 * Tag ID. Required.
 */
@JsonProperty("tagid")
private Integer tagId;
/**
 * OpenID to start pulling from; when omitted, pulling starts from the beginning.
 */
@JsonProperty("next_openid")
private String nextOpenId;
/**
 * @return the tag ID
 */
public Integer getTagId() {
return tagId;
}
/**
 * Tag ID. Required.
 * @param tagId the tag ID
 */
public void setTagId(Integer tagId) {
this.tagId = tagId;
}
/**
 * @return the OpenID to start pulling from
 */
public String getNextOpenId() {
return nextOpenId;
}
/**
 * OpenID to start pulling from; when omitted, pulling starts from the beginning.
 * @param nextOpenId the OpenID to start pulling from
 */
public void setNextOpenId(String nextOpenId) {
this.nextOpenId = nextOpenId;
}
}
| apache-2.0 |
lstav/accumulo | test/src/main/java/org/apache/accumulo/test/metrics/MetricsFileTailer.java | 7863 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.test.metrics;
import java.io.File;
import java.io.RandomAccessFile;
import java.net.URL;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration2.FileBasedConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class allows testing of the publishing to the hadoop metrics system by processing a file for
* metric records (written as a line.) The file should be configured using the hadoop metrics
* properties as a file based sink with the prefix that is provided on instantiation of the
* instance.
*
* This class will simulate tail-ing a file and is intended to be run in a separate thread. When the
* underlying file has data written, the vaule returned by getLastUpdate will change, and the last
* line can be retrieved with getLast().
*/
public class MetricsFileTailer implements Runnable, AutoCloseable {
private static final Logger log = LoggerFactory.getLogger(MetricsFileTailer.class);
// Capacity of the circular buffer holding the most recent lines.
private static final int BUFFER_SIZE = 4;
// Metrics configuration prefix used to locate the file-sink settings.
private final String metricsPrefix;
// Guards lineBuffer and lineCounter between the tailer thread and readers.
private Lock lock = new ReentrantLock();
// Cleared to stop the polling loop.
private AtomicBoolean running = new AtomicBoolean(Boolean.TRUE);
// Marker exposed via getLastUpdate(); changes when a new line is buffered.
private AtomicLong lastUpdate = new AtomicLong(0);
private long startTime = System.nanoTime();
// Count of lines buffered so far; lineCounter % BUFFER_SIZE is the next slot.
private int lineCounter = 0;
// Circular buffer of the most recently read lines.
private String[] lineBuffer = new String[BUFFER_SIZE];
// Path of the file being tailed (empty string when not configured).
private final String metricsFilename;
/**
* Create an instance that will tail a metrics file. The filename / path is determined by the
* hadoop-metrics-accumulo.properties sink configuration for the metrics prefix that is provided.
*
* @param metricsPrefix
* the prefix in the metrics configuration.
*/
public MetricsFileTailer(final String metricsPrefix) {
this.metricsPrefix = metricsPrefix;
// Resolve the sink configuration subset for this prefix.
Configuration sub = loadMetricsConfig();
// Dump the received configuration keys (trace level only).
if (log.isTraceEnabled()) {
Iterator<String> keys = sub.getKeys();
while (keys.hasNext()) {
log.trace("configuration key:{}", keys.next());
}
}
// Fall back to an empty filename when the sink does not declare one.
if (sub.containsKey("filename")) {
metricsFilename = sub.getString("filename");
} else {
metricsFilename = "";
}
}
/**
* Create an instance by specifying a file directly instead of resolving it from
* the metrics configuration - mainly for testing.
*
* @param metricsPrefix
* generally can be ignored (kept for symmetry with the main constructor).
* @param filename
* the path / file to be monitored.
*/
MetricsFileTailer(final String metricsPrefix, final String filename) {
this.metricsPrefix = metricsPrefix;
metricsFilename = filename;
}
/**
* Look for the accumulo metrics configuration file on the classpath and return the subset for the
* http sink.
*
* @return a configuration with http sink properties.
*/
/**
 * Locates the accumulo metrics properties file on the classpath and returns
 * the configuration subset for this instance's metrics prefix.
 *
 * @return the sink configuration subset for {@code metricsPrefix}
 * @throws IllegalStateException when the properties file is missing or cannot be parsed
 */
private Configuration loadMetricsConfig() {
    try {
        final URL propUrl =
            getClass().getClassLoader().getResource(MetricsTestSinkProperties.METRICS_PROP_FILENAME);
        if (propUrl == null) {
            throw new IllegalStateException(
                "Could not find " + MetricsTestSinkProperties.METRICS_PROP_FILENAME + " on classpath");
        }
        String filename = propUrl.getFile();
        Parameters params = new Parameters();
        // Read data from this file
        File propertiesFile = new File(filename);
        FileBasedConfigurationBuilder<FileBasedConfiguration> builder =
            new FileBasedConfigurationBuilder<FileBasedConfiguration>(PropertiesConfiguration.class)
                .configure(params.fileBased().setFile(propertiesFile));
        Configuration config = builder.getConfiguration();
        final Configuration sub = config.subset(metricsPrefix);
        if (log.isTraceEnabled()) {
            log.trace("Config {}", config);
            Iterator<String> iterator = sub.getKeys();
            while (iterator.hasNext()) {
                String key = iterator.next();
                log.trace("'{}\'=\'{}\'", key, sub.getProperty(key));
            }
        }
        return sub;
    } catch (ConfigurationException ex) {
        // Bug fix: keep the original exception as the cause instead of
        // silently dropping the parse failure details.
        throw new IllegalStateException(
            String.format("Could not find configuration file \'%s\' on classpath",
                MetricsTestSinkProperties.METRICS_PROP_FILENAME), ex);
    }
}
/**
 * Creates a marker value that changes each time a new line is detected. Clients can use this to
 * determine if a call to getLast() will return a new value.
 *
 * @return a marker value (nanos elapsed since construction at the last read) set when a line is
 *         available; 0 until the first line has been read.
 */
public long getLastUpdate() {
  return lastUpdate.get();
}
/**
 * Returns the most recent line read from the tailed file.
 *
 * @return the newest buffered line, or {@code null} if nothing has been read yet.
 */
public String getLast() {
  lock.lock();
  try {
    // The buffer is circular: the slot just before the next write position holds the
    // newest line; wrap around when the write position is at slot 0.
    int newest = (lineCounter % BUFFER_SIZE) - 1;
    return lineBuffer[newest < 0 ? BUFFER_SIZE - 1 : newest];
  } finally {
    lock.unlock();
  }
}
/**
 * A loop that polls for changes every 5 seconds; when the file grows, the new lines are copied
 * into the circular buffer so clients can retrieve the newest one with {@link #getLast()}. A
 * shrinking file is treated as truncation and resets the buffered state. Exits when
 * {@link #close()} is called or the thread is interrupted.
 */
@Override
public void run() {
  long filePos = 0;
  File f = new File(metricsFilename);
  while (running.get()) {
    try {
      Thread.sleep(5_000);
    } catch (InterruptedException ex) {
      running.set(Boolean.FALSE);
      Thread.currentThread().interrupt();
      return;
    }
    long len = f.length();
    try {
      // file truncated? reset position and clear stale buffered lines
      if (len < filePos) {
        filePos = 0;
        lock.lock();
        try {
          for (int i = 0; i < BUFFER_SIZE; i++) {
            lineBuffer[i] = "";
          }
          lineCounter = 0;
        } finally {
          lock.unlock();
        }
      }
      if (len > filePos) {
        // File must have had something added to it!
        // try-with-resources guarantees the file handle is released even when seek/readLine
        // throws (the original code leaked the RandomAccessFile on exception).
        try (RandomAccessFile raf = new RandomAccessFile(f, "r")) {
          raf.seek(filePos);
          String line;
          lock.lock();
          try {
            while ((line = raf.readLine()) != null) {
              lineBuffer[lineCounter++ % BUFFER_SIZE] = line;
            }
            lastUpdate.set(System.nanoTime() - startTime);
          } finally {
            lock.unlock();
          }
          filePos = raf.getFilePointer();
        }
      }
    } catch (Exception ex) {
      log.info("Error processing metrics file {}", metricsFilename, ex);
    }
  }
}
/**
 * Signals the polling loop in {@link #run()} to exit; the thread stops after finishing its
 * current sleep / read cycle.
 */
@Override
public void close() {
  running.set(Boolean.FALSE);
}
}
| apache-2.0 |
fpompermaier/onvif | onvif-ws-client/src/main/java/org/onvif/ver10/schema/AnalyticsEngineInput.java | 5648 |
package org.onvif.ver10.schema;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.cxf.xjc.runtime.JAXBToStringStyle;
import org.w3c.dom.Element;
/**
 * JAXB binding for the ONVIF ver10 schema complex type {@code AnalyticsEngineInput}.
 *
 * <p>
 * Extends {@code ConfigurationEntity} with the required child elements
 * {@code SourceIdentification}, {@code VideoInput} and {@code MetadataInput}, plus lax
 * any-element and any-attribute extension points for schema-unknown content.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AnalyticsEngineInput", propOrder = {
    "sourceIdentification",
    "videoInput",
    "metadataInput",
    "any"
})
public class AnalyticsEngineInput
    extends ConfigurationEntity
{

    @XmlElement(name = "SourceIdentification", required = true)
    protected SourceIdentification sourceIdentification;
    @XmlElement(name = "VideoInput", required = true)
    protected VideoEncoderConfiguration videoInput;
    @XmlElement(name = "MetadataInput", required = true)
    protected MetadataInput metadataInput;
    @XmlAnyElement(lax = true)
    protected List<java.lang.Object> any;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /** @return the {@code SourceIdentification} element, or {@code null} if unset. */
    public SourceIdentification getSourceIdentification() {
        return sourceIdentification;
    }

    /** @param value the {@code SourceIdentification} element to set. */
    public void setSourceIdentification(SourceIdentification value) {
        this.sourceIdentification = value;
    }

    /** @return the {@code VideoInput} element, or {@code null} if unset. */
    public VideoEncoderConfiguration getVideoInput() {
        return videoInput;
    }

    /** @param value the {@code VideoInput} element to set. */
    public void setVideoInput(VideoEncoderConfiguration value) {
        this.videoInput = value;
    }

    /** @return the {@code MetadataInput} element, or {@code null} if unset. */
    public MetadataInput getMetadataInput() {
        return metadataInput;
    }

    /** @param value the {@code MetadataInput} element to set. */
    public void setMetadataInput(MetadataInput value) {
        this.metadataInput = value;
    }

    /**
     * Returns the live, lazily created list backing the lax any-element content. Modifications
     * are reflected directly in this JAXB object, which is why no setter exists. Permitted
     * element types are {@link Element} and {@link java.lang.Object}.
     *
     * @return the live list of wildcard content, never {@code null}.
     */
    public List<java.lang.Object> getAny() {
        if (any == null) {
            any = new ArrayList<java.lang.Object>();
        }
        return any;
    }

    /**
     * Returns the live map of attributes that are not bound to any typed property, keyed by
     * qualified attribute name. New attributes may be attached by mutating the map directly,
     * which is why no setter exists.
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

    /**
     * Reflection-based string representation, as produced by the 'ts' xjc plugin.
     */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, JAXBToStringStyle.DEFAULT_STYLE);
    }

}
| apache-2.0 |
eggeral/threading-examples | src/test/java/software/egger/am_wait_notify.java | 7144 | package software.egger;
import org.junit.Test;
/**
 * Teaching examples for the low-level Object.wait()/notify()/notifyAll() monitor API. Each test
 * prints its progress; the sleeps are only there to make the thread interleavings deterministic
 * enough to observe.
 */
public class am_wait_notify {

    // Basic handshake: one thread waits on a monitor, the main thread notifies it.
    @Test
    public void simpleWait() throws InterruptedException {
        Object lock = new Object();
        Thread t = new Thread(() -> {
            synchronized (lock) { // wait has to be called in synchronized otherwise we get java.lang.IllegalMonitorStateException
                System.out.println("t waits");
                try {
                    lock.wait(); // the also releases the lock! Think of what happens to the sum example if we use wait() there!
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                System.out.println("t awake again");
            }
        });
        t.start();
        Thread.sleep(100); // Give t some time to enter wait()
        synchronized (lock) { // notify has to be called in synchronized otherwise we get java.lang.IllegalMonitorStateException
            System.out.println("calling notify");
            lock.notify();
            System.out.println("notify called");
        }
        t.join();
    }

    // A thread blocked in wait() is also released by interrupt(); note that catching the
    // InterruptedException clears the thread's interrupted flag.
    @Test
    public void waitAlsoExitsOnInterrupt() throws InterruptedException {
        Object lock = new Object();
        Thread t = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t waits");
                try {
                    lock.wait();
                } catch (InterruptedException e) {
                    System.out.println("wait interrupted");
                    System.out.println("isInterrupted(): " + Thread.currentThread().isInterrupted());
                    System.out.println("interrupted(): " + Thread.interrupted());
                }
                System.out.println("t awake again");
            }
        });
        t.start();
        Thread.sleep(100); // Give t some time to enter wait()
        t.interrupt();
        t.join();
    }

    // notify() wakes exactly one waiter per call, so two calls are needed for two waiting threads.
    @SuppressWarnings("Duplicates")
    @Test
    public void notifyReleasesOneThreads() throws InterruptedException {
        Object lock = new Object();
        Thread t1 = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t1 waits");
                try {
                    lock.wait();
                } catch (InterruptedException e) {
                    System.out.println("t1 wait interrupted");
                }
                System.out.println("t1 awake again");
            }
        });
        Thread t2 = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t2 waits");
                try {
                    lock.wait();
                } catch (InterruptedException e) {
                    System.out.println("t2 wait interrupted");
                }
                System.out.println("t2 awake again");
            }
        });
        t1.start();
        t2.start();
        Thread.sleep(100); // Give t1, t2 some time to enter wait()
        synchronized (lock) {
            System.out.println("calling notify (1)");
            lock.notify();
            System.out.println("notify (1) called");
            System.out.println("calling notify (2)");
            lock.notify();
            System.out.println("notify (2) called");
        }
        t1.join();
        t2.join();
    }

    // Shared by notifyAllReleasesAllThreads to demonstrate that awakened threads still run
    // one at a time (each must re-acquire the monitor before leaving wait()).
    int counter = 0;

    @SuppressWarnings("Duplicates")
    @Test
    public void notifyAllReleasesAllThreads() throws InterruptedException {
        Object lock = new Object();
        Thread t1 = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t1 waits");
                try {
                    lock.wait();
                } catch (InterruptedException e) {
                    System.out.println("t1 wait interrupted");
                }
                System.out.println("t1 awake again");
                int tmp = counter;
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                counter = tmp + 1;
                System.out.println("t1 done");
            }
        });
        Thread t2 = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t2 waits");
                try {
                    lock.wait();
                } catch (InterruptedException e) {
                    System.out.println("t2 wait interrupted");
                }
                System.out.println("t2 awake again");
                int tmp = counter;
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                counter = tmp + 1;
                System.out.println("t2 done");
            }
        });
        t1.start();
        t2.start();
        Thread.sleep(100); // Give t1, t2 some time to enter wait()
        synchronized (lock) {
            System.out.println("calling notifyAll");
            lock.notifyAll(); // Note that all threads are awakened _and_ they are executed sequentially so locking is still guaranteed -> counter = 2
            System.out.println("notifyAll called");
        }
        t1.join();
        t2.join();
        System.out.println("Counter " + counter);
    }

    // wait(timeout) returns on its own once the timeout elapses; no notify is ever sent here.
    @Test
    public void waitCanHaveTimeOuts() throws InterruptedException {
        Object lock = new Object();
        Thread t = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t waits");
                try {
                    lock.wait(100, 1); // there is also an implementation of wait without nanos. Look at the implementations of wait with nanos! timeout ++!
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                System.out.println("t awake again");
            }
        });
        t.start();
        t.join();
    }

    // Condition variable for the spurious-wakeup demonstration below.
    private int count = 0;

    // Demonstrates the canonical wait-in-a-loop pattern: the predicate must be re-checked after
    // every wakeup because notify may arrive before the condition is actually satisfied (and
    // spurious wakeups are possible).
    @Test
    public void waitConditionAlwaysHasToBeCheckedInALoop() throws InterruptedException {
        // see also documentation of wait()
        Object lock = new Object();
        Thread t = new Thread(() -> {
            synchronized (lock) {
                System.out.println("t waits");
                while (count < 10) { // in very rare cases the thread can exit wait so we always have to check the condition in a loop!
                    try {
                        System.out.println("count not high enough!");
                        lock.wait();
                        System.out.println("t exited wait");
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                System.out.println("t is done");
            }
        });
        t.start();
        Thread.sleep(10);
        synchronized (lock) {
            count = 5;
            lock.notify();
            Thread.sleep(10);
            count = 11;
            lock.notify();
        }
        t.join();
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-cloudtrail/src/main/java/com/amazonaws/services/cloudtrail/model/DescribeQueryRequest.java | 5292 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudtrail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the CloudTrail {@code DescribeQuery} operation, identifying a query by its
 * ID and the event data store it ran against.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudtrail-2013-11-01/DescribeQuery" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeQueryRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ARN (or the ID suffix of the ARN) of an event data store on which the specified query was run. */
    private String eventDataStore;

    /** The query ID. */
    private String queryId;

    /**
     * Sets the ARN (or the ID suffix of the ARN) of the event data store on which the specified
     * query was run.
     *
     * @param eventDataStore
     *        the event data store ARN or ID suffix.
     */
    public void setEventDataStore(String eventDataStore) {
        this.eventDataStore = eventDataStore;
    }

    /**
     * @return the ARN (or the ID suffix of the ARN) of the event data store on which the
     *         specified query was run.
     */
    public String getEventDataStore() {
        return this.eventDataStore;
    }

    /**
     * Fluent variant of {@link #setEventDataStore(String)}.
     *
     * @param eventDataStore
     *        the event data store ARN or ID suffix.
     * @return this request, for call chaining.
     */
    public DescribeQueryRequest withEventDataStore(String eventDataStore) {
        setEventDataStore(eventDataStore);
        return this;
    }

    /**
     * Sets the query ID.
     *
     * @param queryId
     *        the query ID.
     */
    public void setQueryId(String queryId) {
        this.queryId = queryId;
    }

    /**
     * @return the query ID.
     */
    public String getQueryId() {
        return this.queryId;
    }

    /**
     * Fluent variant of {@link #setQueryId(String)}.
     *
     * @param queryId
     *        the query ID.
     * @return this request, for call chaining.
     */
    public DescribeQueryRequest withQueryId(String queryId) {
        setQueryId(queryId);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging. The
     * output format matches the standard SDK-generated formatting.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append("{");
        if (getEventDataStore() != null) {
            out.append("EventDataStore: ").append(getEventDataStore()).append(",");
        }
        if (getQueryId() != null) {
            out.append("QueryId: ").append(getQueryId());
        }
        out.append("}");
        return out.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeQueryRequest)) {
            return false;
        }
        DescribeQueryRequest that = (DescribeQueryRequest) obj;
        // Two fields: each pair must be both-null or equal.
        if (getEventDataStore() == null ? that.getEventDataStore() != null
                : !getEventDataStore().equals(that.getEventDataStore())) {
            return false;
        }
        if (getQueryId() == null ? that.getQueryId() != null : !getQueryId().equals(that.getQueryId())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getEventDataStore() == null ? 0 : getEventDataStore().hashCode());
        result = prime * result + (getQueryId() == null ? 0 : getQueryId().hashCode());
        return result;
    }

    @Override
    public DescribeQueryRequest clone() {
        return (DescribeQueryRequest) super.clone();
    }

}
| apache-2.0 |
Rikkola/guvnor | guvnor-webapp/src/main/java/org/drools/guvnor/client/ruleeditor/MultiViewEditorMenuBarCreator.java | 866 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.client.ruleeditor;
import com.google.gwt.user.client.ui.MenuBar;
/**
 * Creates a MenuBar component for a MultiViewEditor instance.
 */
public interface MultiViewEditorMenuBarCreator {

    /**
     * Builds the menu bar to be shown for the given multi-view editor.
     *
     * @param editor the editor instance the menu bar's actions will operate on
     * @return the menu bar to display
     */
    MenuBar createMenuBar(MultiViewEditor editor);
}
| apache-2.0 |
welterde/ewok | com/planet_ink/coffee_mud/Abilities/Spells/Spell_RepairingAura.java | 5920 | package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("unchecked")
public class Spell_RepairingAura extends Spell
{
    // Engine identifier and display name for this ability.
    public String ID() { return "Spell_RepairingAura"; }
    public String name(){return "Repairing Aura";}
    // The aura can only be placed on items, though invoke() also accepts a MOB
    // target and then picks one of that MOB's items.
    protected int canAffectCode(){return CAN_ITEMS;}
    protected int canTargetCode(){return CAN_ITEMS;}
    public int classificationCode(){return Ability.ACODE_SPELL|Ability.DOMAIN_ABJURATION;}
    public int abstractQuality(){ return Ability.QUALITY_INDIFFERENT;}
    public int overrideMana(){ return 50;}

    // Countdown budget between repair attempts; drained by adjustedLevel each tick,
    // so higher-level casters repair more often.
    public static final int REPAIR_MAX=30;
    public int repairDown=REPAIR_MAX;
    // Caster's adjusted level at invocation time; assigned in invoke().
    public int adjustedLevel=1;

    // Marks the affected item with the IS_BONUS disposition while the aura lasts.
    public void affectEnvStats(Environmental affected, EnvStats affectableStats)
    {
        super.affectEnvStats(affected,affectableStats);
        affectableStats.setDisposition(affectableStats.disposition()|EnvStats.IS_BONUS);
    }

    /**
     * Per-tick upkeep: drains the repair countdown by the caster's level and, when it
     * reaches zero, restores one point of wear-and-tear to the affected item (if it is
     * damaged), announcing the repair to the item's room or carrier.
     */
    public boolean tick(Tickable ticking, int tickID)
    {
        if(!super.tick(ticking,tickID))
            return false;
        repairDown-=adjustedLevel;
        if((repairDown<=0)&&(affected instanceof Item))
        {
            repairDown=REPAIR_MAX;
            Item I=(Item)affected;
            // usesRemaining is the item's condition percentage; only repair below 100.
            if((I.subjectToWearAndTear())&&(I.usesRemaining()<100))
            {
                if(I.owner() instanceof Room)
                    ((Room)I.owner()).showHappens(CMMsg.MSG_OK_VISUAL,I,"<S-NAME> is magically repairing itself.");
                else
                if(I.owner() instanceof MOB)
                    ((MOB)I.owner()).tell(I.name()+" is magically repairing itself.");
                I.setUsesRemaining(I.usesRemaining()+1);
            }
        }
        return true;
    }

    /**
     * Casts the aura. The target may be an item (affected directly) or a MOB, in which
     * case a random wear-and-tear item is chosen from the MOB's possessions, preferring
     * carried inventory unless three or more worn/held candidates exist.
     */
    public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel)
    {
        Environmental target=getAnyTarget(mob,commands,givenTarget,Wearable.FILTER_ANY);
        if(target==null) return false;
        // Never stack a second aura on the same target.
        if(target.fetchEffect(this.ID())!=null)
        {
            mob.tell(target.name()+" is already repairing!");
            return false;
        }
        if((!(target instanceof Item))&&(!(target instanceof MOB)))
        {
            mob.tell(target.name()+" would not be affected by this spell.");
            return false;
        }
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        boolean success=proficiencyCheck(mob,0,auto);
        Item realTarget=null;
        if(target instanceof Item)
            realTarget=(Item)target;
        else
        if(target instanceof MOB)
        {
            // Collect the MOB's repairable items, split into worn/held (choices)
            // and carried inventory.
            Vector choices=new Vector();
            Vector inventory=new Vector();
            MOB M=(MOB)target;
            Item I=null;
            for(int i=0;i<M.inventorySize();i++)
            {
                I=M.fetchInventory(i);
                if((I!=null)&&(I.subjectToWearAndTear())&&(I.fetchEffect(ID())==null))
                {
                    if(I.amWearingAt(Wearable.IN_INVENTORY))
                        inventory.addElement(I);
                    else
                        choices.addElement(I);
                }
            }
            // With fewer than three worn/held candidates, pool everything together.
            Vector chooseFrom=inventory;
            if(choices.size()<3)
                inventory.addAll(choices);
            else
                chooseFrom=choices;
            if(chooseFrom.size()<1)
                success=false;
            else
                realTarget=(Item)chooseFrom.elementAt(CMLib.dice().roll(1,chooseFrom.size(),-1));
        }
        if(success)
        {
            // msg carries the visible casting emote; msg2 is a silent secondary event
            // when the announced target (a MOB) differs from the actual item affected.
            CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":"^S<S-NAME> wave(s) <S-HIS-HER> hands around <T-NAMESELF>, incanting.^?");
            CMMsg msg2=(target==realTarget)?null:CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),null);
            if(mob.location().okMessage(mob,msg)
            &&(realTarget!=null)
            &&((msg2==null)||mob.location().okMessage(mob,msg2)))
            {
                mob.location().send(mob,msg);
                if(msg2!=null) mob.location().send(mob,msg2);
                mob.location().show(mob,realTarget,CMMsg.MSG_OK_ACTION,"<T-NAME> attain(s) a repairing aura.");
                beneficialAffect(mob,realTarget,asLevel,0);
                Spell_RepairingAura A=(Spell_RepairingAura)realTarget.fetchEffect(ID());
                if(A!=null) A.adjustedLevel=adjustedLevel(mob,asLevel);
                realTarget.recoverEnvStats();
                mob.recoverEnvStats();
                mob.location().recoverRoomStats();
            }
        }
        else
            beneficialWordsFizzle(mob,target,"<S-NAME> wave(s) <S-HIS-HER> hands around <T-NAMESELF>, incanting, but nothing happens.");

        // return whether it worked
        return success;
    }
}
| apache-2.0 |
RUB-NDS/Single-Sign-On-Libraries | ssolibs/ValidationService/ft_sso_api/src/main/java/oasis/names/tc/saml/_1_0/assertion/AttributeStatementType.java | 2357 | //
// Diese Datei wurde mit der JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 generiert
// Siehe <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Änderungen an dieser Datei gehen bei einer Neukompilierung des Quellschemas verloren.
// Generiert: 2017.01.06 um 12:38:40 PM CET
//
package oasis.names.tc.saml._1_0.assertion;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB binding for the SAML 1.0 assertion complex type {@code AttributeStatementType}: a
 * subject statement that carries one or more {@code Attribute} elements.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AttributeStatementType", propOrder = { "attribute" })
public class AttributeStatementType extends SubjectStatementAbstractType {

    @XmlElement(name = "Attribute", required = true)
    protected List<AttributeType> attribute;

    /**
     * Returns the live, lazily created list of {@link AttributeType} elements. Modifications
     * are reflected directly in this JAXB object, which is why no setter exists; add new
     * attributes via {@code getAttribute().add(...)}.
     *
     * @return the live attribute list, never {@code null}.
     */
    public List<AttributeType> getAttribute() {
        if (attribute == null) {
            attribute = new ArrayList<AttributeType>();
        }
        return attribute;
    }

}
| apache-2.0 |
ichaki5748/netty | buffer/src/test/java/io/netty/buffer/AbstractCompositeByteBufTest.java | 36659 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.ReferenceCountUtil;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import static io.netty.buffer.Unpooled.*;
import static io.netty.util.ReferenceCountUtil.*;
import static io.netty.util.internal.EmptyArrays.*;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
* An abstract test class for composite channel buffers
*/
public abstract class AbstractCompositeByteBufTest extends AbstractByteBufTest {
// Byte order applied to every composite buffer created by newBuffer(); never null.
private final ByteOrder order;

/**
 * @param order the byte order the composite buffers under test should use; must not be
 *              {@code null}.
 */
protected AbstractCompositeByteBufTest(ByteOrder order) {
    if (order == null) {
        throw new NullPointerException("order");
    }
    this.order = order;
}
@Override
protected ByteBuf newBuffer(int length) {
    // Build a deliberately fragmented composite: each 45-byte chunk consists of nine
    // component buffers of sizes 1..9, each preceded (and the chunk followed) by an
    // empty buffer, so component boundaries land in awkward places.
    List<ByteBuf> components = new ArrayList<ByteBuf>();
    for (int i = 0; i < length + 45; i += 45) {
        for (int size = 1; size <= 9; size++) {
            components.add(EMPTY_BUFFER);
            components.add(wrappedBuffer(new byte[size]));
        }
        components.add(EMPTY_BUFFER);
    }

    ByteBuf buf = wrappedBuffer(
            Integer.MAX_VALUE, components.toArray(new ByteBuf[components.size()])).order(order);

    // Truncate to the requested capacity.
    buf.capacity(length);

    assertEquals(length, buf.capacity());
    assertEquals(length, buf.readableBytes());
    assertFalse(buf.isWritable());
    buf.writerIndex(0);
    return buf;
}
// Composite buffer does not waste bandwidth on discardReadBytes, but
// the test will fail in strict mode. Returning false relaxes the base-class
// assertion about where the writable region ends up after a discard.
@Override
protected boolean discardReadBytesDoesNotMoveWritableBytes() {
    return false;
}
/**
 * Tests the "getBufferFor" method: componentAtOffset must return a non-empty component
 * for every valid byte offset in the composite.
 */
@Test
public void testComponentAtOffset() {
    CompositeByteBuf buf = releaseLater((CompositeByteBuf) wrappedBuffer(new byte[]{1, 2, 3, 4, 5},
            new byte[]{4, 5, 6, 7, 8, 9, 26}));

    //Ensure that a random place will be fine
    assertEquals(5, buf.componentAtOffset(2).capacity());

    //Loop through each byte
    byte index = 0;

    while (index < buf.capacity()) {
        ByteBuf _buf = buf.componentAtOffset(index++);
        assertNotNull(_buf);
        assertTrue(_buf.capacity() > 0);
        // getByte at both ends must not throw; the return value itself is irrelevant.
        assertNotNull(_buf.getByte(0));
        assertNotNull(_buf.getByte(_buf.readableBytes() - 1));
    }
}
// Verifies that discardReadBytes on a two-component composite (b) behaves exactly like
// the same operation on an equivalent single contiguous buffer (a): reader/writer
// indices, marks, and content must stay in lock-step throughout.
@Test
public void testDiscardReadBytes3() {
    ByteBuf a, b;
    a = wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order);
    b = releaseLater(wrappedBuffer(
            wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 0, 5).order(order),
            wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 5, 5).order(order)));
    // Put both buffers into an identical non-trivial state: advanced indices plus marks.
    a.skipBytes(6);
    a.markReaderIndex();
    b.skipBytes(6);
    b.markReaderIndex();
    assertEquals(a.readerIndex(), b.readerIndex());
    a.readerIndex(a.readerIndex() - 1);
    b.readerIndex(b.readerIndex() - 1);
    assertEquals(a.readerIndex(), b.readerIndex());
    a.writerIndex(a.writerIndex() - 1);
    a.markWriterIndex();
    b.writerIndex(b.writerIndex() - 1);
    b.markWriterIndex();
    assertEquals(a.writerIndex(), b.writerIndex());
    a.writerIndex(a.writerIndex() + 1);
    b.writerIndex(b.writerIndex() + 1);
    assertEquals(a.writerIndex(), b.writerIndex());
    assertTrue(ByteBufUtil.equals(a, b));
    // now discard
    a.discardReadBytes();
    b.discardReadBytes();
    assertEquals(a.readerIndex(), b.readerIndex());
    assertEquals(a.writerIndex(), b.writerIndex());
    assertTrue(ByteBufUtil.equals(a, b));
    // The marks must also have been adjusted consistently by the discard.
    a.resetReaderIndex();
    b.resetReaderIndex();
    assertEquals(a.readerIndex(), b.readerIndex());
    a.resetWriterIndex();
    b.resetWriterIndex();
    assertEquals(a.writerIndex(), b.writerIndex());
    assertTrue(ByteBufUtil.equals(a, b));
}
// A composite created with maxNumComponents = 2 must automatically consolidate its
// components into one backing array once a third component is added.
@Test
public void testAutoConsolidation() {
    CompositeByteBuf buf = releaseLater(compositeBuffer(2));

    buf.addComponent(wrappedBuffer(new byte[] { 1 }));
    assertEquals(1, buf.numComponents());

    buf.addComponent(wrappedBuffer(new byte[] { 2, 3 }));
    assertEquals(2, buf.numComponents());

    // Third add exceeds maxNumComponents -> auto-consolidation down to one component.
    buf.addComponent(wrappedBuffer(new byte[] { 4, 5, 6 }));

    assertEquals(1, buf.numComponents());
    assertTrue(buf.hasArray());
    assertNotNull(buf.array());
    assertEquals(0, buf.arrayOffset());
}
// nioBuffer(offset, length) over a multi-component composite must yield a single
// contiguous ByteBuffer containing all component bytes in order.
@Test
public void testCompositeToSingleBuffer() {
    CompositeByteBuf buf = releaseLater(compositeBuffer(3));

    buf.addComponent(wrappedBuffer(new byte[] {1, 2, 3}));
    assertEquals(1, buf.numComponents());

    buf.addComponent(wrappedBuffer(new byte[] {4}));
    assertEquals(2, buf.numComponents());

    buf.addComponent(wrappedBuffer(new byte[] {5, 6}));
    assertEquals(3, buf.numComponents());

    // NOTE: hard-coding 6 here, since it seems like addComponent doesn't bump the writer index.
    // I'm unsure as to whether or not this is correct behavior
    ByteBuffer nioBuffer = buf.nioBuffer(0, 6);
    byte[] bytes = nioBuffer.array();
    assertEquals(6, bytes.length);
    assertArrayEquals(new byte[] {1, 2, 3, 4, 5, 6}, bytes);
}
// Explicit consolidate() must merge all components into a single array-backed component,
// even when the component limit (Integer.MAX_VALUE) would never trigger auto-consolidation.
@Test
public void testFullConsolidation() {
    CompositeByteBuf buf = releaseLater(compositeBuffer(Integer.MAX_VALUE));
    buf.addComponent(wrappedBuffer(new byte[] { 1 }));
    buf.addComponent(wrappedBuffer(new byte[] { 2, 3 }));
    buf.addComponent(wrappedBuffer(new byte[] { 4, 5, 6 }));
    buf.consolidate();

    assertEquals(1, buf.numComponents());
    assertTrue(buf.hasArray());
    assertNotNull(buf.array());
    assertEquals(0, buf.arrayOffset());
}
// consolidate(cIndex, numComponents) must merge only the addressed component range
// (here components 1 and 2) and leave the surrounding components untouched.
@Test
public void testRangedConsolidation() {
    CompositeByteBuf buf = releaseLater(compositeBuffer(Integer.MAX_VALUE));
    buf.addComponent(wrappedBuffer(new byte[] { 1 }));
    buf.addComponent(wrappedBuffer(new byte[] { 2, 3 }));
    buf.addComponent(wrappedBuffer(new byte[] { 4, 5, 6 }));
    buf.addComponent(wrappedBuffer(new byte[] { 7, 8, 9, 10 }));
    buf.consolidate(1, 2);

    assertEquals(3, buf.numComponents());
    assertEquals(wrappedBuffer(new byte[] { 1 }), buf.component(0));
    assertEquals(wrappedBuffer(new byte[] { 2, 3, 4, 5, 6 }), buf.component(1));
    assertEquals(wrappedBuffer(new byte[] { 7, 8, 9, 10 }), buf.component(2));
}
    @Test
    public void testCompositeWrappedBuffer() {
        // Wrapping two readable buffers must expose their combined readable
        // bytes and report one NIO buffer per underlying component.
        ByteBuf header = releaseLater(buffer(12)).order(order);
        ByteBuf payload = releaseLater(buffer(512)).order(order);
        header.writeBytes(new byte[12]);
        payload.writeBytes(new byte[512]);
        ByteBuf buffer = releaseLater(wrappedBuffer(header, payload));
        assertEquals(12, header.readableBytes());
        assertEquals(512, payload.readableBytes());
        assertEquals(12 + 512, buffer.readableBytes());
        assertEquals(2, buffer.nioBufferCount());
    }
    @Test
    public void testSeveralBuffersEquals() {
        // Checks ByteBufUtil.equals(...) for composites built from several
        // wrapped arrays, covering every combination of equal/unequal content,
        // matching/offset start indices, and short vs. long lengths.
        ByteBuf a, b;
        // XXX Same tests with several buffers in wrappedCheckedBuffer
        // Different length.
        a = releaseLater(wrappedBuffer(new byte[] { 1 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 1 }).order(order),
                wrappedBuffer(new byte[] { 2 }).order(order)));
        assertFalse(ByteBufUtil.equals(a, b));
        // Same content, same firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[]{1}).order(order),
                wrappedBuffer(new byte[]{2}).order(order),
                wrappedBuffer(new byte[]{3}).order(order)));
        assertTrue(ByteBufUtil.equals(a, b));
        // Same content, different firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 2).order(order),
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1).order(order)));
        assertTrue(ByteBufUtil.equals(a, b));
        // Different content, same firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 1, 2 }).order(order),
                wrappedBuffer(new byte[] { 4 }).order(order)));
        assertFalse(ByteBufUtil.equals(a, b));
        // Different content, different firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 2).order(order),
                wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1).order(order)));
        assertFalse(ByteBufUtil.equals(a, b));
        // Same content, same firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 1, 2, 3 }).order(order),
                wrappedBuffer(new byte[] { 4, 5, 6 }).order(order),
                wrappedBuffer(new byte[] { 7, 8, 9, 10 }).order(order)));
        assertTrue(ByteBufUtil.equals(a, b));
        // Same content, different firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 5).order(order),
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5).order(order)));
        assertTrue(ByteBufUtil.equals(a, b));
        // Different content, same firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }).order(order),
                wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 }).order(order)));
        assertFalse(ByteBufUtil.equals(a, b));
        // Different content, different firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }).order(order));
        b = releaseLater(wrappedBuffer(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 5).order(order),
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5).order(order)));
        assertFalse(ByteBufUtil.equals(a, b));
    }
@Test
public void testWrappedBuffer() {
assertEquals(16, wrappedBuffer(wrappedBuffer(ByteBuffer.allocateDirect(16))).capacity());
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
wrappedBuffer(wrappedBuffer(new byte[][] { new byte[] { 1, 2, 3 } }).order(order)));
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
releaseLater(wrappedBuffer(wrappedBuffer(
new byte[] { 1 },
new byte[] { 2 },
new byte[] { 3 }).order(order))));
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
wrappedBuffer(new ByteBuf[] {
wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)
}));
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
releaseLater(wrappedBuffer(
wrappedBuffer(new byte[] { 1 }).order(order),
wrappedBuffer(new byte[] { 2 }).order(order),
wrappedBuffer(new byte[] { 3 }).order(order))));
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
wrappedBuffer(wrappedBuffer(new ByteBuffer[] {
ByteBuffer.wrap(new byte[] { 1, 2, 3 })
})));
assertEquals(
wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3 }).order(order)),
releaseLater(wrappedBuffer(wrappedBuffer(
ByteBuffer.wrap(new byte[] { 1 }),
ByteBuffer.wrap(new byte[] { 2 }),
ByteBuffer.wrap(new byte[] { 3 })))));
}
    @Test
    public void testWrittenBuffersEquals() {
        // Same equality matrix as testSeveralBuffersEquals, but buffer 'b' is
        // assembled by rewinding writerIndex into pre-sized backing space and
        // appending via writeBytes, so equality is checked against written
        // (rather than wrapped) composites. The writerIndex adjustments are
        // order-sensitive: each rewind makes room for the bytes written next.
        //XXX Same tests than testEquals with written AggregateChannelBuffers
        ByteBuf a, b;
        // Different length.
        a = releaseLater(wrappedBuffer(new byte[] { 1 })).order(order);
        b = releaseLater(wrappedBuffer(wrappedBuffer(new byte[] { 1 }, new byte[1])).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 1);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 2 })).order(order));
        assertFalse(ByteBufUtil.equals(a, b));
        // Same content, same firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(wrappedBuffer(new byte[] { 1 }, new byte[2]))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 2);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 2 })).order(order));
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 3 })).order(order));
        assertTrue(ByteBufUtil.equals(a, b));
        // Same content, different firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 1, 3))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 1);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 0, 1, 2, 3, 4 }, 3, 1)).order(order));
        assertTrue(ByteBufUtil.equals(a, b));
        // Different content, same firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(wrappedBuffer(new byte[] { 1, 2 }, new byte[1])).order(order)));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 1);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 4 })).order(order));
        assertFalse(ByteBufUtil.equals(a, b));
        // Different content, different firstIndex, short length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 1, 3))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 1);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 0, 1, 2, 4, 5 }, 3, 1)).order(order));
        assertFalse(ByteBufUtil.equals(a, b));
        // Same content, same firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(wrappedBuffer(new byte[] { 1, 2, 3 }, new byte[7]))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 7);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 4, 5, 6 })).order(order));
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 7, 8, 9, 10 })).order(order));
        assertTrue(ByteBufUtil.equals(a, b));
        // Same content, different firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 1, 10))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 5);
        b.writeBytes(releaseLater(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}, 6, 5)).order(order));
        assertTrue(ByteBufUtil.equals(a, b));
        // Different content, same firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })).order(order);
        b = releaseLater(wrappedBuffer(wrappedBuffer(new byte[] { 1, 2, 3, 4, 6 }, new byte[5])).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 5);
        b.writeBytes(releaseLater(wrappedBuffer(new byte[] { 7, 8, 5, 9, 10 })).order(order));
        assertFalse(ByteBufUtil.equals(a, b));
        // Different content, different firstIndex, long length.
        a = releaseLater(wrappedBuffer(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })).order(order);
        b = releaseLater(wrappedBuffer(releaseLater(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 1, 10))).order(order));
        // to enable writeBytes
        b.writerIndex(b.writerIndex() - 5);
        b.writeBytes(releaseLater(
                wrappedBuffer(new byte[] { 0, 1, 2, 3, 4, 6, 7, 8, 5, 9, 10, 11 }, 6, 5)).order(order));
        assertFalse(ByteBufUtil.equals(a, b));
    }
@Test
public void testEmptyBuffer() {
ByteBuf b = releaseLater(wrappedBuffer(new byte[]{1, 2}, new byte[]{3, 4}));
b.readBytes(new byte[4]);
b.readBytes(EMPTY_BYTES);
}
// Test for https://github.com/netty/netty/issues/1060
@Test
public void testReadWithEmptyCompositeBuffer() {
ByteBuf buf = releaseLater(compositeBuffer());
int n = 65;
for (int i = 0; i < n; i ++) {
buf.writeByte(1);
assertEquals(1, buf.readByte());
}
}
    @Test
    public void testComponentMustBeSlice() {
        // A component added with non-trivial reader/writer indices must be
        // stored as a slice whose capacity equals the original readable range.
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        buf.addComponent(buffer(4).setIndex(1, 3));
        assertThat(buf.component(0), is(instanceOf(AbstractUnpooledSlicedByteBuf.class)));
        assertThat(buf.component(0).capacity(), is(2));
        assertThat(buf.component(0).maxCapacity(), is(2));
    }
    @Test
    public void testReferenceCounts1() {
        // addComponents(ByteBuf...) must take over the callers' references
        // without retaining the components again.
        ByteBuf c1 = buffer().writeByte(1);
        ByteBuf c2 = buffer().writeByte(2).retain();
        ByteBuf c3 = buffer().writeByte(3).retain(2);
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        assertThat(buf.refCnt(), is(1));
        buf.addComponents(c1, c2, c3);
        assertThat(buf.refCnt(), is(1));
        // Ensure that c[123]'s refCount did not change.
        assertThat(c1.refCnt(), is(1));
        assertThat(c2.refCnt(), is(2));
        assertThat(c3.refCnt(), is(3));
        assertThat(buf.component(0).refCnt(), is(1));
        assertThat(buf.component(1).refCnt(), is(2));
        assertThat(buf.component(2).refCnt(), is(3));
        // Drop the extra references taken above; the composite owns the rest.
        c3.release(2);
        c2.release();
    }
    @Test
    public void testReferenceCounts2() {
        // Nesting one composite inside another: releasing the outer composite
        // must cascade and release the inner one and its components.
        ByteBuf c1 = buffer().writeByte(1);
        ByteBuf c2 = buffer().writeByte(2).retain();
        ByteBuf c3 = buffer().writeByte(3).retain(2);
        CompositeByteBuf bufA = compositeBuffer();
        bufA.addComponents(c1, c2, c3).writerIndex(3);
        CompositeByteBuf bufB = compositeBuffer();
        bufB.addComponents(bufA);
        // Ensure that bufA.refCnt() did not change.
        assertThat(bufA.refCnt(), is(1));
        // Ensure that c[123]'s refCnt did not change.
        assertThat(c1.refCnt(), is(1));
        assertThat(c2.refCnt(), is(2));
        assertThat(c3.refCnt(), is(3));
        // This should decrease bufA.refCnt().
        bufB.release();
        assertThat(bufB.refCnt(), is(0));
        // Ensure bufA.refCnt() changed.
        assertThat(bufA.refCnt(), is(0));
        // Ensure that c[123]'s refCnt also changed due to the deallocation of bufA.
        assertThat(c1.refCnt(), is(0));
        assertThat(c2.refCnt(), is(1));
        assertThat(c3.refCnt(), is(2));
        // Drop the remaining extra references taken at the top.
        c3.release(2);
        c2.release();
    }
    @Test
    public void testReferenceCounts3() {
        // Same contract as testReferenceCounts1, but via the
        // addComponents(Iterable) overload.
        ByteBuf c1 = buffer().writeByte(1);
        ByteBuf c2 = buffer().writeByte(2).retain();
        ByteBuf c3 = buffer().writeByte(3).retain(2);
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        assertThat(buf.refCnt(), is(1));
        List<ByteBuf> components = new ArrayList<ByteBuf>();
        Collections.addAll(components, c1, c2, c3);
        buf.addComponents(components);
        // Ensure that c[123]'s refCount did not change.
        assertThat(c1.refCnt(), is(1));
        assertThat(c2.refCnt(), is(2));
        assertThat(c3.refCnt(), is(3));
        assertThat(buf.component(0).refCnt(), is(1));
        assertThat(buf.component(1).refCnt(), is(2));
        assertThat(buf.component(2).refCnt(), is(3));
        // Drop the extra references taken above; the composite owns the rest.
        c3.release(2);
        c2.release();
    }
    @Test
    public void testNestedLayout() {
        // A slice of a nested composite spanning the boundary between two
        // inner components must be exposed as two NIO buffers, one byte each.
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        buf.addComponent(
                compositeBuffer()
                        .addComponent(wrappedBuffer(new byte[]{1, 2}))
                        .addComponent(wrappedBuffer(new byte[]{3, 4})).slice(1, 2));
        ByteBuffer[] nioBuffers = buf.nioBuffers(0, 2);
        assertThat(nioBuffers.length, is(2));
        assertThat(nioBuffers[0].remaining(), is(1));
        assertThat(nioBuffers[0].get(), is((byte) 2));
        assertThat(nioBuffers[1].remaining(), is(1));
        assertThat(nioBuffers[1].get(), is((byte) 3));
    }
@Test
public void testRemoveLastComponent() {
CompositeByteBuf buf = releaseLater(compositeBuffer());
buf.addComponent(wrappedBuffer(new byte[]{1, 2}));
assertEquals(1, buf.numComponents());
buf.removeComponent(0);
assertEquals(0, buf.numComponents());
}
@Test
public void testCopyEmpty() {
CompositeByteBuf buf = releaseLater(compositeBuffer());
assertEquals(0, buf.numComponents());
assertEquals(0, releaseLater(buf.copy()).readableBytes());
}
@Test
public void testDuplicateEmpty() {
CompositeByteBuf buf = releaseLater(compositeBuffer());
assertEquals(0, buf.numComponents());
assertEquals(0, releaseLater(buf.duplicate()).readableBytes());
}
@Test
public void testRemoveLastComponentWithOthersLeft() {
CompositeByteBuf buf = releaseLater(compositeBuffer());
buf.addComponent(wrappedBuffer(new byte[]{1, 2}));
buf.addComponent(wrappedBuffer(new byte[]{1, 2}));
assertEquals(2, buf.numComponents());
buf.removeComponent(1);
assertEquals(1, buf.numComponents());
}
    @Test
    public void testGatheringWritesHeap() throws Exception {
        // Gathering write over two unpooled heap components.
        testGatheringWrites(buffer().order(order), buffer().order(order));
    }
    @Test
    public void testGatheringWritesDirect() throws Exception {
        // Gathering write over two unpooled direct components.
        testGatheringWrites(directBuffer().order(order), directBuffer().order(order));
    }
    @Test
    public void testGatheringWritesMixes() throws Exception {
        // Gathering write over one heap and one direct component.
        testGatheringWrites(buffer().order(order), directBuffer().order(order));
    }
    @Test
    public void testGatheringWritesHeapPooled() throws Exception {
        // Gathering write over two pooled heap components.
        testGatheringWrites(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.heapBuffer().order(order));
    }
    @Test
    public void testGatheringWritesDirectPooled() throws Exception {
        // Gathering write over two pooled direct components.
        testGatheringWrites(PooledByteBufAllocator.DEFAULT.directBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order));
    }
    @Test
    public void testGatheringWritesMixesPooled() throws Exception {
        // Gathering write over one pooled heap and one pooled direct component.
        testGatheringWrites(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order));
    }
    /**
     * Shared driver for the gathering-write tests above: composes the two
     * buffers (each pre-filled with two bytes), restricts the readable window
     * to [1, 3), writes it to a gathering channel, and verifies the channel
     * received exactly the bytes at offsets 1..2 of the composite.
     */
    private static void testGatheringWrites(ByteBuf buf1, ByteBuf buf2) throws Exception {
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        buf.addComponent(buf1.writeBytes(new byte[]{1, 2}));
        buf.addComponent(buf2.writeBytes(new byte[]{1, 2}));
        buf.writerIndex(3);
        buf.readerIndex(1);
        TestGatheringByteChannel channel = new TestGatheringByteChannel();
        buf.readBytes(channel, 2);
        byte[] data = new byte[2];
        buf.getBytes(1, data);
        assertArrayEquals(data, channel.writtenBytes());
    }
    @Test
    public void testGatheringWritesPartialHeap() throws Exception {
        // Partial gathering write, heap components, unsliced.
        testGatheringWritesPartial(buffer().order(order), buffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialDirect() throws Exception {
        // Partial gathering write, direct components, unsliced.
        testGatheringWritesPartial(directBuffer().order(order), directBuffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialMixes() throws Exception {
        // Partial gathering write, mixed heap/direct components, unsliced.
        testGatheringWritesPartial(buffer().order(order), directBuffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialHeapSlice() throws Exception {
        // Partial gathering write, heap components, sliced.
        testGatheringWritesPartial(buffer().order(order), buffer().order(order), true);
    }
    @Test
    public void testGatheringWritesPartialDirectSlice() throws Exception {
        // Partial gathering write, direct components, sliced.
        testGatheringWritesPartial(directBuffer().order(order), directBuffer().order(order), true);
    }
    @Test
    public void testGatheringWritesPartialMixesSlice() throws Exception {
        // Partial gathering write, mixed heap/direct components, sliced.
        testGatheringWritesPartial(buffer().order(order), directBuffer().order(order), true);
    }
    @Test
    public void testGatheringWritesPartialHeapPooled() throws Exception {
        // Partial gathering write, pooled heap components, unsliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.heapBuffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialDirectPooled() throws Exception {
        // Partial gathering write, pooled direct components, unsliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.directBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialMixesPooled() throws Exception {
        // Partial gathering write, mixed pooled heap/direct components, unsliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order), false);
    }
    @Test
    public void testGatheringWritesPartialHeapPooledSliced() throws Exception {
        // Partial gathering write, pooled heap components, sliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.heapBuffer().order(order), true);
    }
    @Test
    public void testGatheringWritesPartialDirectPooledSliced() throws Exception {
        // Partial gathering write, pooled direct components, sliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.directBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order), true);
    }
    @Test
    public void testGatheringWritesPartialMixesPooledSliced() throws Exception {
        // Partial gathering write, mixed pooled heap/direct components, sliced.
        testGatheringWritesPartial(PooledByteBufAllocator.DEFAULT.heapBuffer().order(order),
                PooledByteBufAllocator.DEFAULT.directBuffer().order(order), true);
    }
    /**
     * Shared driver for the partial gathering-write tests: the channel accepts
     * at most one byte per write, so the composite must be drained in a loop.
     * With {@code slice} the components are pre-trimmed via reader/writer
     * index before composition; without it the composite's own indices are
     * narrowed instead. Either way the channel must end up with the six
     * readable bytes.
     */
    private static void testGatheringWritesPartial(ByteBuf buf1, ByteBuf buf2, boolean slice) throws Exception {
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        buf1.writeBytes(new byte[]{1, 2, 3, 4});
        buf2.writeBytes(new byte[]{1, 2, 3, 4});
        if (slice) {
            buf1 = buf1.readerIndex(1).slice();
            buf2 = buf2.writerIndex(3).slice();
            buf.addComponent(buf1);
            buf.addComponent(buf2);
            buf.writerIndex(6);
        } else {
            buf.addComponent(buf1);
            buf.addComponent(buf2);
            buf.writerIndex(7);
            buf.readerIndex(1);
        }
        // Channel limit of 1 byte per write forces repeated partial writes.
        TestGatheringByteChannel channel = new TestGatheringByteChannel(1);
        while (buf.isReadable()) {
            buf.readBytes(channel, buf.readableBytes());
        }
        byte[] data = new byte[6];
        if (slice) {
            buf.getBytes(0, data);
        } else {
            buf.getBytes(1, data);
        }
        assertArrayEquals(data, channel.writtenBytes());
    }
    @Test
    public void testGatheringWritesSingleHeap() throws Exception {
        // Gathering write over a single heap component.
        testGatheringWritesSingleBuf(buffer().order(order));
    }
    @Test
    public void testGatheringWritesSingleDirect() throws Exception {
        // Gathering write over a single direct component.
        testGatheringWritesSingleBuf(directBuffer().order(order));
    }
    /**
     * Shared driver for the single-component gathering-write tests: composes
     * one four-byte buffer, narrows the readable window to [1, 3), writes it
     * to a gathering channel, and verifies the two bytes that arrived.
     */
    private static void testGatheringWritesSingleBuf(ByteBuf buf1) throws Exception {
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        buf.addComponent(buf1.writeBytes(new byte[]{1, 2, 3, 4}));
        buf.writerIndex(3);
        buf.readerIndex(1);
        TestGatheringByteChannel channel = new TestGatheringByteChannel();
        buf.readBytes(channel, 2);
        byte[] data = new byte[2];
        buf.getBytes(1, data);
        assertArrayEquals(data, channel.writtenBytes());
    }
    @Override
    @Test
    public void testInternalNioBuffer() {
        // Intentionally overridden as a no-op: the superclass test does not
        // apply to composite buffers.
        // ignore
    }
    // NOTE(review): method name should be testIsDirectMultipleBufs per
    // camelCase convention; left unchanged to avoid altering the public name.
    @Test
    public void testisDirectMultipleBufs() {
        // isDirect() must be true only while every component is direct.
        CompositeByteBuf buf = releaseLater(compositeBuffer());
        assertFalse(buf.isDirect());
        buf.addComponent(directBuffer().writeByte(1));
        assertTrue(buf.isDirect());
        buf.addComponent(directBuffer().writeByte(1));
        assertTrue(buf.isDirect());
        // Adding a heap component flips the composite back to non-direct.
        buf.addComponent(buffer().writeByte(1));
        assertFalse(buf.isDirect());
    }
// See https://github.com/netty/netty/issues/1976
    @Test
    public void testDiscardSomeReadBytes() {
        // Smoke test (netty#1976): discardSomeReadBytes() after fully reading
        // a many-component composite must complete without throwing.
        CompositeByteBuf cbuf = releaseLater(compositeBuffer());
        int len = 8 * 4;
        for (int i = 0; i < len; i += 4) {
            ByteBuf buf = buffer().writeInt(i);
            cbuf.capacity(cbuf.writerIndex()).addComponent(buf).writerIndex(i + 4);
        }
        cbuf.writeByte(1);
        byte[] me = new byte[len];
        cbuf.readBytes(me);
        cbuf.readByte();
        cbuf.discardSomeReadBytes();
    }
    @Test
    public void testAddEmptyBufferRelease() {
        // Even a zero-byte component must be released when the composite is.
        CompositeByteBuf cbuf = compositeBuffer();
        ByteBuf buf = buffer();
        assertEquals(1, buf.refCnt());
        cbuf.addComponent(buf);
        assertEquals(1, buf.refCnt());
        cbuf.release();
        assertEquals(0, buf.refCnt());
    }
    @Test
    public void testAddEmptyBuffersRelease() {
        // addComponents(...) mixing empty and non-empty buffers: releasing the
        // composite must release all of them, including the empty ones.
        CompositeByteBuf cbuf = compositeBuffer();
        ByteBuf buf = buffer();
        ByteBuf buf2 = buffer().writeInt(1);
        ByteBuf buf3 = buffer();
        assertEquals(1, buf.refCnt());
        assertEquals(1, buf2.refCnt());
        assertEquals(1, buf3.refCnt());
        cbuf.addComponents(buf, buf2, buf3);
        assertEquals(1, buf.refCnt());
        assertEquals(1, buf2.refCnt());
        assertEquals(1, buf3.refCnt());
        cbuf.release();
        assertEquals(0, buf.refCnt());
        assertEquals(0, buf2.refCnt());
        assertEquals(0, buf3.refCnt());
    }
    @Test
    public void testAddEmptyBufferInMiddle() {
        // An EMPTY_BUFFER component between two one-byte components must not
        // affect readable bytes; it stays addressable by component index but
        // is skipped when addressing by byte offset.
        CompositeByteBuf cbuf = compositeBuffer();
        ByteBuf buf1 = buffer().writeByte((byte) 1);
        cbuf.addComponent(true, buf1);
        ByteBuf buf2 = EMPTY_BUFFER;
        cbuf.addComponent(true, buf2);
        ByteBuf buf3 = buffer().writeByte((byte) 2);
        cbuf.addComponent(true, buf3);
        assertEquals(2, cbuf.readableBytes());
        assertEquals((byte) 1, cbuf.readByte());
        assertEquals((byte) 2, cbuf.readByte());
        assertSame(EMPTY_BUFFER, cbuf.internalComponent(1));
        assertNotSame(EMPTY_BUFFER, cbuf.internalComponentAtOffset(1));
        cbuf.release();
    }
    @Test
    public void testIterator() {
        // iterator() must visit each component in order and then fail with
        // NoSuchElementException once exhausted.
        CompositeByteBuf cbuf = compositeBuffer();
        cbuf.addComponent(EMPTY_BUFFER);
        cbuf.addComponent(EMPTY_BUFFER);
        Iterator<ByteBuf> it = cbuf.iterator();
        assertTrue(it.hasNext());
        assertSame(EMPTY_BUFFER, it.next());
        assertTrue(it.hasNext());
        assertSame(EMPTY_BUFFER, it.next());
        assertFalse(it.hasNext());
        try {
            it.next();
            fail();
        } catch (NoSuchElementException e) {
            //Expected
        }
        cbuf.release();
    }
@Test
public void testEmptyIterator() {
CompositeByteBuf cbuf = compositeBuffer();
Iterator<ByteBuf> it = cbuf.iterator();
assertFalse(it.hasNext());
try {
it.next();
fail();
} catch (NoSuchElementException e) {
//Expected
}
cbuf.release();
}
    @Test(expected = ConcurrentModificationException.class)
    public void testIteratorConcurrentModificationAdd() {
        // Adding a component after obtaining an iterator must make the next
        // iteration step fail fast with ConcurrentModificationException.
        CompositeByteBuf cbuf = compositeBuffer();
        cbuf.addComponent(EMPTY_BUFFER);
        Iterator<ByteBuf> it = cbuf.iterator();
        cbuf.addComponent(EMPTY_BUFFER);
        assertTrue(it.hasNext());
        try {
            it.next();
        } finally {
            // Release even though next() is expected to throw.
            cbuf.release();
        }
    }
    @Test(expected = ConcurrentModificationException.class)
    public void testIteratorConcurrentModificationRemove() {
        // Removing a component after obtaining an iterator must make the next
        // iteration step fail fast with ConcurrentModificationException.
        CompositeByteBuf cbuf = compositeBuffer();
        cbuf.addComponent(EMPTY_BUFFER);
        Iterator<ByteBuf> it = cbuf.iterator();
        cbuf.removeComponent(0);
        assertTrue(it.hasNext());
        try {
            it.next();
        } finally {
            // Release even though next() is expected to throw.
            cbuf.release();
        }
    }
@Test
public void testReleasesItsComponents() {
ByteBuf buffer = PooledByteBufAllocator.DEFAULT.buffer(); // 1
buffer.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10});
ByteBuf s1 = buffer.readSlice(2).retain(); // 2
ByteBuf s2 = s1.readSlice(2).retain(); // 3
ByteBuf s3 = s2.readSlice(2).retain(); // 4
ByteBuf s4 = s3.readSlice(2).retain(); // 5
ByteBuf composite = PooledByteBufAllocator.DEFAULT.compositeBuffer()
.addComponent(s1)
.addComponents(s2, s3, s4)
.order(ByteOrder.LITTLE_ENDIAN);
assertEquals(composite.refCnt(), 1);
assertEquals(buffer.refCnt(), 5);
// releasing composite should release the 4 components
ReferenceCountUtil.release(composite);
assertEquals(composite.refCnt(), 0);
assertEquals(buffer.refCnt(), 1);
// last remaining ref to buffer
ReferenceCountUtil.release(buffer);
assertEquals(buffer.refCnt(), 0);
}
}
| apache-2.0 |
bobbyyit/HelloWorldAndroid | app/src/androidTest/java/com/cornpops/bobbyyit/firstapp/ApplicationTest.java | 361 | package com.cornpops.bobbyyit.firstapp;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        // Run the instrumentation test harness against the stock Application class.
        super(Application.class);
    }
}
googleapis/java-aiplatform | proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/MigrateResourceRequestOrBuilder.java | 6407 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/migration_service.proto
package com.google.cloud.aiplatform.v1beta1;
public interface MigrateResourceRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.MigrateResourceRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Config for migrating Version in ml.googleapis.com to Vertex AI's Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateMlEngineModelVersionConfig migrate_ml_engine_model_version_config = 1;
* </code>
*
* @return Whether the migrateMlEngineModelVersionConfig field is set.
*/
boolean hasMigrateMlEngineModelVersionConfig();
/**
*
*
* <pre>
* Config for migrating Version in ml.googleapis.com to Vertex AI's Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateMlEngineModelVersionConfig migrate_ml_engine_model_version_config = 1;
* </code>
*
* @return The migrateMlEngineModelVersionConfig.
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateMlEngineModelVersionConfig
getMigrateMlEngineModelVersionConfig();
/**
*
*
* <pre>
* Config for migrating Version in ml.googleapis.com to Vertex AI's Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateMlEngineModelVersionConfig migrate_ml_engine_model_version_config = 1;
* </code>
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest
.MigrateMlEngineModelVersionConfigOrBuilder
getMigrateMlEngineModelVersionConfigOrBuilder();
/**
*
*
* <pre>
* Config for migrating Model in automl.googleapis.com to Vertex AI's
* Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlModelConfig migrate_automl_model_config = 2;
* </code>
*
* @return Whether the migrateAutomlModelConfig field is set.
*/
boolean hasMigrateAutomlModelConfig();
/**
*
*
* <pre>
* Config for migrating Model in automl.googleapis.com to Vertex AI's
* Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlModelConfig migrate_automl_model_config = 2;
* </code>
*
* @return The migrateAutomlModelConfig.
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlModelConfig
getMigrateAutomlModelConfig();
/**
*
*
* <pre>
* Config for migrating Model in automl.googleapis.com to Vertex AI's
* Model.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlModelConfig migrate_automl_model_config = 2;
* </code>
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlModelConfigOrBuilder
getMigrateAutomlModelConfigOrBuilder();
/**
*
*
* <pre>
* Config for migrating Dataset in automl.googleapis.com to Vertex AI's
* Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlDatasetConfig migrate_automl_dataset_config = 3;
* </code>
*
* @return Whether the migrateAutomlDatasetConfig field is set.
*/
boolean hasMigrateAutomlDatasetConfig();
/**
*
*
* <pre>
* Config for migrating Dataset in automl.googleapis.com to Vertex AI's
* Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlDatasetConfig migrate_automl_dataset_config = 3;
* </code>
*
* @return The migrateAutomlDatasetConfig.
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlDatasetConfig
getMigrateAutomlDatasetConfig();
/**
*
*
* <pre>
* Config for migrating Dataset in automl.googleapis.com to Vertex AI's
* Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlDatasetConfig migrate_automl_dataset_config = 3;
* </code>
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateAutomlDatasetConfigOrBuilder
getMigrateAutomlDatasetConfigOrBuilder();
/**
*
*
* <pre>
* Config for migrating Dataset in datalabeling.googleapis.com to
* Vertex AI's Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateDataLabelingDatasetConfig migrate_data_labeling_dataset_config = 4;
* </code>
*
* @return Whether the migrateDataLabelingDatasetConfig field is set.
*/
boolean hasMigrateDataLabelingDatasetConfig();
/**
*
*
* <pre>
* Config for migrating Dataset in datalabeling.googleapis.com to
* Vertex AI's Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateDataLabelingDatasetConfig migrate_data_labeling_dataset_config = 4;
* </code>
*
* @return The migrateDataLabelingDatasetConfig.
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateDataLabelingDatasetConfig
getMigrateDataLabelingDatasetConfig();
/**
*
*
* <pre>
* Config for migrating Dataset in datalabeling.googleapis.com to
* Vertex AI's Dataset.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.MigrateResourceRequest.MigrateDataLabelingDatasetConfig migrate_data_labeling_dataset_config = 4;
* </code>
*/
com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest
.MigrateDataLabelingDatasetConfigOrBuilder
getMigrateDataLabelingDatasetConfigOrBuilder();
public com.google.cloud.aiplatform.v1beta1.MigrateResourceRequest.RequestCase getRequestCase();
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202105/CustomTargetingValueStatus.java | 1896 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202105;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CustomTargetingValue.Status.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="CustomTargetingValue.Status">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="ACTIVE"/>
* <enumeration value="INACTIVE"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "CustomTargetingValue.Status")
@XmlEnum
public enum CustomTargetingValueStatus {
    /**
     *
     * The object is active.
     *
     *
     */
    ACTIVE,
    /**
     *
     * The object is no longer active.
     *
     *
     */
    INACTIVE,
    /**
     *
     * The value returned if the actual value is not exposed by the requested
     * API version.
     *
     *
     */
    UNKNOWN;
    /** Returns the XML string value of this constant (identical to {@link #name()}). */
    public String value() {
        return name();
    }
    /**
     * Resolves an XML string value to its enum constant.
     * Propagates {@link IllegalArgumentException} from {@code valueOf} for unknown values.
     */
    public static CustomTargetingValueStatus fromValue(String v) {
        return valueOf(v);
    }
}
| apache-2.0 |
CHINA-JD/presto | presto-hive/src/test/java/com/facebook/presto/hive/AbstractTestHiveClient.java | 83180 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.hive.metastore.CachingHiveMetastore;
import com.facebook.presto.hive.metastore.HiveMetastore;
import com.facebook.presto.hive.orc.DwrfHiveRecordCursor;
import com.facebook.presto.hive.orc.DwrfRecordCursorProvider;
import com.facebook.presto.hive.orc.OrcHiveRecordCursor;
import com.facebook.presto.hive.orc.OrcPageSource;
import com.facebook.presto.hive.orc.OrcRecordCursorProvider;
import com.facebook.presto.hive.rcfile.RcFilePageSource;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorMetadata;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.ConnectorPageSourceProvider;
import com.facebook.presto.spi.ConnectorPartition;
import com.facebook.presto.spi.ConnectorPartitionResult;
import com.facebook.presto.spi.ConnectorRecordSinkProvider;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitManager;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Domain;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordPageSource;
import com.facebook.presto.spi.RecordSink;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SchemaTablePrefix;
import com.facebook.presto.spi.SerializableNativeValue;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.ViewNotFoundException;
import com.facebook.presto.spi.type.SqlDate;
import com.facebook.presto.spi.type.SqlTimestamp;
import com.facebook.presto.spi.type.SqlVarbinary;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.facebook.presto.type.ArrayType;
import com.facebook.presto.type.MapType;
import com.facebook.presto.type.TypeRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.net.HostAndPort;
import com.google.common.primitives.Ints;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import io.airlift.units.Duration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.ReaderWriterProfiler;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_INVALID_PARTITION_VALUE;
import static com.facebook.presto.hive.HiveSessionProperties.STORAGE_FORMAT_PROPERTY;
import static com.facebook.presto.hive.HiveStorageFormat.DWRF;
import static com.facebook.presto.hive.HiveStorageFormat.ORC;
import static com.facebook.presto.hive.HiveStorageFormat.PARQUET;
import static com.facebook.presto.hive.HiveStorageFormat.RCBINARY;
import static com.facebook.presto.hive.HiveStorageFormat.RCTEXT;
import static com.facebook.presto.hive.HiveStorageFormat.SEQUENCEFILE;
import static com.facebook.presto.hive.HiveStorageFormat.TEXTFILE;
import static com.facebook.presto.hive.HiveTestUtils.DEFAULT_HIVE_DATA_STREAM_FACTORIES;
import static com.facebook.presto.hive.HiveTestUtils.DEFAULT_HIVE_RECORD_CURSOR_PROVIDER;
import static com.facebook.presto.hive.HiveTestUtils.TYPE_MANAGER;
import static com.facebook.presto.hive.HiveTestUtils.getTypes;
import static com.facebook.presto.hive.HiveType.HIVE_INT;
import static com.facebook.presto.hive.HiveType.HIVE_STRING;
import static com.facebook.presto.hive.util.Types.checkType;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.HyperLogLogType.HYPER_LOG_LOG;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Maps.uniqueIndex;
import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@Test(groups = "hive")
public abstract class AbstractTestHiveClient
{
    // Shared session used by every metadata/split call in this test class.
    private static final ConnectorSession SESSION = new ConnectorSession("presto_test", UTC_KEY, ENGLISH, System.currentTimeMillis(), null);

    // Names that are guaranteed not to exist in the metastore.
    protected static final String INVALID_DATABASE = "totally_invalid_database_name";
    protected static final String INVALID_TABLE = "totally_invalid_table_name";
    protected static final String INVALID_COLUMN = "totally_invalid_column_name";

    // Storage formats exercised by the table-creation tests; subclasses may narrow this set.
    protected Set<HiveStorageFormat> createTableFormats = ImmutableSet.copyOf(HiveStorageFormat.values());

    protected String database;
    // Pre-existing tables that the test environment is expected to provide.
    protected SchemaTableName tablePartitionFormat;
    protected SchemaTableName tableUnpartitioned;
    protected SchemaTableName tableOffline;
    protected SchemaTableName tableOfflinePartition;
    protected SchemaTableName view;
    protected SchemaTableName invalidTable;
    protected SchemaTableName tableBucketedStringInt;
    protected SchemaTableName tableBucketedBigintBoolean;
    protected SchemaTableName tableBucketedDoubleFloat;
    protected SchemaTableName tablePartitionSchemaChange;
    protected SchemaTableName tablePartitionSchemaChangeNonCanonical;
    // Randomly named tables created (and dropped) by individual tests.
    protected SchemaTableName temporaryCreateTable;
    protected SchemaTableName temporaryCreateSampledTable;
    protected SchemaTableName temporaryCreateEmptyTable;
    protected SchemaTableName temporaryRenameTableOld;
    protected SchemaTableName temporaryRenameTableNew;
    protected SchemaTableName temporaryCreateView;
    protected ConnectorTableHandle invalidTableHandle;
    // Column handles for the partition keys of tablePartitionFormat.
    protected ColumnHandle dsColumn;
    protected ColumnHandle fileFormatColumn;
    protected ColumnHandle dummyColumn;
    protected ColumnHandle intColumn;
    protected ColumnHandle invalidColumnHandle;
    // Expected partition sets, populated in setupHive().
    protected Set<ConnectorPartition> partitions;
    protected Set<ConnectorPartition> unpartitionedPartitions;
    protected ConnectorPartition invalidPartition;
    protected DateTimeZone timeZone;
    // Connector machinery under test, wired up in setup().
    protected HdfsEnvironment hdfsEnvironment;
    protected ConnectorMetadata metadata;
    protected ConnectorSplitManager splitManager;
    protected ConnectorPageSourceProvider pageSourceProvider;
    protected ConnectorRecordSinkProvider recordSinkProvider;
    protected ExecutorService executor;
    @BeforeClass
    public void setUp()
            throws Exception
    {
        // Daemon threads so a forgotten shutdown cannot keep the JVM alive.
        executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
    }
@AfterClass
public void tearDown()
throws Exception
{
if (executor != null) {
executor.shutdownNow();
executor = null;
}
}
    /**
     * Populates the expected table names, column handles and partition sets for the
     * given database. Must be called before any test runs; does not touch the metastore.
     */
    protected void setupHive(String connectorId, String databaseName, String timeZoneId)
    {
        database = databaseName;
        // Tables the test environment is expected to have pre-created.
        tablePartitionFormat = new SchemaTableName(database, "presto_test_partition_format");
        tableUnpartitioned = new SchemaTableName(database, "presto_test_unpartitioned");
        tableOffline = new SchemaTableName(database, "presto_test_offline");
        tableOfflinePartition = new SchemaTableName(database, "presto_test_offline_partition");
        view = new SchemaTableName(database, "presto_test_view");
        invalidTable = new SchemaTableName(database, INVALID_TABLE);
        tableBucketedStringInt = new SchemaTableName(database, "presto_test_bucketed_by_string_int");
        tableBucketedBigintBoolean = new SchemaTableName(database, "presto_test_bucketed_by_bigint_boolean");
        tableBucketedDoubleFloat = new SchemaTableName(database, "presto_test_bucketed_by_double_float");
        tablePartitionSchemaChange = new SchemaTableName(database, "presto_test_partition_schema_change");
        tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "presto_test_partition_schema_change_non_canonical");
        // Randomized names so concurrent test runs cannot collide.
        temporaryCreateTable = new SchemaTableName(database, "tmp_presto_test_create_" + randomName());
        temporaryCreateSampledTable = new SchemaTableName(database, "tmp_presto_test_create_" + randomName());
        temporaryCreateEmptyTable = new SchemaTableName(database, "tmp_presto_test_create_" + randomName());
        temporaryRenameTableOld = new SchemaTableName(database, "tmp_presto_test_rename_" + randomName());
        temporaryRenameTableNew = new SchemaTableName(database, "tmp_presto_test_rename_" + randomName());
        temporaryCreateView = new SchemaTableName(database, "tmp_presto_test_create_" + randomName());
        invalidTableHandle = new HiveTableHandle("hive", database, INVALID_TABLE, SESSION);
        // Partition-key column handles for presto_test_partition_format (ds, file_format, dummy).
        dsColumn = new HiveColumnHandle(connectorId, "ds", 0, HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, true);
        fileFormatColumn = new HiveColumnHandle(connectorId, "file_format", 1, HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), -1, true);
        dummyColumn = new HiveColumnHandle(connectorId, "dummy", 2, HIVE_INT, parseTypeSignature(StandardTypes.BIGINT), -1, true);
        intColumn = new HiveColumnHandle(connectorId, "t_int", 0, HIVE_INT, parseTypeSignature(StandardTypes.BIGINT), -1, true);
        invalidColumnHandle = new HiveColumnHandle(connectorId, INVALID_COLUMN, 0, HIVE_STRING, parseTypeSignature(StandardTypes.VARCHAR), 0, false);
        // The four expected partitions of presto_test_partition_format, one per storage format.
        partitions = ImmutableSet.<ConnectorPartition>builder()
                .add(new HivePartition(tablePartitionFormat,
                        TupleDomain.<HiveColumnHandle>all(),
                        "ds=2012-12-29/file_format=textfile/dummy=1",
                        ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                                .put(dsColumn, new SerializableNativeValue(Slice.class, utf8Slice("2012-12-29")))
                                .put(fileFormatColumn, new SerializableNativeValue(Slice.class, utf8Slice("textfile")))
                                .put(dummyColumn, new SerializableNativeValue(Long.class, 1L))
                                .build(),
                        Optional.empty()))
                .add(new HivePartition(tablePartitionFormat,
                        TupleDomain.<HiveColumnHandle>all(),
                        "ds=2012-12-29/file_format=sequencefile/dummy=2",
                        ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                                .put(dsColumn, new SerializableNativeValue(Slice.class, utf8Slice("2012-12-29")))
                                .put(fileFormatColumn, new SerializableNativeValue(Slice.class, utf8Slice("sequencefile")))
                                .put(dummyColumn, new SerializableNativeValue(Long.class, 2L))
                                .build(),
                        Optional.empty()))
                .add(new HivePartition(tablePartitionFormat,
                        TupleDomain.<HiveColumnHandle>all(),
                        "ds=2012-12-29/file_format=rctext/dummy=3",
                        ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                                .put(dsColumn, new SerializableNativeValue(Slice.class, utf8Slice("2012-12-29")))
                                .put(fileFormatColumn, new SerializableNativeValue(Slice.class, utf8Slice("rctext")))
                                .put(dummyColumn, new SerializableNativeValue(Long.class, 3L))
                                .build(),
                        Optional.empty()))
                .add(new HivePartition(tablePartitionFormat,
                        TupleDomain.<HiveColumnHandle>all(),
                        "ds=2012-12-29/file_format=rcbinary/dummy=4",
                        ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                                .put(dsColumn, new SerializableNativeValue(Slice.class, utf8Slice("2012-12-29")))
                                .put(fileFormatColumn, new SerializableNativeValue(Slice.class, utf8Slice("rcbinary")))
                                .put(dummyColumn, new SerializableNativeValue(Long.class, 4L))
                                .build(),
                        Optional.empty()))
                .build();
        // An unpartitioned table surfaces as a single pseudo-partition.
        unpartitionedPartitions = ImmutableSet.<ConnectorPartition>of(new HivePartition(tableUnpartitioned, TupleDomain.<HiveColumnHandle>all()));
        invalidPartition = new HivePartition(invalidTable, TupleDomain.<HiveColumnHandle>all(), "unknown", ImmutableMap.<ColumnHandle, SerializableNativeValue>of(), Optional.empty());
        timeZone = DateTimeZone.forTimeZone(TimeZone.getTimeZone(timeZoneId));
    }
    // Convenience overload using default connector name and split limits.
    protected void setup(String host, int port, String databaseName, String timeZone)
    {
        setup(host, port, databaseName, timeZone, "hive-test", 100, 50);
    }
    /**
     * Wires up the full connector stack (metastore client, split manager, metadata,
     * record sink and page source providers) against the given metastore host/port.
     */
    protected void setup(String host, int port, String databaseName, String timeZoneId, String connectorName, int maxOutstandingSplits, int maxThreads)
    {
        setupHive(connectorName, databaseName, timeZoneId);
        HiveClientConfig hiveClientConfig = new HiveClientConfig();
        hiveClientConfig.setTimeZone(timeZoneId);
        // Optional SOCKS proxy for reaching a remote metastore from CI.
        String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
        if (proxy != null) {
            hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
        }
        HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
        HiveMetastore metastoreClient = new CachingHiveMetastore(hiveCluster, executor, Duration.valueOf("1m"), Duration.valueOf("15s"));
        HiveConnectorId connectorId = new HiveConnectorId(connectorName);
        HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig));
        hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveClientConfig);
        // Direct executor keeps split generation synchronous and deterministic in tests.
        splitManager = new HiveSplitManager(
                connectorId,
                metastoreClient,
                new NamenodeStats(),
                hdfsEnvironment,
                new HadoopDirectoryLister(),
                timeZone,
                newDirectExecutorService(),
                maxOutstandingSplits,
                hiveClientConfig.getMinPartitionBatchSize(),
                hiveClientConfig.getMaxPartitionBatchSize(),
                hiveClientConfig.getMaxSplitSize(),
                hiveClientConfig.getMaxInitialSplitSize(),
                hiveClientConfig.getMaxInitialSplits(),
                false,
                false,
                false);
        metadata = new HiveMetadata(
                connectorId,
                metastoreClient,
                hdfsEnvironment,
                timeZone,
                true,
                true,
                true,
                hiveClientConfig.getHiveStorageFormat(),
                new TypeRegistry(),
                splitManager);
        recordSinkProvider = new HiveRecordSinkProvider(hdfsEnvironment);
        pageSourceProvider = new HivePageSourceProvider(hiveClientConfig, hdfsEnvironment, DEFAULT_HIVE_RECORD_CURSOR_PROVIDER, DEFAULT_HIVE_DATA_STREAM_FACTORIES, TYPE_MANAGER);
    }
@Test
public void testGetDatabaseNames()
throws Exception
{
List<String> databases = metadata.listSchemaNames(SESSION);
assertTrue(databases.contains(database));
}
@Test
public void testGetTableNames()
throws Exception
{
List<SchemaTableName> tables = metadata.listTables(SESSION, database);
assertTrue(tables.contains(tablePartitionFormat));
assertTrue(tables.contains(tableUnpartitioned));
}
@Test
public void testGetAllTableNames()
throws Exception
{
List<SchemaTableName> tables = metadata.listTables(SESSION, null);
assertTrue(tables.contains(tablePartitionFormat));
assertTrue(tables.contains(tableUnpartitioned));
}
@Test
public void testGetAllTableColumns()
{
Map<SchemaTableName, List<ColumnMetadata>> allColumns = metadata.listTableColumns(SESSION, new SchemaTablePrefix());
assertTrue(allColumns.containsKey(tablePartitionFormat));
assertTrue(allColumns.containsKey(tableUnpartitioned));
}
@Test
public void testGetAllTableColumnsInSchema()
{
Map<SchemaTableName, List<ColumnMetadata>> allColumns = metadata.listTableColumns(SESSION, new SchemaTablePrefix(database));
assertTrue(allColumns.containsKey(tablePartitionFormat));
assertTrue(allColumns.containsKey(tableUnpartitioned));
}
@Test
public void testListUnknownSchema()
{
assertNull(metadata.getTableHandle(SESSION, new SchemaTableName(INVALID_DATABASE, INVALID_TABLE)));
assertEquals(metadata.listTables(SESSION, INVALID_DATABASE), ImmutableList.of());
assertEquals(metadata.listTableColumns(SESSION, new SchemaTablePrefix(INVALID_DATABASE, INVALID_TABLE)), ImmutableMap.of());
assertEquals(metadata.listViews(SESSION, INVALID_DATABASE), ImmutableList.of());
assertEquals(metadata.getViews(SESSION, new SchemaTablePrefix(INVALID_DATABASE, INVALID_TABLE)), ImmutableMap.of());
}
@Test
public void testGetPartitions()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
assertExpectedPartitions(partitionResult.getPartitions(), partitions);
}
@Test
public void testGetPartitionsWithBindings()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withColumnDomains(ImmutableMap.of(intColumn, Domain.singleValue(5L))));
assertExpectedPartitions(partitionResult.getPartitions(), partitions);
}
    // Asking for partitions of a nonexistent table must raise TableNotFoundException.
    @Test(expectedExceptions = TableNotFoundException.class)
    public void testGetPartitionsException()
            throws Exception
    {
        splitManager.getPartitions(invalidTableHandle, TupleDomain.<ColumnHandle>all());
    }
@Test
public void testGetPartitionNames()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
assertExpectedPartitions(partitionResult.getPartitions(), partitions);
}
protected void assertExpectedPartitions(List<ConnectorPartition> actualPartitions, Iterable<ConnectorPartition> expectedPartitions)
{
Map<String, ConnectorPartition> actualById = uniqueIndex(actualPartitions, ConnectorPartition::getPartitionId);
for (ConnectorPartition expected : expectedPartitions) {
assertInstanceOf(expected, HivePartition.class);
HivePartition expectedPartition = (HivePartition) expected;
ConnectorPartition actual = actualById.get(expectedPartition.getPartitionId());
assertEquals(actual, expected);
assertInstanceOf(actual, HivePartition.class);
HivePartition actualPartition = (HivePartition) actual;
assertNotNull(actualPartition, "partition " + expectedPartition.getPartitionId());
assertEquals(actualPartition.getPartitionId(), expectedPartition.getPartitionId());
assertEquals(actualPartition.getKeys(), expectedPartition.getKeys());
assertEquals(actualPartition.getTableName(), expectedPartition.getTableName());
assertEquals(actualPartition.getBucket(), expectedPartition.getBucket());
assertEquals(actualPartition.getTupleDomain(), expectedPartition.getTupleDomain());
}
}
@Test
public void testGetPartitionNamesUnpartitioned()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableUnpartitioned);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
assertEquals(partitionResult.getPartitions().size(), 1);
assertEquals(partitionResult.getPartitions(), unpartitionedPartitions);
}
    // Partition listing on a nonexistent table must raise TableNotFoundException.
    @Test(expectedExceptions = TableNotFoundException.class)
    public void testGetPartitionNamesException()
            throws Exception
    {
        splitManager.getPartitions(invalidTableHandle, TupleDomain.<ColumnHandle>all());
    }
@SuppressWarnings({"ValueOfIncrementOrDecrementUsed", "UnusedAssignment"})
@Test
public void testGetTableSchemaPartitionFormat()
throws Exception
{
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(getTableHandle(tablePartitionFormat));
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
int i = 0;
assertPrimitiveField(map, i++, "t_string", VARCHAR, false);
assertPrimitiveField(map, i++, "t_tinyint", BIGINT, false);
assertPrimitiveField(map, i++, "t_smallint", BIGINT, false);
assertPrimitiveField(map, i++, "t_int", BIGINT, false);
assertPrimitiveField(map, i++, "t_bigint", BIGINT, false);
assertPrimitiveField(map, i++, "t_float", DOUBLE, false);
assertPrimitiveField(map, i++, "t_double", DOUBLE, false);
assertPrimitiveField(map, i++, "t_boolean", BOOLEAN, false);
assertPrimitiveField(map, i++, "ds", VARCHAR, true);
assertPrimitiveField(map, i++, "file_format", VARCHAR, true);
assertPrimitiveField(map, i++, "dummy", BIGINT, true);
}
@Test
public void testGetTableSchemaUnpartitioned()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableUnpartitioned);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
assertPrimitiveField(map, 0, "t_string", VARCHAR, false);
assertPrimitiveField(map, 1, "t_tinyint", BIGINT, false);
}
@Test
public void testGetTableSchemaOffline()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableOffline);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
assertPrimitiveField(map, 0, "t_string", VARCHAR, false);
}
@Test
public void testGetTableSchemaOfflinePartition()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableOfflinePartition);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
Map<String, ColumnMetadata> map = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
assertPrimitiveField(map, 0, "t_string", VARCHAR, false);
}
    // Looking up a handle for a nonexistent table must return null, not throw.
    @Test
    public void testGetTableSchemaException()
            throws Exception
    {
        assertNull(metadata.getTableHandle(SESSION, invalidTable));
    }
@Test
public void testGetPartitionSplitsBatch()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
ConnectorSplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
assertEquals(getSplitCount(splitSource), partitions.size());
}
@Test
public void testGetPartitionSplitsBatchUnpartitioned()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableUnpartitioned);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
ConnectorSplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
assertEquals(getSplitCount(splitSource), 1);
}
    // Requesting splits for a nonexistent table must raise TableNotFoundException.
    @Test(expectedExceptions = TableNotFoundException.class)
    public void testGetPartitionSplitsBatchInvalidTable()
            throws Exception
    {
        splitManager.getPartitionSplits(invalidTableHandle, ImmutableList.of(invalidPartition));
    }
@Test
public void testGetPartitionSplitsEmpty()
throws Exception
{
ConnectorSplitSource splitSource = splitManager.getPartitionSplits(invalidTableHandle, ImmutableList.<ConnectorPartition>of());
// fetch full list
getSplitCount(splitSource);
}
    // A table marked offline must be rejected at partition-listing time with
    // TableOfflineException naming the offending table.
    @Test
    public void testGetPartitionTableOffline()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tableOffline);
        try {
            splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
            fail("expected TableOfflineException");
        }
        catch (TableOfflineException e) {
            assertEquals(e.getTableName(), tableOffline);
        }
    }
    // Only the specific offline partition (ds=2012-12-30) must fail split generation;
    // all other partitions of the same table stay readable.
    @Test
    public void testGetPartitionSplitsTableOfflinePartition()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tableOfflinePartition);
        assertNotNull(tableHandle);
        ColumnHandle dsColumn = metadata.getColumnHandles(tableHandle).get("ds");
        assertNotNull(dsColumn);
        // Constrain to the date of the offline partition.
        Domain domain = Domain.singleValue(utf8Slice("2012-12-30"));
        TupleDomain<ColumnHandle> tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(dsColumn, domain));
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, tupleDomain);
        for (ConnectorPartition partition : partitionResult.getPartitions()) {
            if (domain.equals(partition.getTupleDomain().getDomains().get(dsColumn))) {
                // This is the offline partition: split generation must fail with details.
                try {
                    getSplitCount(splitManager.getPartitionSplits(tableHandle, ImmutableList.of(partition)));
                    fail("Expected PartitionOfflineException");
                }
                catch (PartitionOfflineException e) {
                    assertEquals(e.getTableName(), tableOfflinePartition);
                    assertEquals(e.getPartition(), "ds=2012-12-30");
                }
            }
            else {
                // Any other partition must produce splits without error.
                getSplitCount(splitManager.getPartitionSplits(tableHandle, ImmutableList.of(partition)));
            }
        }
    }
@Test
public void testBucketedTableStringInt()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tableBucketedStringInt);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
assertTableIsBucketed(tableHandle);
String testString = "test";
Long testInt = 13L;
Long testSmallint = 12L;
// Reverse the order of bindings as compared to bucketing order
ImmutableMap<ColumnHandle, SerializableNativeValue> bindings = ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
.put(columnHandles.get(columnIndex.get("t_int")), new SerializableNativeValue(Long.class, testInt))
.put(columnHandles.get(columnIndex.get("t_string")), new SerializableNativeValue(Slice.class, utf8Slice(testString)))
.put(columnHandles.get(columnIndex.get("t_smallint")), new SerializableNativeValue(Long.class, testSmallint))
.build();
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withNullableFixedValues(bindings));
List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 1);
try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(splits.get(0), columnHandles)) {
MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
boolean rowFound = false;
for (MaterializedRow row : result) {
if (testString.equals(row.getField(columnIndex.get("t_string"))) &&
testInt.equals(row.getField(columnIndex.get("t_int"))) &&
testSmallint.equals(row.getField(columnIndex.get("t_smallint")))) {
rowFound = true;
}
}
assertTrue(rowFound);
}
}
    // Verifies bucket pruning with fixed-value bindings on (string, bigint, boolean)
    // bucketing columns: the scan must collapse to one split containing the target row.
    @SuppressWarnings("ConstantConditions")
    @Test
    public void testBucketedTableBigintBoolean()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tableBucketedBigintBoolean);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        assertTableIsBucketed(tableHandle);
        String testString = "test";
        Long testBigint = 89L;
        Boolean testBoolean = true;
        ImmutableMap<ColumnHandle, SerializableNativeValue> bindings = ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                .put(columnHandles.get(columnIndex.get("t_string")), new SerializableNativeValue(Slice.class, utf8Slice(testString)))
                .put(columnHandles.get(columnIndex.get("t_bigint")), new SerializableNativeValue(Long.class, testBigint))
                .put(columnHandles.get(columnIndex.get("t_boolean")), new SerializableNativeValue(Boolean.class, testBoolean))
                .build();
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withNullableFixedValues(bindings));
        List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
        // Bucket pruning must reduce the scan to exactly one split.
        assertEquals(splits.size(), 1);
        try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(splits.get(0), columnHandles)) {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
            boolean rowFound = false;
            for (MaterializedRow row : result) {
                if (testString.equals(row.getField(columnIndex.get("t_string"))) &&
                        testBigint.equals(row.getField(columnIndex.get("t_bigint"))) &&
                        testBoolean.equals(row.getField(columnIndex.get("t_boolean")))) {
                    rowFound = true;
                    break;
                }
            }
            assertTrue(rowFound);
        }
    }
    // Double/float columns are not eligible for bucket pruning, so bindings on them
    // must leave the scan at the full 32 buckets while still reading every row.
    @Test
    public void testBucketedTableDoubleFloat()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tableBucketedDoubleFloat);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        assertTableIsBucketed(tableHandle);
        ImmutableMap<ColumnHandle, SerializableNativeValue> bindings = ImmutableMap.<ColumnHandle, SerializableNativeValue>builder()
                .put(columnHandles.get(columnIndex.get("t_float")), new SerializableNativeValue(Double.class, 87.1))
                .put(columnHandles.get(columnIndex.get("t_double")), new SerializableNativeValue(Double.class, 88.2))
                .build();
        // floats and doubles are not supported, so we should see all splits
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.withNullableFixedValues(bindings));
        List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
        assertEquals(splits.size(), 32);
        // All 100 rows of the table must be visible across the 32 buckets.
        int count = 0;
        for (ConnectorSplit split : splits) {
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(split, columnHandles)) {
                MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
                count += result.getRowCount();
            }
        }
        assertEquals(count, 100);
    }
private void assertTableIsBucketed(ConnectorTableHandle tableHandle)
throws Exception
{
// the bucketed test tables should have exactly 32 splits
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 32);
// verify all paths are unique
Set<String> paths = new HashSet<>();
for (ConnectorSplit split : splits) {
assertTrue(paths.add(((HiveSplit) split).getPath()));
}
}
@Test
public void testGetRecords()
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), this.partitions.size());
for (ConnectorSplit split : splits) {
HiveSplit hiveSplit = (HiveSplit) split;
List<HivePartitionKey> partitionKeys = hiveSplit.getPartitionKeys();
String ds = partitionKeys.get(0).getValue();
String fileFormat = partitionKeys.get(1).getValue();
HiveStorageFormat fileType = HiveStorageFormat.valueOf(fileFormat.toUpperCase());
long dummyPartition = Long.parseLong(partitionKeys.get(2).getValue());
long rowNumber = 0;
long completedBytes = 0;
try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles)) {
MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
assertPageSourceType(pageSource, fileType);
for (MaterializedRow row : result) {
try {
assertValueTypes(row, tableMetadata.getColumns());
}
catch (RuntimeException e) {
throw new RuntimeException("row " + rowNumber, e);
}
rowNumber++;
if (rowNumber % 19 == 0) {
assertNull(row.getField(columnIndex.get("t_string")));
}
else if (rowNumber % 19 == 1) {
assertEquals(row.getField(columnIndex.get("t_string")), "");
}
else {
assertEquals(row.getField(columnIndex.get("t_string")), "test");
}
assertEquals(row.getField(columnIndex.get("t_tinyint")), 1 + rowNumber);
assertEquals(row.getField(columnIndex.get("t_smallint")), 2 + rowNumber);
assertEquals(row.getField(columnIndex.get("t_int")), 3 + rowNumber);
if (rowNumber % 13 == 0) {
assertNull(row.getField(columnIndex.get("t_bigint")));
}
else {
assertEquals(row.getField(columnIndex.get("t_bigint")), 4 + rowNumber);
}
assertEquals((Double) row.getField(columnIndex.get("t_float")), 5.1 + rowNumber, 0.001);
assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
if (rowNumber % 3 == 2) {
assertNull(row.getField(columnIndex.get("t_boolean")));
}
else {
assertEquals(row.getField(columnIndex.get("t_boolean")), rowNumber % 3 != 0);
}
assertEquals(row.getField(columnIndex.get("ds")), ds);
assertEquals(row.getField(columnIndex.get("file_format")), fileFormat);
assertEquals(row.getField(columnIndex.get("dummy")), dummyPartition);
long newCompletedBytes = pageSource.getCompletedBytes();
assertTrue(newCompletedBytes >= completedBytes);
assertTrue(newCompletedBytes <= hiveSplit.getLength());
completedBytes = newCompletedBytes;
}
assertTrue(completedBytes <= hiveSplit.getLength());
assertEquals(rowNumber, 100);
}
}
}
    // Reads every partition split while projecting only a subset of columns and
    // checks that projected values still line up with the split's partition keys.
    @Test
    public void testGetPartialRecords()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tablePartitionFormat);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
        List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
        // This fixture table produces exactly one split per partition.
        assertEquals(splits.size(), this.partitions.size());
        for (ConnectorSplit split : splits) {
            HiveSplit hiveSplit = (HiveSplit) split;
            // Partition keys are ordered: ds, file_format, dummy.
            List<HivePartitionKey> partitionKeys = hiveSplit.getPartitionKeys();
            String ds = partitionKeys.get(0).getValue();
            String fileFormat = partitionKeys.get(1).getValue();
            HiveStorageFormat fileType = HiveStorageFormat.valueOf(fileFormat.toUpperCase());
            long dummyPartition = Long.parseLong(partitionKeys.get(2).getValue());
            long rowNumber = 0;
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles)) {
                assertPageSourceType(pageSource, fileType);
                MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
                for (MaterializedRow row : result) {
                    // rowNumber is incremented first, so data values below are 1-based.
                    rowNumber++;
                    assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
                    assertEquals(row.getField(columnIndex.get("ds")), ds);
                    assertEquals(row.getField(columnIndex.get("file_format")), fileFormat);
                    assertEquals(row.getField(columnIndex.get("dummy")), dummyPartition);
                }
            }
            // Each partition of the fixture holds exactly 100 rows.
            assertEquals(rowNumber, 100);
        }
    }
    // Reads the unpartitioned fixture table end-to-end: expects a single TEXTFILE
    // split, no partition keys, and 100 rows of the known data pattern.
    @Test
    public void testGetRecordsUnpartitioned()
            throws Exception
    {
        ConnectorTableHandle tableHandle = getTableHandle(tableUnpartitioned);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
        List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
        assertEquals(splits.size(), 1);
        for (ConnectorSplit split : splits) {
            HiveSplit hiveSplit = (HiveSplit) split;
            assertEquals(hiveSplit.getPartitionKeys(), ImmutableList.of());
            long rowNumber = 0;
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(split, columnHandles)) {
                assertPageSourceType(pageSource, TEXTFILE);
                MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
                assertEquals(pageSource.getTotalBytes(), hiveSplit.getLength());
                for (MaterializedRow row : result) {
                    rowNumber++;
                    // t_string cycles by row number mod 19: null, "", then "unpartitioned".
                    if (rowNumber % 19 == 0) {
                        assertNull(row.getField(columnIndex.get("t_string")));
                    }
                    else if (rowNumber % 19 == 1) {
                        assertEquals(row.getField(columnIndex.get("t_string")), "");
                    }
                    else {
                        assertEquals(row.getField(columnIndex.get("t_string")), "unpartitioned");
                    }
                    assertEquals(row.getField(columnIndex.get("t_tinyint")), 1 + rowNumber);
                }
            }
            assertEquals(rowNumber, 100);
        }
    }
    // Requesting a column handle that does not exist in the table must fail; the
    // expected RuntimeException (matching INVALID_COLUMN) is declared on the annotation.
    @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = ".*" + INVALID_COLUMN + ".*")
    public void testGetRecordsInvalidColumn()
            throws Exception
    {
        ConnectorTableHandle table = getTableHandle(tableUnpartitioned);
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(table, TupleDomain.<ColumnHandle>all());
        ConnectorSplit split = Iterables.getFirst(getAllSplits(splitManager.getPartitionSplits(table, partitionResult.getPartitions())), null);
        pageSourceProvider.createPageSource(split, ImmutableList.of(invalidColumnHandle));
    }
    // A partition whose column type disagrees with the table schema must be
    // rejected while enumerating splits; the exact message is pinned on the annotation.
    @Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = ".*The column 't_data' in table '.*\\.presto_test_partition_schema_change' is declared as type 'bigint', but partition 'ds=2012-12-29' declared column 't_data' as type 'string'.")
    public void testPartitionSchemaMismatch()
            throws Exception
    {
        ConnectorTableHandle table = getTableHandle(tablePartitionSchemaChange);
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(table, TupleDomain.<ColumnHandle>all());
        getAllSplits(splitManager.getPartitionSplits(table, partitionResult.getPartitions()));
    }
    // A partition value in non-canonical form ("0" for a boolean) is matched during
    // partition pruning but currently fails with HIVE_INVALID_PARTITION_VALUE when read.
    @Test
    public void testPartitionSchemaNonCanonical()
            throws Exception
    {
        ConnectorTableHandle table = getTableHandle(tablePartitionSchemaChangeNonCanonical);
        ColumnHandle column = metadata.getColumnHandles(table).get("t_boolean");
        assertNotNull(column);
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(table, TupleDomain.withFixedValues(ImmutableMap.of(column, false)));
        assertEquals(partitionResult.getPartitions().size(), 1);
        assertEquals(partitionResult.getPartitions().get(0).getPartitionId(), "t_boolean=0");
        ConnectorSplitSource splitSource = splitManager.getPartitionSplits(table, partitionResult.getPartitions());
        ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
        ImmutableList<ColumnHandle> columnHandles = ImmutableList.of(column);
        try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(split, columnHandles)) {
            // TODO coercion of non-canonical values should be supported
            fail("expected exception");
        }
        catch (PrestoException e) {
            assertEquals(e.getErrorCode(), HIVE_INVALID_PARTITION_VALUE.toErrorCode());
        }
    }
    // Full type-coverage read of the TEXTFILE fixture table.
    @Test
    public void testTypesTextFile()
            throws Exception
    {
        assertGetRecords("presto_test_types_textfile", TEXTFILE);
    }
    // Full type-coverage read of the SEQUENCEFILE fixture table.
    @Test
    public void testTypesSequenceFile()
            throws Exception
    {
        assertGetRecords("presto_test_types_sequencefile", SEQUENCEFILE);
    }
    // Full type-coverage read of the RCTEXT fixture table.
    @Test
    public void testTypesRcText()
            throws Exception
    {
        assertGetRecords("presto_test_types_rctext", RCTEXT);
    }
@Test
public void testTypesRcTextRecordCursor()
throws Exception
{
if (metadata.getTableHandle(SESSION, new SchemaTableName(database, "presto_test_types_rctext")) == null) {
return;
}
ConnectorTableHandle tableHandle = getTableHandle(new SchemaTableName(database, "presto_test_types_rctext"));
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
HiveSplit hiveSplit = getHiveSplit(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
ConnectorPageSourceProvider pageSourceProvider = new HivePageSourceProvider(
new HiveClientConfig().setTimeZone(timeZone.getID()),
hdfsEnvironment,
ImmutableSet.<HiveRecordCursorProvider>of(new ColumnarTextHiveRecordCursorProvider()),
ImmutableSet.<HivePageSourceFactory>of(),
TYPE_MANAGER);
ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles);
assertGetRecords(RCTEXT, tableMetadata, hiveSplit, pageSource, columnHandles);
}
    // Full type-coverage read of the RCBINARY fixture table.
    @Test
    public void testTypesRcBinary()
            throws Exception
    {
        assertGetRecords("presto_test_types_rcbinary", RCBINARY);
    }
@Test
public void testTypesRcBinaryRecordCursor()
throws Exception
{
if (metadata.getTableHandle(SESSION, new SchemaTableName(database, "presto_test_types_rcbinary")) == null) {
return;
}
ConnectorTableHandle tableHandle = getTableHandle(new SchemaTableName(database, "presto_test_types_rcbinary"));
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
HiveSplit hiveSplit = getHiveSplit(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
ConnectorPageSourceProvider pageSourceProvider = new HivePageSourceProvider(
new HiveClientConfig().setTimeZone(timeZone.getID()),
hdfsEnvironment,
ImmutableSet.<HiveRecordCursorProvider>of(new ColumnarBinaryHiveRecordCursorProvider()),
ImmutableSet.<HivePageSourceFactory>of(),
TYPE_MANAGER);
ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles);
assertGetRecords(RCBINARY, tableMetadata, hiveSplit, pageSource, columnHandles);
}
    // Full type-coverage read of the ORC fixture table, when present.
    @Test
    public void testTypesOrc()
            throws Exception
    {
        assertGetRecordsOptional("presto_test_types_orc", ORC);
    }
@Test
public void testTypesOrcRecordCursor()
throws Exception
{
if (metadata.getTableHandle(SESSION, new SchemaTableName(database, "presto_test_types_orc")) == null) {
return;
}
ConnectorTableHandle tableHandle = getTableHandle(new SchemaTableName(database, "presto_test_types_orc"));
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
HiveSplit hiveSplit = getHiveSplit(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
ConnectorPageSourceProvider pageSourceProvider = new HivePageSourceProvider(
new HiveClientConfig().setTimeZone(timeZone.getID()),
hdfsEnvironment,
ImmutableSet.<HiveRecordCursorProvider>of(new OrcRecordCursorProvider()),
ImmutableSet.<HivePageSourceFactory>of(),
TYPE_MANAGER);
ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles);
assertGetRecords(ORC, tableMetadata, hiveSplit, pageSource, columnHandles);
}
    // Full type-coverage read of the PARQUET fixture table, when present.
    @Test
    public void testTypesParquet()
            throws Exception
    {
        assertGetRecordsOptional("presto_test_types_parquet", PARQUET);
    }
    // Full type-coverage read of the DWRF fixture table, when present.
    @Test
    public void testTypesDwrf()
            throws Exception
    {
        assertGetRecordsOptional("presto_test_types_dwrf", DWRF);
    }
@Test
public void testTypesDwrfRecordCursor()
throws Exception
{
if (metadata.getTableHandle(SESSION, new SchemaTableName(database, "presto_test_types_dwrf")) == null) {
return;
}
ConnectorTableHandle tableHandle = getTableHandle(new SchemaTableName(database, "presto_test_types_dwrf"));
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
HiveSplit hiveSplit = getHiveSplit(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
ReaderWriterProfiler.setProfilerOptions(new Configuration());
ConnectorPageSourceProvider pageSourceProvider = new HivePageSourceProvider(
new HiveClientConfig().setTimeZone(timeZone.getID()),
hdfsEnvironment,
ImmutableSet.<HiveRecordCursorProvider>of(new DwrfRecordCursorProvider()),
ImmutableSet.<HivePageSourceFactory>of(),
TYPE_MANAGER);
ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles);
assertGetRecords(DWRF, tableMetadata, hiveSplit, pageSource, columnHandles);
}
    // Resolving a Hive view as a table must fail with HiveViewNotSupportedException
    // carrying the view's name.
    @Test
    public void testHiveViewsAreNotSupported()
            throws Exception
    {
        try {
            getTableHandle(view);
            fail("Expected HiveViewNotSupportedException");
        }
        catch (HiveViewNotSupportedException e) {
            assertEquals(e.getTableName(), view);
        }
    }
    // Hive views must not surface any columns through listTableColumns.
    @Test
    public void testHiveViewsHaveNoColumns()
            throws Exception
    {
        assertEquals(metadata.listTableColumns(SESSION, new SchemaTablePrefix(view.getSchemaName(), view.getTableName())), ImmutableMap.of());
    }
    // Renames a freshly created table and verifies the old name disappears while
    // the new name resolves; both names are dropped in the finally block.
    @Test
    public void testRenameTable()
    {
        try {
            createDummyTable(temporaryRenameTableOld);
            metadata.renameTable(getTableHandle(temporaryRenameTableOld), temporaryRenameTableNew);
            assertNull(metadata.getTableHandle(SESSION, temporaryRenameTableOld));
            assertNotNull(metadata.getTableHandle(SESSION, temporaryRenameTableNew));
        }
        finally {
            // dropTable is best-effort, so dropping the no-longer-existing old name is safe.
            dropTable(temporaryRenameTableOld);
            dropTable(temporaryRenameTableNew);
        }
    }
    // Runs the create-table round trip once per supported storage format,
    // dropping the temporary table after each iteration.
    @Test
    public void testTableCreation()
            throws Exception
    {
        for (HiveStorageFormat storageFormat : createTableFormats) {
            try {
                doCreateTable(storageFormat);
            }
            finally {
                dropTable(temporaryCreateTable);
            }
        }
    }
    // Round-trips a sampled table (with sample-weight column) and always drops it.
    @Test
    public void testSampledTableCreation()
            throws Exception
    {
        try {
            doCreateSampledTable();
        }
        finally {
            dropTable(temporaryCreateSampledTable);
        }
    }
    // Creates an empty table (no writer involved) per supported storage format
    // and verifies metadata; drops the temporary table after each iteration.
    @Test
    public void testEmptyTableCreation()
            throws Exception
    {
        for (HiveStorageFormat storageFormat : createTableFormats) {
            try {
                doCreateEmptyTable(storageFormat);
            }
            finally {
                dropTable(temporaryCreateEmptyTable);
            }
        }
    }
    // Exercises the full view create/replace/drop contract; cleanup tolerates
    // the view never having been created.
    @Test
    public void testViewCreation()
    {
        try {
            verifyViewCreation();
        }
        finally {
            try {
                metadata.dropView(SESSION, temporaryCreateView);
            }
            catch (RuntimeException e) {
                // this usually occurs because the view was not created
            }
        }
    }
    // Creating a table with a type Hive cannot store (HyperLogLog) must fail with
    // NOT_SUPPORTED for every storage format.
    @Test
    public void testCreateTableUnsupportedType()
    {
        for (HiveStorageFormat storageFormat : createTableFormats) {
            try {
                ConnectorSession session = createSession(storageFormat);
                List<ColumnMetadata> columns = ImmutableList.of(new ColumnMetadata("dummy", HYPER_LOG_LOG, false));
                ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(invalidTable, columns, session.getUser());
                metadata.beginCreateTable(session, tableMetadata);
                fail("create table with unsupported type should fail for storage format " + storageFormat);
            }
            catch (PrestoException e) {
                assertEquals(e.getErrorCode(), NOT_SUPPORTED.toErrorCode());
            }
        }
    }
private void createDummyTable(SchemaTableName tableName)
{
List<ColumnMetadata> columns = ImmutableList.of(new ColumnMetadata("dummy", VARCHAR, false));
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, SESSION.getUser());
ConnectorOutputTableHandle handle = metadata.beginCreateTable(SESSION, tableMetadata);
metadata.commitCreateTable(handle, ImmutableList.of());
}
    // Drives the view lifecycle: replace semantics on create, duplicate-create
    // failure, drop visibility, and drop-of-missing failure.
    private void verifyViewCreation()
    {
        // replace works for new view
        doCreateView(temporaryCreateView, true);
        // replace works for existing view
        doCreateView(temporaryCreateView, true);
        // create fails for existing view
        try {
            doCreateView(temporaryCreateView, false);
            fail("create existing should fail");
        }
        catch (ViewAlreadyExistsException e) {
            assertEquals(e.getViewName(), temporaryCreateView);
        }
        // drop works when view exists
        metadata.dropView(SESSION, temporaryCreateView);
        assertEquals(metadata.getViews(SESSION, temporaryCreateView.toSchemaTablePrefix()).size(), 0);
        assertFalse(metadata.listViews(SESSION, temporaryCreateView.getSchemaName()).contains(temporaryCreateView));
        // drop fails when view does not exist
        try {
            metadata.dropView(SESSION, temporaryCreateView);
            fail("drop non-existing should fail");
        }
        catch (ViewNotFoundException e) {
            assertEquals(e.getViewName(), temporaryCreateView);
        }
        // create works for new view
        doCreateView(temporaryCreateView, false);
    }
private void doCreateView(SchemaTableName viewName, boolean replace)
{
String viewData = "test data";
metadata.createView(SESSION, viewName, viewData, replace);
Map<SchemaTableName, String> views = metadata.getViews(SESSION, viewName.toSchemaTablePrefix());
assertEquals(views.size(), 1);
assertEquals(views.get(viewName), viewData);
assertTrue(metadata.listViews(SESSION, viewName.getSchemaName()).contains(viewName));
}
    // Creates a sampled table, writes three records with explicit sample weights,
    // then reads them back verifying both the data column and the hidden
    // sample-weight column.
    private void doCreateSampledTable()
            throws Exception
    {
        // begin creating the table
        List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
                .add(new ColumnMetadata("sales", BIGINT, false))
                .build();
        // The trailing 'true' marks the table as sampled.
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(temporaryCreateSampledTable, columns, SESSION.getUser(), true);
        ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(SESSION, tableMetadata);
        // write the records
        // beginRecord takes the sample weight; the single appended value is 'sales'.
        RecordSink sink = recordSinkProvider.getRecordSink(outputHandle);
        sink.beginRecord(8);
        sink.appendLong(2);
        sink.finishRecord();
        sink.beginRecord(5);
        sink.appendLong(3);
        sink.finishRecord();
        sink.beginRecord(7);
        sink.appendLong(4);
        sink.finishRecord();
        Collection<Slice> fragments = sink.commit();
        // commit the table
        metadata.commitCreateTable(outputHandle, fragments);
        // load the new table
        ConnectorTableHandle tableHandle = getTableHandle(temporaryCreateSampledTable);
        List<ColumnHandle> columnHandles = ImmutableList.<ColumnHandle>builder()
                .addAll(metadata.getColumnHandles(tableHandle).values())
                .add(metadata.getSampleWeightColumnHandle(tableHandle))
                .build();
        assertEquals(columnHandles.size(), 2);
        // verify the metadata
        tableMetadata = metadata.getTableMetadata(getTableHandle(temporaryCreateSampledTable));
        assertEquals(tableMetadata.getOwner(), SESSION.getUser());
        Map<String, ColumnMetadata> columnMap = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
        // The sample-weight column is hidden, so only 'sales' shows up in metadata.
        assertEquals(columnMap.size(), 1);
        assertPrimitiveField(columnMap, 0, "sales", BIGINT, false);
        // verify the data
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
        assertEquals(partitionResult.getPartitions().size(), 1);
        ConnectorSplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
        ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
        try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(split, columnHandles)) {
            assertPageSourceType(pageSource, RCBINARY);
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
            assertEquals(result.getRowCount(), 3);
            // Field 0 is 'sales', field 1 is the sample weight written above.
            MaterializedRow row;
            row = result.getMaterializedRows().get(0);
            assertEquals(row.getField(0), 2L);
            assertEquals(row.getField(1), 8L);
            row = result.getMaterializedRows().get(1);
            assertEquals(row.getField(0), 3L);
            assertEquals(row.getField(1), 5L);
            row = result.getMaterializedRows().get(2);
            assertEquals(row.getField(0), 4L);
            assertEquals(row.getField(1), 7L);
        }
    }
    // Full create-table round trip for the given storage format: begin, write
    // three rows (one with all-null values), commit, then verify metadata and
    // read the data back.
    private void doCreateTable(HiveStorageFormat storageFormat)
            throws Exception
    {
        // begin creating the table
        List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
                .add(new ColumnMetadata("id", BIGINT, false))
                .add(new ColumnMetadata("t_string", VARCHAR, false))
                .add(new ColumnMetadata("t_bigint", BIGINT, false))
                .add(new ColumnMetadata("t_double", DOUBLE, false))
                .add(new ColumnMetadata("t_boolean", BOOLEAN, false))
                .build();
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(temporaryCreateTable, columns, SESSION.getUser());
        ConnectorSession session = createSession(storageFormat);
        ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(session, tableMetadata);
        // write the records
        // beginRecord(1) gives each row a sample weight of one.
        RecordSink sink = recordSinkProvider.getRecordSink(outputHandle);
        sink.beginRecord(1);
        sink.appendLong(1);
        sink.appendString("hello".getBytes(UTF_8));
        sink.appendLong(123);
        sink.appendDouble(43.5);
        sink.appendBoolean(true);
        sink.finishRecord();
        // Second row: all nullable fields null.
        sink.beginRecord(1);
        sink.appendLong(2);
        sink.appendNull();
        sink.appendNull();
        sink.appendNull();
        sink.appendNull();
        sink.finishRecord();
        sink.beginRecord(1);
        sink.appendLong(3);
        sink.appendString("bye".getBytes(UTF_8));
        sink.appendLong(456);
        sink.appendDouble(98.1);
        sink.appendBoolean(false);
        sink.finishRecord();
        Collection<Slice> fragments = sink.commit();
        // commit the table
        metadata.commitCreateTable(outputHandle, fragments);
        // load the new table
        ConnectorTableHandle tableHandle = getTableHandle(temporaryCreateTable);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
        // verify the metadata
        tableMetadata = metadata.getTableMetadata(getTableHandle(temporaryCreateTable));
        assertEquals(tableMetadata.getOwner(), session.getUser());
        Map<String, ColumnMetadata> columnMap = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
        assertPrimitiveField(columnMap, 0, "id", BIGINT, false);
        assertPrimitiveField(columnMap, 1, "t_string", VARCHAR, false);
        assertPrimitiveField(columnMap, 2, "t_bigint", BIGINT, false);
        assertPrimitiveField(columnMap, 3, "t_double", DOUBLE, false);
        assertPrimitiveField(columnMap, 4, "t_boolean", BOOLEAN, false);
        // verify the data
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
        assertEquals(partitionResult.getPartitions().size(), 1);
        ConnectorSplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
        ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
        try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(split, columnHandles)) {
            assertPageSourceType(pageSource, storageFormat);
            MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
            assertEquals(result.getRowCount(), 3);
            MaterializedRow row;
            row = result.getMaterializedRows().get(0);
            assertEquals(row.getField(0), 1L);
            assertEquals(row.getField(1), "hello");
            assertEquals(row.getField(2), 123L);
            assertEquals(row.getField(3), 43.5);
            assertEquals(row.getField(4), true);
            row = result.getMaterializedRows().get(1);
            assertEquals(row.getField(0), 2L);
            assertNull(row.getField(1));
            assertNull(row.getField(2));
            assertNull(row.getField(3));
            assertNull(row.getField(4));
            row = result.getMaterializedRows().get(2);
            assertEquals(row.getField(0), 3L);
            assertEquals(row.getField(1), "bye");
            assertEquals(row.getField(2), 456L);
            assertEquals(row.getField(3), 98.1);
            assertEquals(row.getField(4), false);
        }
    }
    // Creates a table via the one-shot createTable path (no writer), then checks
    // the metadata round trip and that the table yields no splits with data.
    private void doCreateEmptyTable(HiveStorageFormat storageFormat)
            throws Exception
    {
        // create the table
        Type arrayStringType = requireNonNull(TYPE_MANAGER.getType(parseTypeSignature("array<varchar>")));
        List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
                .add(new ColumnMetadata("id", BIGINT, false))
                .add(new ColumnMetadata("t_string", VARCHAR, false))
                .add(new ColumnMetadata("t_bigint", BIGINT, false))
                .add(new ColumnMetadata("t_double", DOUBLE, false))
                .add(new ColumnMetadata("t_boolean", BOOLEAN, false))
                .add(new ColumnMetadata("t_array_string", arrayStringType, false))
                .build();
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(temporaryCreateEmptyTable, columns, SESSION.getUser());
        ConnectorSession session = createSession(storageFormat);
        metadata.createTable(session, tableMetadata);
        // load the new table
        ConnectorTableHandle tableHandle = getTableHandle(temporaryCreateEmptyTable);
        // verify the metadata
        tableMetadata = metadata.getTableMetadata(getTableHandle(temporaryCreateEmptyTable));
        assertEquals(tableMetadata.getOwner(), session.getUser());
        Map<String, ColumnMetadata> columnMap = uniqueIndex(tableMetadata.getColumns(), ColumnMetadata::getName);
        assertPrimitiveField(columnMap, 0, "id", BIGINT, false);
        assertPrimitiveField(columnMap, 1, "t_string", VARCHAR, false);
        assertPrimitiveField(columnMap, 2, "t_bigint", BIGINT, false);
        assertPrimitiveField(columnMap, 3, "t_double", DOUBLE, false);
        assertPrimitiveField(columnMap, 4, "t_boolean", BOOLEAN, false);
        assertPrimitiveField(columnMap, 5, "t_array_string", arrayStringType, false);
        // verify the table is empty
        ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
        assertEquals(partitionResult.getPartitions().size(), 1);
        ConnectorSplitSource splitSource = splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions());
        assertEquals(getAllSplits(splitSource).size(), 0);
    }
protected void assertGetRecordsOptional(String tableName, HiveStorageFormat hiveStorageFormat)
throws Exception
{
if (metadata.getTableHandle(SESSION, new SchemaTableName(database, tableName)) != null) {
assertGetRecords(tableName, hiveStorageFormat);
}
}
protected void assertGetRecords(String tableName, HiveStorageFormat hiveStorageFormat)
throws Exception
{
ConnectorTableHandle tableHandle = getTableHandle(new SchemaTableName(database, tableName));
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(tableHandle);
HiveSplit hiveSplit = getHiveSplit(tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(tableHandle).values());
ConnectorPageSource pageSource = pageSourceProvider.createPageSource(hiveSplit, columnHandles);
assertGetRecords(hiveStorageFormat, tableMetadata, hiveSplit, pageSource, columnHandles);
}
protected HiveSplit getHiveSplit(ConnectorTableHandle tableHandle)
throws InterruptedException
{
ConnectorPartitionResult partitionResult = splitManager.getPartitions(tableHandle, TupleDomain.<ColumnHandle>all());
List<ConnectorSplit> splits = getAllSplits(splitManager.getPartitionSplits(tableHandle, partitionResult.getPartitions()));
assertEquals(splits.size(), 1);
return checkType(getOnlyElement(splits), HiveSplit.class, "split");
}
    // Validates every row of the all-types fixture table against its known data
    // pattern. rowNumber is incremented before the per-row assertions, so the
    // modulus checks and the "+ rowNumber" values are 1-based. Columns that only
    // exist in some fixture variants (timestamp, binary, date, map, array, ...)
    // are checked only when present in the column index. The page source is
    // always closed via the finally block.
    protected void assertGetRecords(
            HiveStorageFormat hiveStorageFormat,
            ConnectorTableMetadata tableMetadata,
            HiveSplit hiveSplit,
            ConnectorPageSource pageSource,
            List<? extends ColumnHandle> columnHandles)
            throws IOException
    {
        try {
            MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, getTypes(columnHandles));
            assertPageSourceType(pageSource, hiveStorageFormat);
            ImmutableMap<String, Integer> columnIndex = indexColumns(tableMetadata);
            long rowNumber = 0;
            long completedBytes = 0;
            for (MaterializedRow row : result) {
                try {
                    assertValueTypes(row, tableMetadata.getColumns());
                }
                catch (RuntimeException e) {
                    // Re-throw with the row number so a type failure is locatable.
                    throw new RuntimeException("row " + rowNumber, e);
                }
                rowNumber++;
                Integer index;
                // STRING
                index = columnIndex.get("t_string");
                if ((rowNumber % 19) == 0) {
                    assertNull(row.getField(index));
                }
                else {
                    assertEquals(row.getField(index), ((rowNumber % 19) == 1) ? "" : "test");
                }
                // NUMBERS
                assertEquals(row.getField(columnIndex.get("t_tinyint")), 1 + rowNumber);
                assertEquals(row.getField(columnIndex.get("t_smallint")), 2 + rowNumber);
                assertEquals(row.getField(columnIndex.get("t_int")), 3 + rowNumber);
                index = columnIndex.get("t_bigint");
                if ((rowNumber % 13) == 0) {
                    assertNull(row.getField(index));
                }
                else {
                    assertEquals(row.getField(index), 4 + rowNumber);
                }
                assertEquals((Double) row.getField(columnIndex.get("t_float")), 5.1 + rowNumber, 0.001);
                assertEquals(row.getField(columnIndex.get("t_double")), 6.2 + rowNumber);
                // BOOLEAN
                index = columnIndex.get("t_boolean");
                if ((rowNumber % 3) == 2) {
                    assertNull(row.getField(index));
                }
                else {
                    assertEquals(row.getField(index), (rowNumber % 3) != 0);
                }
                // TIMESTAMP
                index = columnIndex.get("t_timestamp");
                if (index != null) {
                    if ((rowNumber % 17) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        SqlTimestamp expected = new SqlTimestamp(new DateTime(2011, 5, 6, 7, 8, 9, 123, timeZone).getMillis(), UTC_KEY);
                        assertEquals(row.getField(index), expected);
                    }
                }
                // BINARY
                index = columnIndex.get("t_binary");
                if (index != null) {
                    if ((rowNumber % 23) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        assertEquals(row.getField(index), new SqlVarbinary("test binary".getBytes(UTF_8)));
                    }
                }
                // DATE
                index = columnIndex.get("t_date");
                if (index != null) {
                    if ((rowNumber % 37) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        SqlDate expected = new SqlDate(Ints.checkedCast(TimeUnit.MILLISECONDS.toDays(new DateTime(2013, 8, 9, 0, 0, 0, DateTimeZone.UTC).getMillis())));
                        assertEquals(row.getField(index), expected);
                    }
                }
                /* TODO: enable these tests when the types are supported
                // VARCHAR(50)
                index = columnIndex.get("t_varchar");
                if (index != null) {
                    if ((rowNumber % 39) == 0) {
                        assertTrue(cursor.isNull(index));
                    }
                    else {
                        String stringValue = cursor.getSlice(index).toStringUtf8();
                        assertEquals(stringValue, ((rowNumber % 39) == 1) ? "" : "test varchar");
                    }
                }
                // CHAR(25)
                index = columnIndex.get("t_char");
                if (index != null) {
                    if ((rowNumber % 41) == 0) {
                        assertTrue(cursor.isNull(index));
                    }
                    else {
                        String stringValue = cursor.getSlice(index).toStringUtf8();
                        assertEquals(stringValue, ((rowNumber % 41) == 1) ? "" : "test char");
                    }
                }
                */
                // MAP<STRING, STRING>
                index = columnIndex.get("t_map");
                if (index != null) {
                    if ((rowNumber % 27) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        assertEquals(row.getField(index), ImmutableMap.of("test key", "test value"));
                    }
                }
                // ARRAY<STRING>
                index = columnIndex.get("t_array_string");
                if (index != null) {
                    if ((rowNumber % 29) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        assertEquals(row.getField(index), ImmutableList.of("abc", "xyz", "data"));
                    }
                }
                // ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>
                index = columnIndex.get("t_array_struct");
                if (index != null) {
                    if ((rowNumber % 31) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        List<Object> expected1 = ImmutableList.<Object>of("test abc", 0.1);
                        List<Object> expected2 = ImmutableList.<Object>of("test xyz", 0.2);
                        assertEquals(row.getField(index), ImmutableList.of(expected1, expected2));
                    }
                }
                // MAP<INT, ARRAY<STRUCT<s_string: STRING, s_double:DOUBLE>>>
                index = columnIndex.get("t_complex");
                if (index != null) {
                    if ((rowNumber % 33) == 0) {
                        assertNull(row.getField(index));
                    }
                    else {
                        List<Object> expected1 = ImmutableList.<Object>of("test abc", 0.1);
                        List<Object> expected2 = ImmutableList.<Object>of("test xyz", 0.2);
                        assertEquals(row.getField(index), ImmutableMap.of(1L, ImmutableList.of(expected1, expected2)));
                    }
                }
                // NEW COLUMN
                assertNull(row.getField(columnIndex.get("new_column")));
                // Completed bytes must grow monotonically and never exceed the split.
                long newCompletedBytes = pageSource.getCompletedBytes();
                assertTrue(newCompletedBytes >= completedBytes);
                assertTrue(newCompletedBytes <= hiveSplit.getLength());
                completedBytes = newCompletedBytes;
            }
            assertTrue(completedBytes <= hiveSplit.getLength());
            assertEquals(rowNumber, 100);
        }
        finally {
            pageSource.close();
        }
    }
private void dropTable(SchemaTableName table)
{
try {
ConnectorTableHandle handle = metadata.getTableHandle(SESSION, table);
if (handle != null) {
metadata.dropTable(handle);
}
}
catch (RuntimeException e) {
Logger.get(getClass()).warn(e, "failed to drop table");
}
}
protected ConnectorTableHandle getTableHandle(SchemaTableName tableName)
{
ConnectorTableHandle handle = metadata.getTableHandle(SESSION, tableName);
checkArgument(handle != null, "table not found: %s", tableName);
return handle;
}
protected static int getSplitCount(ConnectorSplitSource splitSource)
throws InterruptedException
{
int splitCount = 0;
while (!splitSource.isFinished()) {
List<ConnectorSplit> batch = getFutureValue(splitSource.getNextBatch(1000));
splitCount += batch.size();
}
return splitCount;
}
protected static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
throws InterruptedException
{
ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
while (!splitSource.isFinished()) {
List<ConnectorSplit> batch = getFutureValue(splitSource.getNextBatch(1000));
splits.addAll(batch);
}
return splits.build();
}
protected static void assertPageSourceType(ConnectorPageSource pageSource, HiveStorageFormat hiveStorageFormat)
{
if (pageSource instanceof RecordPageSource) {
assertInstanceOf(((RecordPageSource) pageSource).getCursor(), recordCursorType(hiveStorageFormat), hiveStorageFormat.name());
}
else {
assertInstanceOf(pageSource, pageSourceType(hiveStorageFormat), hiveStorageFormat.name());
}
}
private static Class<? extends HiveRecordCursor> recordCursorType(HiveStorageFormat hiveStorageFormat)
{
switch (hiveStorageFormat) {
case RCTEXT:
return ColumnarTextHiveRecordCursor.class;
case RCBINARY:
return ColumnarBinaryHiveRecordCursor.class;
case ORC:
return OrcHiveRecordCursor.class;
case PARQUET:
return ParquetHiveRecordCursor.class;
case DWRF:
return DwrfHiveRecordCursor.class;
}
return GenericHiveRecordCursor.class;
}
private static Class<? extends ConnectorPageSource> pageSourceType(HiveStorageFormat hiveStorageFormat)
{
switch (hiveStorageFormat) {
case RCTEXT:
case RCBINARY:
return RcFilePageSource.class;
case ORC:
case DWRF:
return OrcPageSource.class;
default:
throw new AssertionError("Filed type " + hiveStorageFormat + " does not use a page source");
}
}
private static void assertValueTypes(MaterializedRow row, List<ColumnMetadata> schema)
{
for (int columnIndex = 0; columnIndex < schema.size(); columnIndex++) {
ColumnMetadata column = schema.get(columnIndex);
Object value = row.getField(columnIndex);
if (value != null) {
if (BOOLEAN.equals(column.getType())) {
assertInstanceOf(value, Boolean.class);
}
else if (BIGINT.equals(column.getType())) {
assertInstanceOf(value, Long.class);
}
else if (DOUBLE.equals(column.getType())) {
assertInstanceOf(value, Double.class);
}
else if (VARCHAR.equals(column.getType())) {
assertInstanceOf(value, String.class);
}
else if (VARBINARY.equals(column.getType())) {
assertInstanceOf(value, SqlVarbinary.class);
}
else if (TIMESTAMP.equals(column.getType())) {
assertInstanceOf(value, SqlTimestamp.class);
}
else if (DATE.equals(column.getType())) {
assertInstanceOf(value, SqlDate.class);
}
else if (column.getType() instanceof ArrayType) {
assertInstanceOf(value, List.class);
}
else if (column.getType() instanceof MapType) {
assertInstanceOf(value, Map.class);
}
else {
fail("Unknown primitive type " + columnIndex);
}
}
}
}
/**
 * Asserts that the column {@code name} exists in {@code map} with the expected
 * type and partition-key flag.
 *
 * NOTE(review): the {@code position} parameter is never read in this body —
 * presumably a leftover from an ordinal check; confirm before removing it,
 * since callers still pass it.
 */
private static void assertPrimitiveField(Map<String, ColumnMetadata> map, int position, String name, Type type, boolean partitionKey)
{
    assertTrue(map.containsKey(name));
    ColumnMetadata column = map.get(name);
    assertEquals(column.getType(), type, name);
    assertEquals(column.isPartitionKey(), partitionKey, name);
}
/**
 * Builds a map from column name to its ordinal position in {@code columnHandles}.
 */
protected static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
{
    ImmutableMap.Builder<String, Integer> nameToPosition = ImmutableMap.builder();
    int position = 0;
    for (ColumnHandle handle : columnHandles) {
        HiveColumnHandle hiveHandle = checkType(handle, HiveColumnHandle.class, "columnHandle");
        nameToPosition.put(hiveHandle.getName(), position);
        position++;
    }
    return nameToPosition.build();
}
/**
 * Builds a map from column name to its ordinal position in the table metadata.
 */
protected static ImmutableMap<String, Integer> indexColumns(ConnectorTableMetadata tableMetadata)
{
    ImmutableMap.Builder<String, Integer> nameToPosition = ImmutableMap.builder();
    int position = 0;
    for (ColumnMetadata columnMetadata : tableMetadata.getColumns()) {
        nameToPosition.put(columnMetadata.getName(), position);
        position++;
    }
    return nameToPosition.build();
}
/**
 * Creates a ConnectorSession identical to the shared SESSION except that the
 * storage-format session property is set to the given format (lower-cased,
 * matching the property's expected encoding).
 */
private static ConnectorSession createSession(HiveStorageFormat storageFormat)
{
    return new ConnectorSession(
            SESSION.getUser(),
            SESSION.getTimeZoneKey(),
            SESSION.getLocale(),
            SESSION.getStartTime(),
            ImmutableMap.of(STORAGE_FORMAT_PROPERTY, storageFormat.name().toLowerCase()));
}
/**
 * Generates a random lowercase hex identifier (a UUID with the dashes removed),
 * suitable for schema/table names.
 */
private static String randomName()
{
    String uuid = UUID.randomUUID().toString();
    return uuid.replace("-", "").toLowerCase(ENGLISH);
}
}
| apache-2.0 |
firebase/snippets-android | firestore/app/src/main/java/com/google/example/firestore/DocSnippets.java | 50857 | package com.google.example.firestore;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.google.firebase.Timestamp;
import com.google.firebase.firestore.CollectionReference;
import com.google.firebase.firestore.DocumentChange;
import com.google.firebase.firestore.DocumentChange.Type;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.EventListener;
import com.google.firebase.firestore.FieldPath;
import com.google.firebase.firestore.FieldValue;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.FirebaseFirestoreException;
import com.google.firebase.firestore.FirebaseFirestoreSettings;
import com.google.firebase.firestore.ListenerRegistration;
import com.google.firebase.firestore.MetadataChanges;
import com.google.firebase.firestore.Query;
import com.google.firebase.firestore.Query.Direction;
import com.google.firebase.firestore.QueryDocumentSnapshot;
import com.google.firebase.firestore.QuerySnapshot;
import com.google.firebase.firestore.ServerTimestamp;
import com.google.firebase.firestore.SetOptions;
import com.google.firebase.firestore.Source;
import com.google.firebase.firestore.Transaction;
import com.google.firebase.firestore.WriteBatch;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* Snippets for inclusion in documentation.
*/
@SuppressWarnings({"unused", "Convert2Lambda"})
public class DocSnippets {
private static final String TAG = "DocSnippets";
private static final ThreadPoolExecutor EXECUTOR = new ThreadPoolExecutor(2, 4,
60, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
private final FirebaseFirestore db;
/** Creates a snippet runner backed by the given Firestore instance. */
DocSnippets(FirebaseFirestore db) {
    this.db = db;
}
/**
 * Runs every snippet once against live data, for manual smoke-testing.
 * The last two calls are intentionally invalid queries and are expected to throw.
 */
void runAll() {
    Log.d(TAG, "================= BEGIN RUN ALL ===============");

    // Write example data
    exampleData();
    exampleDataCollectionGroup();

    // Run all other methods
    addAdaLovelace();
    addAlanTuring();
    getAllUsers();
    listenForUsers();
    docReference();
    collectionReference();
    subcollectionReference();
    setDocument();
    dataTypes();
    addDocument();
    newDocument();
    updateDocument();
    updateDocumentNested();
    setFieldWithMerge();
    deleteDocument();
    transactions();
    transactionPromise();
    getDocument();
    getDocumentWithOptions();
    listenToDocument();
    listenToDocumentLocal();
    getMultipleDocs();
    getAllDocs();
    listenToMultiple();
    listenToDiffs();
    listenState();
    detachListener();
    handleListenErrors();
    simpleQueries();
    compoundQueries();
    orderAndLimit();
    queryStartAtEndAt();

    // Run methods that should fail
    try {
        compoundQueriesInvalid();
    } catch (Exception e) {
        Log.d(TAG, "compoundQueriesInvalid", e);
    }

    try {
        orderAndLimitInvalid();
    } catch (Exception e) {
        Log.d(TAG, "orderAndLimitInvalid", e);
    }
}
/** Deletes all example documents written by the snippets in this class. */
void deleteAll() {
    deleteCollection("cities");
    deleteCollection("users");
}
/** Deletes the collection at {@code path} in batches of 50 on the shared executor. */
private void deleteCollection(final String path) {
    deleteCollection(db.collection(path), 50, EXECUTOR);
}
/**
 * Snippet: obtaining a Firestore instance and enabling offline persistence.
 * The local {@code db} intentionally shadows the field — the snippet must be
 * self-contained for the docs.
 */
public void setup() {
    // [START get_firestore_instance]
    FirebaseFirestore db = FirebaseFirestore.getInstance();
    // [END get_firestore_instance]

    // [START set_firestore_settings]
    FirebaseFirestoreSettings settings = new FirebaseFirestoreSettings.Builder()
            .setPersistenceEnabled(true)
            .build();
    db.setFirestoreSettings(settings);
    // [END set_firestore_settings]
}
/** Snippet: configuring an unlimited offline cache size. */
public void setupCacheSize() {
    // [START fs_setup_cache]
    FirebaseFirestoreSettings settings = new FirebaseFirestoreSettings.Builder()
            .setCacheSizeBytes(FirebaseFirestoreSettings.CACHE_SIZE_UNLIMITED)
            .build();
    db.setFirestoreSettings(settings);
    // [END fs_setup_cache]
}
/** Snippet: adding a document with an auto-generated ID via {@code add()}. */
public void addAdaLovelace() {
    // [START add_ada_lovelace]
    // Create a new user with a first and last name
    Map<String, Object> user = new HashMap<>();
    user.put("first", "Ada");
    user.put("last", "Lovelace");
    user.put("born", 1815);

    // Add a new document with a generated ID
    db.collection("users")
            .add(user)
            .addOnSuccessListener(new OnSuccessListener<DocumentReference>() {
                @Override
                public void onSuccess(DocumentReference documentReference) {
                    Log.d(TAG, "DocumentSnapshot added with ID: " + documentReference.getId());
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error adding document", e);
                }
            });
    // [END add_ada_lovelace]
}
/** Snippet: adding a second document with an auto-generated ID via {@code add()}. */
public void addAlanTuring() {
    // [START add_alan_turing]
    // Create a new user with a first, middle, and last name
    Map<String, Object> user = new HashMap<>();
    user.put("first", "Alan");
    user.put("middle", "Mathison");
    user.put("last", "Turing");
    user.put("born", 1912);

    // Add a new document with a generated ID
    db.collection("users")
            .add(user)
            .addOnSuccessListener(new OnSuccessListener<DocumentReference>() {
                @Override
                public void onSuccess(DocumentReference documentReference) {
                    Log.d(TAG, "DocumentSnapshot added with ID: " + documentReference.getId());
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error adding document", e);
                }
            });
    // [END add_alan_turing]
}
/** Snippet: one-shot read of every document in the "users" collection. */
public void getAllUsers() {
    // [START get_all_users]
    db.collection("users")
            .get()
            .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                @Override
                public void onComplete(@NonNull Task<QuerySnapshot> task) {
                    if (task.isSuccessful()) {
                        for (QueryDocumentSnapshot document : task.getResult()) {
                            Log.d(TAG, document.getId() + " => " + document.getData());
                        }
                    } else {
                        Log.w(TAG, "Error getting documents.", task.getException());
                    }
                }
            });
    // [END get_all_users]
}
/** Snippet: attaching a snapshot listener to a filtered query. */
public void listenForUsers() {
    // [START listen_for_users]
    // Listen for users born before 1900.
    //
    // You will get a first snapshot with the initial results and a new
    // snapshot each time there is a change in the results.
    db.collection("users")
            .whereLessThan("born", 1900)
            .addSnapshotListener(new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot snapshots,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "Listen failed.", e);
                        return;
                    }

                    Log.d(TAG, "Current users born before 1900: " + snapshots);
                }
            });
    // [END listen_for_users]
}
/** Snippet: creating a DocumentReference by collection + document ID. */
public void docReference() {
    // [START doc_reference]
    DocumentReference alovelaceDocumentRef = db.collection("users").document("alovelace");
    // [END doc_reference]
}
/** Snippet: creating a CollectionReference. */
public void collectionReference() {
    // [START collection_reference]
    CollectionReference usersCollectionRef = db.collection("users");
    // [END collection_reference]
}
/** Snippet: referencing a document inside a subcollection. */
public void subcollectionReference() {
    // [START subcollection_reference]
    DocumentReference messageRef = db
            .collection("rooms").document("roomA")
            .collection("messages").document("message1");
    // [END subcollection_reference]
}
/** Snippet: creating a DocumentReference from a single slash-separated path. */
public void docReferenceAlternate() {
    // [START doc_reference_alternate]
    DocumentReference alovelaceDocumentRef = db.document("users/alovelace");
    // [END doc_reference_alternate]
}
// [START city_class]
public class City {
    // Fields mirror the "cities" documents written by exampleData(); the
    // no-arg constructor and getters are required by Firestore's
    // toObject() deserialization.
    private String name;
    private String state;
    private String country;
    private boolean capital;
    private long population;
    private List<String> regions;

    public City() {}

    public City(String name, String state, String country, boolean capital, long population, List<String> regions) {
        // [START_EXCLUDE]
        this.name = name;
        this.state = state;
        this.country = country;
        this.capital = capital;
        this.population = population;
        this.regions = regions;
        // [END_EXCLUDE]
    }

    public String getName() {
        return name;
    }

    public String getState() {
        return state;
    }

    public String getCountry() {
        return country;
    }

    public boolean isCapital() {
        return capital;
    }

    public long getPopulation() {
        return population;
    }

    public List<String> getRegions() {
        return regions;
    }
}
/** Snippet: writing a document with an explicit ID via {@code set()}. */
public void setDocument() {
    // [START set_document]
    Map<String, Object> city = new HashMap<>();
    city.put("name", "Los Angeles");
    city.put("state", "CA");
    city.put("country", "USA");

    db.collection("cities").document("LA")
            .set(city)
            .addOnSuccessListener(new OnSuccessListener<Void>() {
                @Override
                public void onSuccess(Void aVoid) {
                    Log.d(TAG, "DocumentSnapshot successfully written!");
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error writing document", e);
                }
            });
    // [END set_document]

    Map<String, Object> data = new HashMap<>();
    // [START set_with_id]
    db.collection("cities").document("new-city-id").set(data);
    // [END set_with_id]
}
/** Snippet: the supported Firestore field data types, including nested maps. */
public void dataTypes() {
    // [START data_types]
    Map<String, Object> docData = new HashMap<>();
    docData.put("stringExample", "Hello world!");
    docData.put("booleanExample", true);
    docData.put("numberExample", 3.14159265);
    docData.put("dateExample", new Timestamp(new Date()));
    docData.put("listExample", Arrays.asList(1, 2, 3));
    docData.put("nullExample", null);

    Map<String, Object> nestedData = new HashMap<>();
    nestedData.put("a", 5);
    nestedData.put("b", true);

    docData.put("objectExample", nestedData);

    db.collection("data").document("one")
            .set(docData)
            .addOnSuccessListener(new OnSuccessListener<Void>() {
                @Override
                public void onSuccess(Void aVoid) {
                    Log.d(TAG, "DocumentSnapshot successfully written!");
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error writing document", e);
                }
            });
    // [END data_types]
}
/**
 * Snippet: writing a custom {@link City} object as a document.
 * Fixed region typo: "sorcal" -> "socal", consistent with exampleData()'s
 * regions for LA.
 */
public void addCustomClass() {
    // [START add_custom_class]
    City city = new City("Los Angeles", "CA", "USA",
            false, 5000000L, Arrays.asList("west_coast", "socal"));
    db.collection("cities").document("LA").set(city);
    // [END add_custom_class]
}
/** Snippet: adding a document with a generated ID and logging the result. */
public void addDocument() {
    // [START add_document]
    // Add a new document with a generated id.
    Map<String, Object> data = new HashMap<>();
    data.put("name", "Tokyo");
    data.put("country", "Japan");

    db.collection("cities")
            .add(data)
            .addOnSuccessListener(new OnSuccessListener<DocumentReference>() {
                @Override
                public void onSuccess(DocumentReference documentReference) {
                    Log.d(TAG, "DocumentSnapshot written with ID: " + documentReference.getId());
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error adding document", e);
                }
            });
    // [END add_document]
}
/** Snippet: reserving an auto-generated document ID before writing to it. */
public void newDocument() {
    // [START new_document]
    Map<String, Object> data = new HashMap<>();

    DocumentReference newCityRef = db.collection("cities").document();

    // Later...
    newCityRef.set(data);
    // [END new_document]
}
/** Snippet: updating a single field of an existing document. */
public void updateDocument() {
    // [START update_document]
    DocumentReference washingtonRef = db.collection("cities").document("DC");

    // Set the "isCapital" field of the city 'DC'
    washingtonRef
            .update("capital", true)
            .addOnSuccessListener(new OnSuccessListener<Void>() {
                @Override
                public void onSuccess(Void aVoid) {
                    Log.d(TAG, "DocumentSnapshot successfully updated!");
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error updating document", e);
                }
            });
    // [END update_document]
}
/** Snippet: atomic array element add/remove with FieldValue.arrayUnion/arrayRemove. */
public void updateDocumentArray() {
    // [START update_document_array]
    DocumentReference washingtonRef = db.collection("cities").document("DC");

    // Atomically add a new region to the "regions" array field.
    washingtonRef.update("regions", FieldValue.arrayUnion("greater_virginia"));

    // Atomically remove a region from the "regions" array field.
    washingtonRef.update("regions", FieldValue.arrayRemove("east_coast"));
    // [END update_document_array]
}
/** Snippet: atomic numeric increment with FieldValue.increment. */
public void updateDocumentIncrement() {
    // [START update_document_increment]
    DocumentReference washingtonRef = db.collection("cities").document("DC");

    // Atomically increment the population of the city by 50.
    washingtonRef.update("population", FieldValue.increment(50));
    // [END update_document_increment]
}
/** Snippet: updating nested map fields using dot notation. */
public void updateDocumentNested() {
    // [START update_document_nested]
    // Assume the document contains:
    // {
    //   name: "Frank",
    //   favorites: { food: "Pizza", color: "Blue", subject: "recess" }
    //   age: 12
    // }
    //
    // To update age and favorite color:
    db.collection("users").document("frank")
            .update(
                    "age", 13,
                    "favorites.color", "Red"
            );
    // [END update_document_nested]
}
/** Snippet: upserting a single field with set(..., SetOptions.merge()). */
public void setFieldWithMerge() {
    // [START set_field_with_merge]
    // Update one field, creating the document if it does not already exist.
    Map<String, Object> data = new HashMap<>();
    data.put("capital", true);

    db.collection("cities").document("BJ")
            .set(data, SetOptions.merge());
    // [END set_field_with_merge]
}
/** Snippet: deleting a document (does not delete its subcollections). */
public void deleteDocument() {
    // [START delete_document]
    db.collection("cities").document("DC")
            .delete()
            .addOnSuccessListener(new OnSuccessListener<Void>() {
                @Override
                public void onSuccess(Void aVoid) {
                    Log.d(TAG, "DocumentSnapshot successfully deleted!");
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Error deleting document", e);
                }
            });
    // [END delete_document]
}
/**
 * Snippet: a read-modify-write transaction with no result value.
 * NOTE(review): getDouble("population") returns a boxed Double and will NPE on
 * unboxing if the field is missing — acceptable for a doc snippet, but verify
 * before reusing in production code.
 */
public void transactions() {
    // [START transactions]
    final DocumentReference sfDocRef = db.collection("cities").document("SF");

    db.runTransaction(new Transaction.Function<Void>() {
        @Override
        public Void apply(Transaction transaction) throws FirebaseFirestoreException {
            DocumentSnapshot snapshot = transaction.get(sfDocRef);

            // Note: this could be done without a transaction
            //       by updating the population using FieldValue.increment()
            double newPopulation = snapshot.getDouble("population") + 1;
            transaction.update(sfDocRef, "population", newPopulation);

            // Success
            return null;
        }
    }).addOnSuccessListener(new OnSuccessListener<Void>() {
        @Override
        public void onSuccess(Void aVoid) {
            Log.d(TAG, "Transaction success!");
        }
    })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Transaction failure.", e);
                }
            });
    // [END transactions]
}
/**
 * Snippet: a transaction that returns a value, aborting via a thrown
 * FirebaseFirestoreException when the invariant is violated.
 * NOTE(review): same boxed-Double unboxing hazard as transactions() if the
 * "population" field is absent.
 */
public void transactionPromise() {
    // [START transaction_with_result]
    final DocumentReference sfDocRef = db.collection("cities").document("SF");

    db.runTransaction(new Transaction.Function<Double>() {
        @Override
        public Double apply(Transaction transaction) throws FirebaseFirestoreException {
            DocumentSnapshot snapshot = transaction.get(sfDocRef);
            double newPopulation = snapshot.getDouble("population") + 1;
            if (newPopulation <= 1000000) {
                transaction.update(sfDocRef, "population", newPopulation);
                return newPopulation;
            } else {
                throw new FirebaseFirestoreException("Population too high",
                        FirebaseFirestoreException.Code.ABORTED);
            }
        }
    }).addOnSuccessListener(new OnSuccessListener<Double>() {
        @Override
        public void onSuccess(Double result) {
            Log.d(TAG, "Transaction success: " + result);
        }
    })
            .addOnFailureListener(new OnFailureListener() {
                @Override
                public void onFailure(@NonNull Exception e) {
                    Log.w(TAG, "Transaction failure.", e);
                }
            });
    // [END transaction_with_result]
}
/** Snippet: combining set/update/delete into one atomic WriteBatch commit. */
public void writeBatch() {
    // [START write_batch]
    // Get a new write batch
    WriteBatch batch = db.batch();

    // Set the value of 'NYC'
    DocumentReference nycRef = db.collection("cities").document("NYC");
    batch.set(nycRef, new City());

    // Update the population of 'SF'
    DocumentReference sfRef = db.collection("cities").document("SF");
    batch.update(sfRef, "population", 1000000L);

    // Delete the city 'LA'
    DocumentReference laRef = db.collection("cities").document("LA");
    batch.delete(laRef);

    // Commit the batch
    batch.commit().addOnCompleteListener(new OnCompleteListener<Void>() {
        @Override
        public void onComplete(@NonNull Task<Void> task) {
            // ...
        }
    });
    // [END write_batch]
}
/** Snippet: one-shot read of a single document, handling the not-found case. */
public void getDocument() {
    // [START get_document]
    DocumentReference docRef = db.collection("cities").document("SF");
    docRef.get().addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
        @Override
        public void onComplete(@NonNull Task<DocumentSnapshot> task) {
            if (task.isSuccessful()) {
                DocumentSnapshot document = task.getResult();
                if (document.exists()) {
                    Log.d(TAG, "DocumentSnapshot data: " + document.getData());
                } else {
                    Log.d(TAG, "No such document");
                }
            } else {
                Log.d(TAG, "get failed with ", task.getException());
            }
        }
    });
    // [END get_document]
}
/** Snippet: reading a document from a specific source (here, the offline cache). */
public void getDocumentWithOptions() {
    // [START get_document_options]
    DocumentReference docRef = db.collection("cities").document("SF");

    // Source can be CACHE, SERVER, or DEFAULT.
    Source source = Source.CACHE;

    // Get the document, forcing the SDK to use the offline cache
    docRef.get(source).addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
        @Override
        public void onComplete(@NonNull Task<DocumentSnapshot> task) {
            if (task.isSuccessful()) {
                // Document found in the offline cache
                DocumentSnapshot document = task.getResult();
                Log.d(TAG, "Cached document data: " + document.getData());
            } else {
                Log.d(TAG, "Cached get failed: ", task.getException());
            }
        }
    });
    // [END get_document_options]
}
/** Snippet: deserializing a document into a custom {@link City} object. */
public void customObjects() {
    // [START custom_objects]
    DocumentReference docRef = db.collection("cities").document("BJ");
    docRef.get().addOnSuccessListener(new OnSuccessListener<DocumentSnapshot>() {
        @Override
        public void onSuccess(DocumentSnapshot documentSnapshot) {
            City city = documentSnapshot.toObject(City.class);
        }
    });
    // [END custom_objects]
}
/** Snippet: real-time listener on a single document. */
public void listenToDocument() {
    // [START listen_document]
    final DocumentReference docRef = db.collection("cities").document("SF");
    docRef.addSnapshotListener(new EventListener<DocumentSnapshot>() {
        @Override
        public void onEvent(@Nullable DocumentSnapshot snapshot,
                            @Nullable FirebaseFirestoreException e) {
            if (e != null) {
                Log.w(TAG, "Listen failed.", e);
                return;
            }

            if (snapshot != null && snapshot.exists()) {
                Log.d(TAG, "Current data: " + snapshot.getData());
            } else {
                Log.d(TAG, "Current data: null");
            }
        }
    });
    // [END listen_document]
}
/** Snippet: distinguishing local (pending-write) snapshots from server snapshots. */
public void listenToDocumentLocal() {
    // [START listen_document_local]
    final DocumentReference docRef = db.collection("cities").document("SF");
    docRef.addSnapshotListener(new EventListener<DocumentSnapshot>() {
        @Override
        public void onEvent(@Nullable DocumentSnapshot snapshot,
                            @Nullable FirebaseFirestoreException e) {
            if (e != null) {
                Log.w(TAG, "Listen failed.", e);
                return;
            }

            String source = snapshot != null && snapshot.getMetadata().hasPendingWrites()
                    ? "Local" : "Server";

            if (snapshot != null && snapshot.exists()) {
                Log.d(TAG, source + " data: " + snapshot.getData());
            } else {
                Log.d(TAG, source + " data: null");
            }
        }
    });
    // [END listen_document_local]
}
/** Snippet: listener that also fires on metadata-only changes. */
public void listenWithMetadata() {
    // [START listen_with_metadata]
    // Listen for metadata changes to the document.
    DocumentReference docRef = db.collection("cities").document("SF");
    docRef.addSnapshotListener(MetadataChanges.INCLUDE, new EventListener<DocumentSnapshot>() {
        @Override
        public void onEvent(@Nullable DocumentSnapshot snapshot,
                            @Nullable FirebaseFirestoreException e) {
            // ...
        }
    });
    // [END listen_with_metadata]
}
/** Snippet: one-shot read of all documents matching a query. */
public void getMultipleDocs() {
    // [START get_multiple]
    db.collection("cities")
            .whereEqualTo("capital", true)
            .get()
            .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                @Override
                public void onComplete(@NonNull Task<QuerySnapshot> task) {
                    if (task.isSuccessful()) {
                        for (QueryDocumentSnapshot document : task.getResult()) {
                            Log.d(TAG, document.getId() + " => " + document.getData());
                        }
                    } else {
                        Log.d(TAG, "Error getting documents: ", task.getException());
                    }
                }
            });
    // [END get_multiple]
}
/** Snippet: one-shot read of every document in a collection. */
public void getAllDocs() {
    // [START get_multiple_all]
    db.collection("cities")
            .get()
            .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                @Override
                public void onComplete(@NonNull Task<QuerySnapshot> task) {
                    if (task.isSuccessful()) {
                        for (QueryDocumentSnapshot document : task.getResult()) {
                            Log.d(TAG, document.getId() + " => " + document.getData());
                        }
                    } else {
                        Log.d(TAG, "Error getting documents: ", task.getException());
                    }
                }
            });
    // [END get_multiple_all]
}
/** Snippet: real-time listener over a query, rebuilding a list on each snapshot. */
public void listenToMultiple() {
    // [START listen_multiple]
    db.collection("cities")
            .whereEqualTo("state", "CA")
            .addSnapshotListener(new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot value,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "Listen failed.", e);
                        return;
                    }

                    List<String> cities = new ArrayList<>();
                    for (QueryDocumentSnapshot doc : value) {
                        if (doc.get("name") != null) {
                            cities.add(doc.getString("name"));
                        }
                    }
                    Log.d(TAG, "Current cites in CA: " + cities);
                }
            });
    // [END listen_multiple]
}
/** Snippet: reacting to per-document ADDED/MODIFIED/REMOVED changes. */
public void listenToDiffs() {
    // [START listen_diffs]
    db.collection("cities")
            .whereEqualTo("state", "CA")
            .addSnapshotListener(new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot snapshots,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "listen:error", e);
                        return;
                    }

                    for (DocumentChange dc : snapshots.getDocumentChanges()) {
                        switch (dc.getType()) {
                            case ADDED:
                                Log.d(TAG, "New city: " + dc.getDocument().getData());
                                break;
                            case MODIFIED:
                                Log.d(TAG, "Modified city: " + dc.getDocument().getData());
                                break;
                            case REMOVED:
                                Log.d(TAG, "Removed city: " + dc.getDocument().getData());
                                break;
                        }
                    }
                }
            });
    // [END listen_diffs]
}
/** Snippet: detecting when the listener has caught up with the server state. */
public void listenState() {
    // [START listen_state]
    db.collection("cities")
            .whereEqualTo("state", "CA")
            .addSnapshotListener(new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot snapshots,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "listen:error", e);
                        return;
                    }

                    for (DocumentChange dc : snapshots.getDocumentChanges()) {
                        if (dc.getType() == Type.ADDED) {
                            Log.d(TAG, "New city: " + dc.getDocument().getData());
                        }
                    }

                    if (!snapshots.getMetadata().isFromCache()) {
                        Log.d(TAG, "Got initial state.");
                    }
                }
            });
    // [END listen_state]
}
/** Snippet: detaching a snapshot listener via its ListenerRegistration. */
public void detachListener() {
    // [START detach_listener]
    Query query = db.collection("cities");
    ListenerRegistration registration = query.addSnapshotListener(
            new EventListener<QuerySnapshot>() {
                // [START_EXCLUDE]
                @Override
                public void onEvent(@Nullable QuerySnapshot snapshots,
                                    @Nullable FirebaseFirestoreException e) {
                    // ...
                }
                // [END_EXCLUDE]
            });

    // ...

    // Stop listening to changes
    registration.remove();
    // [END detach_listener]
}
/** Snippet: handling listener errors (after an error, no further events fire). */
public void handleListenErrors() {
    // [START handle_listen_errors]
    db.collection("cities")
            .addSnapshotListener(new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot snapshots,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "listen:error", e);
                        return;
                    }

                    for (DocumentChange dc : snapshots.getDocumentChanges()) {
                        if (dc.getType() == Type.ADDED) {
                            Log.d(TAG, "New city: " + dc.getDocument().getData());
                        }
                    }
                }
            });
    // [END handle_listen_errors]
}
/** Seeds the "cities" collection with the example documents used by the query snippets. */
public void exampleData() {
    // [START example_data]
    CollectionReference cities = db.collection("cities");

    Map<String, Object> data1 = new HashMap<>();
    data1.put("name", "San Francisco");
    data1.put("state", "CA");
    data1.put("country", "USA");
    data1.put("capital", false);
    data1.put("population", 860000);
    data1.put("regions", Arrays.asList("west_coast", "norcal"));
    cities.document("SF").set(data1);

    Map<String, Object> data2 = new HashMap<>();
    data2.put("name", "Los Angeles");
    data2.put("state", "CA");
    data2.put("country", "USA");
    data2.put("capital", false);
    data2.put("population", 3900000);
    data2.put("regions", Arrays.asList("west_coast", "socal"));
    cities.document("LA").set(data2);

    Map<String, Object> data3 = new HashMap<>();
    data3.put("name", "Washington D.C.");
    data3.put("state", null);
    data3.put("country", "USA");
    data3.put("capital", true);
    data3.put("population", 680000);
    data3.put("regions", Arrays.asList("east_coast"));
    cities.document("DC").set(data3);

    Map<String, Object> data4 = new HashMap<>();
    data4.put("name", "Tokyo");
    data4.put("state", null);
    data4.put("country", "Japan");
    data4.put("capital", true);
    data4.put("population", 9000000);
    data4.put("regions", Arrays.asList("kanto", "honshu"));
    cities.document("TOK").set(data4);

    Map<String, Object> data5 = new HashMap<>();
    data5.put("name", "Beijing");
    data5.put("state", null);
    data5.put("country", "China");
    data5.put("capital", true);
    data5.put("population", 21500000);
    data5.put("regions", Arrays.asList("jingjinji", "hebei"));
    cities.document("BJ").set(data5);
    // [END example_data]
}
/** Seeds "landmarks" subcollections under each city for the collection-group query snippet. */
public void exampleDataCollectionGroup() {
    // [START fs_collection_group_query_data_setup]
    CollectionReference citiesRef = db.collection("cities");

    Map<String, Object> ggbData = new HashMap<>();
    ggbData.put("name", "Golden Gate Bridge");
    ggbData.put("type", "bridge");
    citiesRef.document("SF").collection("landmarks").add(ggbData);

    Map<String, Object> lohData = new HashMap<>();
    lohData.put("name", "Legion of Honor");
    lohData.put("type", "museum");
    citiesRef.document("SF").collection("landmarks").add(lohData);

    Map<String, Object> gpData = new HashMap<>();
    gpData.put("name", "Griffith Park");
    gpData.put("type", "park");
    citiesRef.document("LA").collection("landmarks").add(gpData);

    Map<String, Object> tgData = new HashMap<>();
    tgData.put("name", "The Getty");
    tgData.put("type", "museum");
    citiesRef.document("LA").collection("landmarks").add(tgData);

    Map<String, Object> lmData = new HashMap<>();
    lmData.put("name", "Lincoln Memorial");
    lmData.put("type", "memorial");
    citiesRef.document("DC").collection("landmarks").add(lmData);

    Map<String, Object> nasaData = new HashMap<>();
    nasaData.put("name", "National Air and Space Museum");
    nasaData.put("type", "museum");
    citiesRef.document("DC").collection("landmarks").add(nasaData);

    Map<String, Object> upData = new HashMap<>();
    upData.put("name", "Ueno Park");
    upData.put("type", "park");
    citiesRef.document("TOK").collection("landmarks").add(upData);

    Map<String, Object> nmData = new HashMap<>();
    nmData.put("name", "National Museum of Nature and Science");
    nmData.put("type", "museum");
    citiesRef.document("TOK").collection("landmarks").add(nmData);

    Map<String, Object> jpData = new HashMap<>();
    jpData.put("name", "Jingshan Park");
    jpData.put("type", "park");
    citiesRef.document("BJ").collection("landmarks").add(jpData);

    Map<String, Object> baoData = new HashMap<>();
    baoData.put("name", "Beijing Ancient Observatory");
    baoData.put("type", "museum");
    citiesRef.document("BJ").collection("landmarks").add(baoData);
    // [END fs_collection_group_query_data_setup]
}
/** Snippet: basic single-field query filters. */
public void simpleQueries() {
    // [START simple_queries]
    // Create a reference to the cities collection
    CollectionReference citiesRef = db.collection("cities");

    // Create a query against the collection.
    Query query = citiesRef.whereEqualTo("state", "CA");
    // [END simple_queries]

    // [START simple_query_capital]
    Query capitalCities = db.collection("cities").whereEqualTo("capital", true);
    // [END simple_query_capital]

    // [START example_filters]
    Query stateQuery = citiesRef.whereEqualTo("state", "CA");
    Query populationQuery = citiesRef.whereLessThan("population", 100000);
    Query nameQuery = citiesRef.whereGreaterThanOrEqualTo("name", "San Francisco");
    // [END example_filters]

    // [START simple_query_not_equal]
    Query notCapitalQuery = citiesRef.whereNotEqualTo("capital", false);
    // [END simple_query_not_equal]
}
/** Snippet: array-contains filter. */
public void arrayContainsQueries() {
    // [START array_contains_filter]
    CollectionReference citiesRef = db.collection("cities");

    citiesRef.whereArrayContains("regions", "west_coast");
    // [END array_contains_filter]
}
/** Snippet: array-contains-any filter. */
public void arrayContainsAnyQueries() {
    // [START array_contains_any_filter]
    CollectionReference citiesRef = db.collection("cities");

    citiesRef.whereArrayContainsAny("regions", Arrays.asList("west_coast", "east_coast"));
    // [END array_contains_any_filter]
}
/** Snippet: in / not-in filters, including array-valued membership. */
public void inQueries() {
    // [START in_filter]
    CollectionReference citiesRef = db.collection("cities");

    citiesRef.whereIn("country", Arrays.asList("USA", "Japan"));
    // [END in_filter]

    // [START not_in_filter]
    citiesRef.whereNotIn("country", Arrays.asList("USA", "Japan"));
    // [END not_in_filter]

    // [START in_filter_with_array]
    citiesRef.whereIn("regions", Arrays.asList(new String[]{"west_coast"}, new String[]{"east_coast"}));
    // [END in_filter_with_array]
}
/** Snippet: valid combinations of chained equality and range filters. */
public void compoundQueries() {
    CollectionReference citiesRef = db.collection("cities");

    // [START chain_filters]
    citiesRef.whereEqualTo("state", "CO").whereEqualTo("name", "Denver");
    citiesRef.whereEqualTo("state", "CA").whereLessThan("population", 1000000);
    // [END chain_filters]

    // [START valid_range_filters]
    citiesRef.whereGreaterThanOrEqualTo("state", "CA")
            .whereLessThanOrEqualTo("state", "IN");
    citiesRef.whereEqualTo("state", "CA")
            .whereGreaterThan("population", 1000000);
    // [END valid_range_filters]
}
/** Snippet: an INVALID query (range filters on two different fields) — expected to throw. */
public void compoundQueriesInvalid() {
    CollectionReference citiesRef = db.collection("cities");

    // [START invalid_range_filters]
    citiesRef.whereGreaterThanOrEqualTo("state", "CA").whereGreaterThan("population", 100000);
    // [END invalid_range_filters]
}
/** Snippet: ordering and limiting query results. */
public void orderAndLimit() {
    CollectionReference citiesRef = db.collection("cities");

    // [START order_and_limit]
    citiesRef.orderBy("name").limit(3);
    // [END order_and_limit]

    // [START order_and_limit_desc]
    citiesRef.orderBy("name", Direction.DESCENDING).limit(3);
    // [END order_and_limit_desc]

    // [START order_by_multiple]
    citiesRef.orderBy("state").orderBy("population", Direction.DESCENDING);
    // [END order_by_multiple]

    // [START filter_and_order]
    citiesRef.whereGreaterThan("population", 100000).orderBy("population").limit(2);
    // [END filter_and_order]

    // [START valid_filter_and_order]
    citiesRef.whereGreaterThan("population", 100000).orderBy("population");
    // [END valid_filter_and_order]
}
/** Snippet: an INVALID query (range filter and first orderBy on different fields) — expected to throw. */
public void orderAndLimitInvalid() {
    CollectionReference citiesRef = db.collection("cities");

    // [START invalid_filter_and_order]
    citiesRef.whereGreaterThan("population", 100000).orderBy("country");
    // [END invalid_filter_and_order]
}
/** Snippet: query cursors — startAt/endAt, snapshot cursors, pagination, multi-field cursors. */
public void queryStartAtEndAt() {
    // [START query_start_at_single]
    // Get all cities with a population >= 1,000,000, ordered by population,
    db.collection("cities")
            .orderBy("population")
            .startAt(1000000);
    // [END query_start_at_single]

    // [START query_end_at_single]
    // Get all cities with a population <= 1,000,000, ordered by population,
    db.collection("cities")
            .orderBy("population")
            .endAt(1000000);
    // [END query_end_at_single]

    // [START query_start_at_doc_snapshot]
    // Get the data for "San Francisco"
    db.collection("cities").document("SF")
            .get()
            .addOnSuccessListener(new OnSuccessListener<DocumentSnapshot>() {
                @Override
                public void onSuccess(DocumentSnapshot documentSnapshot) {
                    // Get all cities with a population bigger than San Francisco.
                    Query biggerThanSf = db.collection("cities")
                            .orderBy("population")
                            .startAt(documentSnapshot);

                    // ...
                }
            });
    // [END query_start_at_doc_snapshot]

    // [START query_pagination]
    // Construct query for first 25 cities, ordered by population
    Query first = db.collection("cities")
            .orderBy("population")
            .limit(25);

    first.get()
            .addOnSuccessListener(new OnSuccessListener<QuerySnapshot>() {
                @Override
                public void onSuccess(QuerySnapshot documentSnapshots) {
                    // ...

                    // Get the last visible document
                    DocumentSnapshot lastVisible = documentSnapshots.getDocuments()
                            .get(documentSnapshots.size() -1);

                    // Construct a new query starting at this document,
                    // get the next 25 cities.
                    Query next = db.collection("cities")
                            .orderBy("population")
                            .startAfter(lastVisible)
                            .limit(25);

                    // Use the query for pagination
                    // ...
                }
            });
    // [END query_pagination]

    // [START multi_cursor]
    // Will return all Springfields
    db.collection("cities")
            .orderBy("name")
            .orderBy("state")
            .startAt("Springfield");

    // Will return "Springfield, Missouri" and "Springfield, Wisconsin"
    db.collection("cities")
            .orderBy("name")
            .orderBy("state")
            .startAt("Springfield", "Missouri");
    // [END multi_cursor]
}
/**
 * Doc snippet: a collection-group query — queries every subcollection named
 * "landmarks" across the database, filtered to documents whose "type" field
 * equals "museum", logging each result's id and data.
 */
public void collectionGroupQuery() {
    // [START fs_collection_group_query]
    db.collectionGroup("landmarks").whereEqualTo("type", "museum").get()
            .addOnSuccessListener(new OnSuccessListener<QuerySnapshot>() {
                @Override
                public void onSuccess(QuerySnapshot queryDocumentSnapshots) {
                    // [START_EXCLUDE]
                    for (QueryDocumentSnapshot snap : queryDocumentSnapshots) {
                        Log.d(TAG, snap.getId() + " => " + snap.getData());
                    }
                    // [END_EXCLUDE]
                }
            });
    // [END fs_collection_group_query]
}
// [START delete_collection]
/**
 * Delete all documents in a collection. Uses an Executor to perform work on a background
 * thread. This does *not* automatically discover and delete subcollections.
 *
 * @param collection the collection whose documents are deleted
 * @param batchSize  maximum number of documents deleted per batch; a returned
 *                   batch smaller than this signals the collection is drained
 * @param executor   executor the blocking delete loop runs on
 * @return a Task that completes when every batch has been deleted
 */
private Task<Void> deleteCollection(final CollectionReference collection,
                                    final int batchSize,
                                    Executor executor) {
    // Perform the delete operation on the provided Executor, which allows us to use
    // simpler synchronous logic without blocking the main thread.
    return Tasks.call(executor, new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            // Get the first batch of documents in the collection,
            // ordered by document ID so paging cursors are stable.
            Query query = collection.orderBy(FieldPath.documentId()).limit(batchSize);
            // Get a list of deleted documents
            List<DocumentSnapshot> deleted = deleteQueryBatch(query);
            // While the deleted documents in the last batch indicate that there
            // may still be more documents in the collection, page down to the
            // next batch and delete again
            while (deleted.size() >= batchSize) {
                // Move the query cursor to start after the last doc in the batch
                DocumentSnapshot last = deleted.get(deleted.size() - 1);
                query = collection.orderBy(FieldPath.documentId())
                        .startAfter(last.getId())
                        .limit(batchSize);
                deleted = deleteQueryBatch(query);
            }
            return null;
        }
    });
}
/**
 * Delete all results from a query in a single WriteBatch. Must be run on a worker thread
 * to avoid blocking/crashing the main thread.
 *
 * @param query the query whose matching documents are deleted
 * @return the snapshots of the documents that were deleted
 * @throws Exception if fetching the query or committing the batch fails
 *         (propagated from {@code Tasks.await})
 */
@WorkerThread
private List<DocumentSnapshot> deleteQueryBatch(final Query query) throws Exception {
    // Blocking fetch — safe here because we are on a worker thread.
    QuerySnapshot querySnapshot = Tasks.await(query.get());
    WriteBatch batch = query.getFirestore().batch();
    for (QueryDocumentSnapshot snapshot : querySnapshot) {
        batch.delete(snapshot.getReference());
    }
    // Commit all deletes atomically and wait for completion.
    Tasks.await(batch.commit());
    return querySnapshot.getDocuments();
}
// [END delete_collection]
/**
 * Doc snippets for toggling network access: disableNetwork() forces the
 * client to serve from its local cache, enableNetwork() resumes syncing
 * with the backend. Each completion listener fires once the toggle applies.
 */
public void toggleOffline() {
    // [START disable_network]
    db.disableNetwork()
            .addOnCompleteListener(new OnCompleteListener<Void>() {
                @Override
                public void onComplete(@NonNull Task<Void> task) {
                    // Do offline things
                    // ...
                }
            });
    // [END disable_network]
    // [START enable_network]
    db.enableNetwork()
            .addOnCompleteListener(new OnCompleteListener<Void>() {
                @Override
                public void onComplete(@NonNull Task<Void> task) {
                    // Do online things
                    // ...
                }
            });
    // [END enable_network]
}
/**
 * Doc snippet: listening while offline. Registers a snapshot listener with
 * MetadataChanges.INCLUDE so events also fire on metadata-only changes, and
 * uses isFromCache() to report whether each result came from the local cache
 * or the server.
 *
 * @param db the Firestore instance to listen on (shadows the field of the
 *           same name)
 */
public void offlineListen(FirebaseFirestore db) {
    // [START offline_listen]
    db.collection("cities").whereEqualTo("state", "CA")
            .addSnapshotListener(MetadataChanges.INCLUDE, new EventListener<QuerySnapshot>() {
                @Override
                public void onEvent(@Nullable QuerySnapshot querySnapshot,
                                    @Nullable FirebaseFirestoreException e) {
                    if (e != null) {
                        Log.w(TAG, "Listen error", e);
                        return;
                    }
                    for (DocumentChange change : querySnapshot.getDocumentChanges()) {
                        if (change.getType() == Type.ADDED) {
                            Log.d(TAG, "New city:" + change.getDocument().getData());
                        }
                        String source = querySnapshot.getMetadata().isFromCache() ?
                                "local cache" : "server";
                        Log.d(TAG, "Data fetched from " + source);
                    }
                }
            });
    // [END offline_listen]
}
// Doc snippet: a POJO whose Date field is annotated @ServerTimestamp, so the
// SDK populates it with the server's timestamp when the field is null on write.
// The no-arg constructor is required for Firestore deserialization.
// [START server_timestamp_annotation]
public class MyObject {
    public String name;
    public @ServerTimestamp Date timestamp;
    public MyObject() {}
}
// [END server_timestamp_annotation]
/**
 * Doc snippet: writing a server-generated timestamp via a field update with
 * FieldValue.serverTimestamp() as the sentinel value.
 */
public void updateWithServerTimestamp() {
    // [START update_with_server_timestamp]
    DocumentReference docRef = db.collection("objects").document("some-id");
    // Update the timestamp field with the value from the server
    Map<String,Object> updates = new HashMap<>();
    updates.put("timestamp", FieldValue.serverTimestamp());
    docRef.update(updates).addOnCompleteListener(new OnCompleteListener<Void>() {
        // [START_EXCLUDE]
        @Override
        public void onComplete(@NonNull Task<Void> task) {}
        // [END_EXCLUDE]
    });
    // [END update_with_server_timestamp]
}
/**
 * Doc snippet: removing a single field from a document by updating it with
 * the FieldValue.delete() sentinel.
 */
public void updateDeleteField() {
    // [START update_delete_field]
    DocumentReference docRef = db.collection("cities").document("BJ");
    // Remove the 'capital' field from the document
    Map<String,Object> updates = new HashMap<>();
    updates.put("capital", FieldValue.delete());
    docRef.update(updates).addOnCompleteListener(new OnCompleteListener<Void>() {
        // [START_EXCLUDE]
        @Override
        public void onComplete(@NonNull Task<Void> task) {}
        // [END_EXCLUDE]
    });
    // [END update_delete_field]
}
}
| apache-2.0 |
topsecretabc/StressTest | app/src/main/java/com/sagereal/streettest/log/LogFile.java | 1433 | package com.sagereal.streettest.log;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import com.sagereal.streettest.util.IntSdCardUtil;
import com.sagereal.streettest.util.SysUtil;
import java.io.File;
/**
 * A simple append-only log file stored on external storage. Writes are
 * delegated to the platform file I/O exposed by {@link LogManager} and are
 * silently dropped when the SD card is not mounted or file creation failed.
 */
public class LogFile {
    /** Line terminator appended by {@link #writeLine(String)} (CR+LF). */
    private static final String NEW_LINE = "\r\n";

    /** Backing file, or {@code null} if none was created; writes then become no-ops. */
    protected File mLogFile;

    /** Creates a LogFile with no backing file; {@link #write(String)} does nothing. */
    public LogFile() {
        // Fix: the original assigned mLogFile = null twice in a row.
        this.mLogFile = null;
    }

    /**
     * Creates (or locates) the log file under {@code savePath} and, on success,
     * broadcasts a media-scanner intent for the containing directory so the new
     * file becomes visible to other apps / MTP.
     *
     * @param context  used to send the media-scanner broadcast
     * @param savePath directory path relative to external storage
     * @param fileName name of the log file within {@code savePath}
     */
    public LogFile(Context context, String savePath, String fileName) {
        // No redundant pre-assignment to null: the field defaults to null and
        // createFile() is the single source of truth for its value.
        this.mLogFile = createFile(savePath, fileName);
        if (this.mLogFile != null) {
            context.sendBroadcast(new Intent("android.intent.action.MEDIA_SCANNER_SCAN_FILE", Uri.fromFile(new File(Environment.getExternalStorageDirectory() + "/" + savePath + "/"))));
        }
    }

    /**
     * Appends {@code content} verbatim to the log file. A deliberate best-effort
     * write: skipped when the SD card is unmounted or no file exists.
     *
     * @param content raw text to append (no newline added)
     */
    public void write(String content) {
        if (IntSdCardUtil.isMounted() && this.mLogFile != null) {
            LogManager.getPlatform().getFileIO().append(this.mLogFile, content);
        }
    }

    /**
     * Appends one timestamped line: "&lt;timestamp&gt;: &lt;content&gt;\r\n".
     *
     * @param content the message body for the line
     */
    public void writeLine(String content) {
        write(SysUtil.getTimeStampString() + ": " + content + NEW_LINE);
    }

    /**
     * Resolves the target file inside the (created-if-needed) log directory.
     *
     * @return the File handle, or {@code null} when the directory could not be created
     */
    private File createFile(String savePath, String fileName) {
        File logDir = IntSdCardUtil.createDir(savePath);
        if (logDir != null) {
            return new File(logDir, fileName);
        }
        return null;
    }
}
| apache-2.0 |
googleads/google-ads-java | google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/resources/BiddingDataExclusion.java | 100332 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/resources/bidding_data_exclusion.proto
package com.google.ads.googleads.v8.resources;
/**
* <pre>
* Represents a bidding data exclusion.
* See "About data exclusions" at
* https://support.google.com/google-ads/answer/10370710.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.resources.BiddingDataExclusion}
*/
public final class BiddingDataExclusion extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v8.resources.BiddingDataExclusion)
BiddingDataExclusionOrBuilder {
private static final long serialVersionUID = 0L;
// Use BiddingDataExclusion.newBuilder() to construct.
private BiddingDataExclusion(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BiddingDataExclusion() {
resourceName_ = "";
scope_ = 0;
status_ = 0;
startDateTime_ = "";
endDateTime_ = "";
name_ = "";
description_ = "";
devices_ = java.util.Collections.emptyList();
campaigns_ = com.google.protobuf.LazyStringArrayList.EMPTY;
advertisingChannelTypes_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new BiddingDataExclusion();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private BiddingDataExclusion(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
java.lang.String s = input.readStringRequireUtf8();
resourceName_ = s;
break;
}
case 16: {
dataExclusionId_ = input.readInt64();
break;
}
case 24: {
int rawValue = input.readEnum();
scope_ = rawValue;
break;
}
case 32: {
int rawValue = input.readEnum();
status_ = rawValue;
break;
}
case 42: {
java.lang.String s = input.readStringRequireUtf8();
startDateTime_ = s;
break;
}
case 50: {
java.lang.String s = input.readStringRequireUtf8();
endDateTime_ = s;
break;
}
case 58: {
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
case 66: {
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
case 72: {
int rawValue = input.readEnum();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
devices_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
devices_.add(rawValue);
break;
}
case 74: {
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while(input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
devices_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000001;
}
devices_.add(rawValue);
}
input.popLimit(oldLimit);
break;
}
case 82: {
java.lang.String s = input.readStringRequireUtf8();
if (!((mutable_bitField0_ & 0x00000002) != 0)) {
campaigns_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000002;
}
campaigns_.add(s);
break;
}
case 88: {
int rawValue = input.readEnum();
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
advertisingChannelTypes_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000004;
}
advertisingChannelTypes_.add(rawValue);
break;
}
case 90: {
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while(input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
if (!((mutable_bitField0_ & 0x00000004) != 0)) {
advertisingChannelTypes_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00000004;
}
advertisingChannelTypes_.add(rawValue);
}
input.popLimit(oldLimit);
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
devices_ = java.util.Collections.unmodifiableList(devices_);
}
if (((mutable_bitField0_ & 0x00000002) != 0)) {
campaigns_ = campaigns_.getUnmodifiableView();
}
if (((mutable_bitField0_ & 0x00000004) != 0)) {
advertisingChannelTypes_ = java.util.Collections.unmodifiableList(advertisingChannelTypes_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v8.resources.BiddingDataExclusionProto.internal_static_google_ads_googleads_v8_resources_BiddingDataExclusion_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v8.resources.BiddingDataExclusionProto.internal_static_google_ads_googleads_v8_resources_BiddingDataExclusion_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v8.resources.BiddingDataExclusion.class, com.google.ads.googleads.v8.resources.BiddingDataExclusion.Builder.class);
}
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object resourceName_;
/**
* <pre>
* Immutable. The resource name of the data exclusion.
* Data exclusion resource names have the form:
* `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resourceName_ = s;
return s;
}
}
/**
* <pre>
* Immutable. The resource name of the data exclusion.
* Data exclusion resource names have the form:
* `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
resourceName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DATA_EXCLUSION_ID_FIELD_NUMBER = 2;
private long dataExclusionId_;
/**
* <pre>
* Output only. The ID of the data exclusion.
* </pre>
*
* <code>int64 data_exclusion_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The dataExclusionId.
*/
@java.lang.Override
public long getDataExclusionId() {
return dataExclusionId_;
}
public static final int SCOPE_FIELD_NUMBER = 3;
private int scope_;
/**
* <pre>
* The scope of the data exclusion.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
* @return The enum numeric value on the wire for scope.
*/
@java.lang.Override public int getScopeValue() {
return scope_;
}
/**
* <pre>
* The scope of the data exclusion.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
* @return The scope.
*/
@java.lang.Override public com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope getScope() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope result = com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.valueOf(scope_);
return result == null ? com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.UNRECOGNIZED : result;
}
public static final int STATUS_FIELD_NUMBER = 4;
private int status_;
/**
* <pre>
* Output only. The status of the data exclusion.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for status.
*/
@java.lang.Override public int getStatusValue() {
return status_;
}
/**
* <pre>
* Output only. The status of the data exclusion.
* </pre>
*
* <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The status.
*/
@java.lang.Override public com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus getStatus() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus result = com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.valueOf(status_);
return result == null ? com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.UNRECOGNIZED : result;
}
public static final int START_DATE_TIME_FIELD_NUMBER = 5;
private volatile java.lang.Object startDateTime_;
/**
* <pre>
* Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
* format.
* A data exclusion is backward looking and should be used for events that
* start in the past and end either in the past or future.
* </pre>
*
* <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The startDateTime.
*/
@java.lang.Override
public java.lang.String getStartDateTime() {
java.lang.Object ref = startDateTime_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
startDateTime_ = s;
return s;
}
}
/**
* <pre>
* Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
* format.
* A data exclusion is backward looking and should be used for events that
* start in the past and end either in the past or future.
* </pre>
*
* <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for startDateTime.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getStartDateTimeBytes() {
java.lang.Object ref = startDateTime_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
startDateTime_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int END_DATE_TIME_FIELD_NUMBER = 6;
private volatile java.lang.Object endDateTime_;
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The endDateTime.
*/
@java.lang.Override
public java.lang.String getEndDateTime() {
java.lang.Object ref = endDateTime_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
endDateTime_ = s;
return s;
}
}
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for endDateTime.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getEndDateTimeBytes() {
java.lang.Object ref = endDateTime_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
endDateTime_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int NAME_FIELD_NUMBER = 7;
private volatile java.lang.Object name_;
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DESCRIPTION_FIELD_NUMBER = 8;
private volatile java.lang.Object description_;
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @return The description.
*/
@java.lang.Override
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @return The bytes for description.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DEVICES_FIELD_NUMBER = 9;
private java.util.List<java.lang.Integer> devices_;
private static final com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, com.google.ads.googleads.v8.enums.DeviceEnum.Device> devices_converter_ =
new com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, com.google.ads.googleads.v8.enums.DeviceEnum.Device>() {
public com.google.ads.googleads.v8.enums.DeviceEnum.Device convert(java.lang.Integer from) {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.DeviceEnum.Device result = com.google.ads.googleads.v8.enums.DeviceEnum.Device.valueOf(from);
return result == null ? com.google.ads.googleads.v8.enums.DeviceEnum.Device.UNRECOGNIZED : result;
}
};
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return A list containing the devices.
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v8.enums.DeviceEnum.Device> getDevicesList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, com.google.ads.googleads.v8.enums.DeviceEnum.Device>(devices_, devices_converter_);
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return The count of devices.
*/
@java.lang.Override
public int getDevicesCount() {
return devices_.size();
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index of the element to return.
* @return The devices at the given index.
*/
@java.lang.Override
public com.google.ads.googleads.v8.enums.DeviceEnum.Device getDevices(int index) {
return devices_converter_.convert(devices_.get(index));
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return A list containing the enum numeric values on the wire for devices.
*/
@java.lang.Override
public java.util.List<java.lang.Integer>
getDevicesValueList() {
return devices_;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of devices at the given index.
*/
@java.lang.Override
public int getDevicesValue(int index) {
return devices_.get(index);
}
private int devicesMemoizedSerializedSize;
public static final int CAMPAIGNS_FIELD_NUMBER = 10;
private com.google.protobuf.LazyStringList campaigns_;
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @return A list containing the campaigns.
*/
public com.google.protobuf.ProtocolStringList
getCampaignsList() {
return campaigns_;
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @return The count of campaigns.
*/
public int getCampaignsCount() {
return campaigns_.size();
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param index The index of the element to return.
* @return The campaigns at the given index.
*/
public java.lang.String getCampaigns(int index) {
return campaigns_.get(index);
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param index The index of the value to return.
* @return The bytes of the campaigns at the given index.
*/
public com.google.protobuf.ByteString
getCampaignsBytes(int index) {
return campaigns_.getByteString(index);
}
public static final int ADVERTISING_CHANNEL_TYPES_FIELD_NUMBER = 11;
private java.util.List<java.lang.Integer> advertisingChannelTypes_;
private static final com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType> advertisingChannelTypes_converter_ =
new com.google.protobuf.Internal.ListAdapter.Converter<
java.lang.Integer, com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType>() {
public com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType convert(java.lang.Integer from) {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType result = com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType.valueOf(from);
return result == null ? com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType.UNRECOGNIZED : result;
}
};
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return A list containing the advertisingChannelTypes.
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType> getAdvertisingChannelTypesList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType>(advertisingChannelTypes_, advertisingChannelTypes_converter_);
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return The count of advertisingChannelTypes.
*/
  // Generated accessors for the repeated enum field `advertising_channel_types` (field 11).
  // The backing list stores raw wire integers; enum views are converted on demand.
  @java.lang.Override
  public int getAdvertisingChannelTypesCount() {
    return advertisingChannelTypes_.size();
  }
  /**
   * <pre>
   * The data_exclusion will apply to all the campaigns under the listed
   * channels retroactively as well as going forward when the scope of this
   * exclusion is CHANNEL.
   * The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
   * Note: a data exclusion with both advertising_channel_types and
   * campaign_ids is not supported.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
   * @param index The index of the element to return.
   * @return The advertisingChannelTypes at the given index.
   */
  @java.lang.Override
  public com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType getAdvertisingChannelTypes(int index) {
    // Convert the stored wire integer at `index` to its enum constant.
    return advertisingChannelTypes_converter_.convert(advertisingChannelTypes_.get(index));
  }
  /**
   * <pre>
   * The data_exclusion will apply to all the campaigns under the listed
   * channels retroactively as well as going forward when the scope of this
   * exclusion is CHANNEL.
   * The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
   * Note: a data exclusion with both advertising_channel_types and
   * campaign_ids is not supported.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
   * @return A list containing the enum numeric values on the wire for advertisingChannelTypes.
   */
  @java.lang.Override
  public java.util.List<java.lang.Integer>
  getAdvertisingChannelTypesValueList() {
    // Exposes the raw wire-value list directly (no enum conversion).
    return advertisingChannelTypes_;
  }
  /**
   * <pre>
   * The data_exclusion will apply to all the campaigns under the listed
   * channels retroactively as well as going forward when the scope of this
   * exclusion is CHANNEL.
   * The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
   * Note: a data exclusion with both advertising_channel_types and
   * campaign_ids is not supported.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
   * @param index The index of the value to return.
   * @return The enum numeric value on the wire of advertisingChannelTypes at the given index.
   */
  @java.lang.Override
  public int getAdvertisingChannelTypesValue(int index) {
    return advertisingChannelTypes_.get(index);
  }
  // Cached byte size of the packed advertising_channel_types payload; written by
  // getSerializedSize() and read by writeTo() to emit the packed-length prefix.
  private int advertisingChannelTypesMemoizedSerializedSize;
  // Tri-state initialization cache: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message to `output` in field-number order. Relies on
  // getSerializedSize() having populated the memoized packed-field sizes.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Side effect: computes and caches devicesMemoizedSerializedSize and
    // advertisingChannelTypesMemoizedSerializedSize, used below.
    getSerializedSize();
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    if (dataExclusionId_ != 0L) {
      output.writeInt64(2, dataExclusionId_);
    }
    if (scope_ != com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.UNSPECIFIED.getNumber()) {
      output.writeEnum(3, scope_);
    }
    if (status_ != com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.UNSPECIFIED.getNumber()) {
      output.writeEnum(4, status_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDateTime_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, startDateTime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endDateTime_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, endDateTime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 7, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 8, description_);
    }
    if (getDevicesList().size() > 0) {
      // 74 = (field 9 << 3) | wire type 2: packed-repeated tag for `devices`.
      output.writeUInt32NoTag(74);
      output.writeUInt32NoTag(devicesMemoizedSerializedSize);
    }
    for (int i = 0; i < devices_.size(); i++) {
      output.writeEnumNoTag(devices_.get(i));
    }
    for (int i = 0; i < campaigns_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 10, campaigns_.getRaw(i));
    }
    if (getAdvertisingChannelTypesList().size() > 0) {
      // 90 = (field 11 << 3) | wire type 2: packed-repeated tag for `advertising_channel_types`.
      output.writeUInt32NoTag(90);
      output.writeUInt32NoTag(advertisingChannelTypesMemoizedSerializedSize);
    }
    for (int i = 0; i < advertisingChannelTypes_.size(); i++) {
      output.writeEnumNoTag(advertisingChannelTypes_.get(i));
    }
    unknownFields.writeTo(output);
  }
  // Computes (and caches in memoizedSize) the serialized byte size. Also memoizes
  // the packed payload sizes for the two packed enum fields as a side effect,
  // which writeTo() depends on — do not reorder these blocks.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    if (dataExclusionId_ != 0L) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt64Size(2, dataExclusionId_);
    }
    if (scope_ != com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(3, scope_);
    }
    if (status_ != com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream
        .computeEnumSize(4, status_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDateTime_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, startDateTime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(endDateTime_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, endDateTime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, description_);
    }
    {
      // Packed `devices` (field 9): payload size + 1 tag byte + length-prefix varint.
      int dataSize = 0;
      for (int i = 0; i < devices_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream
          .computeEnumSizeNoTag(devices_.get(i));
      }
      size += dataSize;
      if (!getDevicesList().isEmpty()) {  size += 1;
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32SizeNoTag(dataSize);
      }devicesMemoizedSerializedSize = dataSize;
    }
    {
      // Repeated string `campaigns` (field 10): one 1-byte tag per element.
      int dataSize = 0;
      for (int i = 0; i < campaigns_.size(); i++) {
        dataSize += computeStringSizeNoTag(campaigns_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getCampaignsList().size();
    }
    {
      // Packed `advertising_channel_types` (field 11), same shape as `devices`.
      int dataSize = 0;
      for (int i = 0; i < advertisingChannelTypes_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream
          .computeEnumSizeNoTag(advertisingChannelTypes_.get(i));
      }
      size += dataSize;
      if (!getAdvertisingChannelTypesList().isEmpty()) {  size += 1;
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32SizeNoTag(dataSize);
      }advertisingChannelTypesMemoizedSerializedSize = dataSize;
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural, field-by-field equality including unknown fields, as generated by protoc.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.resources.BiddingDataExclusion)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.resources.BiddingDataExclusion other = (com.google.ads.googleads.v8.resources.BiddingDataExclusion) obj;

    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (getDataExclusionId()
        != other.getDataExclusionId()) return false;
    // Enum fields are compared by their raw wire values.
    if (scope_ != other.scope_) return false;
    if (status_ != other.status_) return false;
    if (!getStartDateTime()
        .equals(other.getStartDateTime())) return false;
    if (!getEndDateTime()
        .equals(other.getEndDateTime())) return false;
    if (!getName()
        .equals(other.getName())) return false;
    if (!getDescription()
        .equals(other.getDescription())) return false;
    if (!devices_.equals(other.devices_)) return false;
    if (!getCampaignsList()
        .equals(other.getCampaignsList())) return false;
    if (!advertisingChannelTypes_.equals(other.advertisingChannelTypes_)) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  // Memoized hash consistent with equals(); repeated fields contribute only when non-empty.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (37 * hash) + DATA_EXCLUSION_ID_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
        getDataExclusionId());
    hash = (37 * hash) + SCOPE_FIELD_NUMBER;
    hash = (53 * hash) + scope_;
    hash = (37 * hash) + STATUS_FIELD_NUMBER;
    hash = (53 * hash) + status_;
    hash = (37 * hash) + START_DATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getStartDateTime().hashCode();
    hash = (37 * hash) + END_DATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getEndDateTime().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
    hash = (53 * hash) + getDescription().hashCode();
    if (getDevicesCount() > 0) {
      hash = (37 * hash) + DEVICES_FIELD_NUMBER;
      hash = (53 * hash) + devices_.hashCode();
    }
    if (getCampaignsCount() > 0) {
      hash = (37 * hash) + CAMPAIGNS_FIELD_NUMBER;
      hash = (53 * hash) + getCampaignsList().hashCode();
    }
    if (getAdvertisingChannelTypesCount() > 0) {
      hash = (37 * hash) + ADVERTISING_CHANNEL_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + advertisingChannelTypes_.hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All overloads delegate to the shared
  // PARSER instance; stream variants wrap parser I/O errors via the GeneratedMessageV3 helpers.
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.resources.BiddingDataExclusion parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods: builders are always derived from the singleton
  // DEFAULT_INSTANCE (or from `this` for toBuilder()).
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.resources.BiddingDataExclusion prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when this is the default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Represents a bidding data exclusion.
* See "About data exclusions" at
* https://support.google.com/google-ads/answer/10370710.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.resources.BiddingDataExclusion}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.resources.BiddingDataExclusion)
com.google.ads.googleads.v8.resources.BiddingDataExclusionOrBuilder {
    // Descriptor plumbing: maps this Builder to the message's proto descriptor
    // and reflective field accessors.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.resources.BiddingDataExclusionProto.internal_static_google_ads_googleads_v8_resources_BiddingDataExclusion_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.resources.BiddingDataExclusionProto.internal_static_google_ads_googleads_v8_resources_BiddingDataExclusion_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.resources.BiddingDataExclusion.class, com.google.ads.googleads.v8.resources.BiddingDataExclusion.Builder.class);
    }

    // Construct using com.google.ads.googleads.v8.resources.BiddingDataExclusion.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message-typed fields here, so nothing to eagerly initialize even when
      // alwaysUseFieldBuilders is set.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    // Resets every field to its proto3 default and clears the has-mutable-list
    // bits (0x1 devices, 0x2 campaigns, 0x4 advertisingChannelTypes).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";
      dataExclusionId_ = 0L;
      scope_ = 0;
      status_ = 0;
      startDateTime_ = "";
      endDateTime_ = "";
      name_ = "";
      description_ = "";
      devices_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      campaigns_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      advertisingChannelTypes_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.resources.BiddingDataExclusionProto.internal_static_google_ads_googleads_v8_resources_BiddingDataExclusion_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v8.resources.BiddingDataExclusion getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.resources.BiddingDataExclusion.getDefaultInstance();
    }
    // Builds the message, throwing if it is not fully initialized (this message
    // has no required fields, so the check always passes).
    @java.lang.Override
    public com.google.ads.googleads.v8.resources.BiddingDataExclusion build() {
      com.google.ads.googleads.v8.resources.BiddingDataExclusion result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Transfers the builder's state into a new message. Repeated-field lists are
    // frozen (unmodifiable) and their ownership moves to the result; the bitField
    // flags are cleared so the builder will copy-on-write if mutated again.
    @java.lang.Override
    public com.google.ads.googleads.v8.resources.BiddingDataExclusion buildPartial() {
      com.google.ads.googleads.v8.resources.BiddingDataExclusion result = new com.google.ads.googleads.v8.resources.BiddingDataExclusion(this);
      int from_bitField0_ = bitField0_;  // (generated) snapshot; unused for this message
      result.resourceName_ = resourceName_;
      result.dataExclusionId_ = dataExclusionId_;
      result.scope_ = scope_;
      result.status_ = status_;
      result.startDateTime_ = startDateTime_;
      result.endDateTime_ = endDateTime_;
      result.name_ = name_;
      result.description_ = description_;
      if (((bitField0_ & 0x00000001) != 0)) {
        devices_ = java.util.Collections.unmodifiableList(devices_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.devices_ = devices_;
      if (((bitField0_ & 0x00000002) != 0)) {
        campaigns_ = campaigns_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.campaigns_ = campaigns_;
      if (((bitField0_ & 0x00000004) != 0)) {
        advertisingChannelTypes_ = java.util.Collections.unmodifiableList(advertisingChannelTypes_);
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.advertisingChannelTypes_ = advertisingChannelTypes_;
      onBuilt();
      return result;
    }
    // Boilerplate overrides delegating to GeneratedMessageV3.Builder; present so
    // the concrete Builder type is returned for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: use the typed merge when possible, otherwise fall back
    // to reflective merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.resources.BiddingDataExclusion) {
        return mergeFrom((com.google.ads.googleads.v8.resources.BiddingDataExclusion)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge following proto3 semantics: singular fields are taken
    // from `other` only when non-default; repeated fields are appended (or the
    // other message's immutable list is adopted directly when ours is empty).
    public Builder mergeFrom(com.google.ads.googleads.v8.resources.BiddingDataExclusion other) {
      if (other == com.google.ads.googleads.v8.resources.BiddingDataExclusion.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      if (other.getDataExclusionId() != 0L) {
        setDataExclusionId(other.getDataExclusionId());
      }
      if (other.scope_ != 0) {
        setScopeValue(other.getScopeValue());
      }
      if (other.status_ != 0) {
        setStatusValue(other.getStatusValue());
      }
      if (!other.getStartDateTime().isEmpty()) {
        startDateTime_ = other.startDateTime_;
        onChanged();
      }
      if (!other.getEndDateTime().isEmpty()) {
        endDateTime_ = other.endDateTime_;
        onChanged();
      }
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      if (!other.getDescription().isEmpty()) {
        description_ = other.description_;
        onChanged();
      }
      if (!other.devices_.isEmpty()) {
        if (devices_.isEmpty()) {
          // Adopt the (immutable) list; clear the mutable bit so it is copied before any write.
          devices_ = other.devices_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureDevicesIsMutable();
          devices_.addAll(other.devices_);
        }
        onChanged();
      }
      if (!other.campaigns_.isEmpty()) {
        if (campaigns_.isEmpty()) {
          campaigns_ = other.campaigns_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureCampaignsIsMutable();
          campaigns_.addAll(other.campaigns_);
        }
        onChanged();
      }
      if (!other.advertisingChannelTypes_.isEmpty()) {
        if (advertisingChannelTypes_.isEmpty()) {
          advertisingChannelTypes_ = other.advertisingChannelTypes_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureAdvertisingChannelTypesIsMutable();
          advertisingChannelTypes_.addAll(other.advertisingChannelTypes_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields; a builder is always buildable.
      return true;
    }
    // Parses a message from the wire and merges it into this builder. On a parse
    // failure, whatever was successfully read (the "unfinished" message) is still
    // merged in the finally block before the exception propagates.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.resources.BiddingDataExclusion parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.resources.BiddingDataExclusion) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Tracks which repeated-field lists are currently mutable (copy-on-write flags).
    private int bitField0_;

    // Stored as either String or ByteString; getters lazily convert and cache.
    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Immutable. The resource name of the data exclusion.
     * Data exclusion resource names have the form:
     * `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        // Decode once and cache the String form (benign race: idempotent write).
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Immutable. The resource name of the data exclusion.
     * Data exclusion resource names have the form:
     * `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Immutable. The resource name of the data exclusion.
     * Data exclusion resource names have the form:
     * `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. The resource name of the data exclusion.
     * Data exclusion resource names have the form:
     * `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Immutable. The resource name of the data exclusion.
     * Data exclusion resource names have the form:
     * `customers/{customer_id}/biddingDataExclusions/{data_exclusion_id}`
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front (proto3 strings must be valid UTF-8).
      checkByteStringIsUtf8(value);
      resourceName_ = value;
      onChanged();
      return this;
    }
    // Accessors for the int64 field data_exclusion_id (field 2, output-only).
    private long dataExclusionId_ ;
    /**
     * <pre>
     * Output only. The ID of the data exclusion.
     * </pre>
     *
     * <code>int64 data_exclusion_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The dataExclusionId.
     */
    @java.lang.Override
    public long getDataExclusionId() {
      return dataExclusionId_;
    }
    /**
     * <pre>
     * Output only. The ID of the data exclusion.
     * </pre>
     *
     * <code>int64 data_exclusion_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The dataExclusionId to set.
     * @return This builder for chaining.
     */
    public Builder setDataExclusionId(long value) {
      dataExclusionId_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The ID of the data exclusion.
     * </pre>
     *
     * <code>int64 data_exclusion_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearDataExclusionId() {
      dataExclusionId_ = 0L;
      onChanged();
      return this;
    }
    // Accessors for the enum field scope (field 3), stored as its raw wire integer.
    private int scope_ = 0;
    /**
     * <pre>
     * The scope of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
     * @return The enum numeric value on the wire for scope.
     */
    @java.lang.Override public int getScopeValue() {
      return scope_;
    }
    /**
     * <pre>
     * The scope of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
     * @param value The enum numeric value on the wire for scope to set.
     * @return This builder for chaining.
     */
    public Builder setScopeValue(int value) {
      scope_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The scope of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
     * @return The scope.
     */
    @java.lang.Override
    public com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope getScope() {
      // Unknown wire values map to UNRECOGNIZED rather than null.
      @SuppressWarnings("deprecation")
      com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope result = com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.valueOf(scope_);
      return result == null ? com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * The scope of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
     * @param value The scope to set.
     * @return This builder for chaining.
     */
    public Builder setScope(com.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope value) {
      if (value == null) {
        throw new NullPointerException();
      }
      scope_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The scope of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventScopeEnum.SeasonalityEventScope scope = 3;</code>
     * @return This builder for chaining.
     */
    public Builder clearScope() {
      scope_ = 0;
      onChanged();
      return this;
    }
    // Accessors for the enum field status (field 4, output-only), stored as its raw wire integer.
    private int status_ = 0;
    /**
     * <pre>
     * Output only. The status of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The enum numeric value on the wire for status.
     */
    @java.lang.Override public int getStatusValue() {
      return status_;
    }
    /**
     * <pre>
     * Output only. The status of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The enum numeric value on the wire for status to set.
     * @return This builder for chaining.
     */
    public Builder setStatusValue(int value) {
      status_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The status of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return The status.
     */
    @java.lang.Override
    public com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus getStatus() {
      // Unknown wire values map to UNRECOGNIZED rather than null.
      @SuppressWarnings("deprecation")
      com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus result = com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.valueOf(status_);
      return result == null ? com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus.UNRECOGNIZED : result;
    }
    /**
     * <pre>
     * Output only. The status of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @param value The status to set.
     * @return This builder for chaining.
     */
    public Builder setStatus(com.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus value) {
      if (value == null) {
        throw new NullPointerException();
      }
      status_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Output only. The status of the data exclusion.
     * </pre>
     *
     * <code>.google.ads.googleads.v8.enums.SeasonalityEventStatusEnum.SeasonalityEventStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     * @return This builder for chaining.
     */
    public Builder clearStatus() {
      status_ = 0;
      onChanged();
      return this;
    }
    // Accessors for the string field start_date_time (field 5, required).
    // Stored as either String or ByteString; getters lazily convert and cache.
    private java.lang.Object startDateTime_ = "";
    /**
     * <pre>
     * Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
     * format.
     * A data exclusion is backward looking and should be used for events that
     * start in the past and end either in the past or future.
     * </pre>
     *
     * <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The startDateTime.
     */
    public java.lang.String getStartDateTime() {
      java.lang.Object ref = startDateTime_;
      if (!(ref instanceof java.lang.String)) {
        // Decode once and cache the String form (benign race: idempotent write).
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        startDateTime_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
     * format.
     * A data exclusion is backward looking and should be used for events that
     * start in the past and end either in the past or future.
     * </pre>
     *
     * <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return The bytes for startDateTime.
     */
    public com.google.protobuf.ByteString
        getStartDateTimeBytes() {
      java.lang.Object ref = startDateTime_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        startDateTime_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
     * format.
     * A data exclusion is backward looking and should be used for events that
     * start in the past and end either in the past or future.
     * </pre>
     *
     * <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The startDateTime to set.
     * @return This builder for chaining.
     */
    public Builder setStartDateTime(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      startDateTime_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
     * format.
     * A data exclusion is backward looking and should be used for events that
     * start in the past and end either in the past or future.
     * </pre>
     *
     * <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
     * @return This builder for chaining.
     */
    public Builder clearStartDateTime() {
      startDateTime_ = getDefaultInstance().getStartDateTime();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The inclusive start time of the data exclusion in yyyy-MM-dd HH:mm:ss
     * format.
     * A data exclusion is backward looking and should be used for events that
     * start in the past and end either in the past or future.
     * </pre>
     *
     * <code>string start_date_time = 5 [(.google.api.field_behavior) = REQUIRED];</code>
     * @param value The bytes for startDateTime to set.
     * @return This builder for chaining.
     */
    public Builder setStartDateTimeBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front (proto3 strings must be valid UTF-8).
      checkByteStringIsUtf8(value);
      startDateTime_ = value;
      onChanged();
      return this;
    }
private java.lang.Object endDateTime_ = "";
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The endDateTime.
*/
public java.lang.String getEndDateTime() {
java.lang.Object ref = endDateTime_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
endDateTime_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @return The bytes for endDateTime.
*/
      public com.google.protobuf.ByteString
          getEndDateTimeBytes() {
        java.lang.Object ref = endDateTime_;
        // Inverse of getEndDateTime(): encode lazily and cache the ByteString
        // back into the field so later calls skip the UTF-8 encode.
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          endDateTime_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The endDateTime to set.
* @return This builder for chaining.
*/
public Builder setEndDateTime(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
endDateTime_ = value;
onChanged();
return this;
}
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @return This builder for chaining.
*/
public Builder clearEndDateTime() {
endDateTime_ = getDefaultInstance().getEndDateTime();
onChanged();
return this;
}
/**
* <pre>
* Required. The exclusive end time of the data exclusion in yyyy-MM-dd HH:mm:ss format.
* The length of [start_date_time, end_date_time) interval must be
* within (0, 14 days].
* </pre>
*
* <code>string end_date_time = 6 [(.google.api.field_behavior) = REQUIRED];</code>
* @param value The bytes for endDateTime to set.
* @return This builder for chaining.
*/
public Builder setEndDateTimeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
endDateTime_ = value;
onChanged();
return this;
}
private java.lang.Object name_ = "";
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @return The bytes for name.
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <pre>
* The name of the data exclusion. The name can be at most 255
* characters.
* </pre>
*
* <code>string name = 7;</code>
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private java.lang.Object description_ = "";
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @return The description.
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @return The bytes for description.
*/
public com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @param value The description to set.
* @return This builder for chaining.
*/
public Builder setDescription(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
description_ = value;
onChanged();
return this;
}
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @return This builder for chaining.
*/
public Builder clearDescription() {
description_ = getDefaultInstance().getDescription();
onChanged();
return this;
}
/**
* <pre>
* The description of the data exclusion. The description can be at
* most 2048 characters.
* </pre>
*
* <code>string description = 8;</code>
* @param value The bytes for description to set.
* @return This builder for chaining.
*/
public Builder setDescriptionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
description_ = value;
onChanged();
return this;
}
private java.util.List<java.lang.Integer> devices_ =
java.util.Collections.emptyList();
      private void ensureDevicesIsMutable() {
        // Copy-on-write: bit 0x00000001 of bitField0_ records whether devices_
        // is already this builder's own mutable copy. If not, replace the
        // (possibly shared or immutable) list with a fresh ArrayList before
        // any mutation, then mark the bit.
        if (!((bitField0_ & 0x00000001) != 0)) {
          devices_ = new java.util.ArrayList<java.lang.Integer>(devices_);
          bitField0_ |= 0x00000001;
        }
      }
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return A list containing the devices.
*/
public java.util.List<com.google.ads.googleads.v8.enums.DeviceEnum.Device> getDevicesList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, com.google.ads.googleads.v8.enums.DeviceEnum.Device>(devices_, devices_converter_);
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return The count of devices.
*/
public int getDevicesCount() {
return devices_.size();
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index of the element to return.
* @return The devices at the given index.
*/
public com.google.ads.googleads.v8.enums.DeviceEnum.Device getDevices(int index) {
return devices_converter_.convert(devices_.get(index));
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index to set the value at.
* @param value The devices to set.
* @return This builder for chaining.
*/
public Builder setDevices(
int index, com.google.ads.googleads.v8.enums.DeviceEnum.Device value) {
if (value == null) {
throw new NullPointerException();
}
ensureDevicesIsMutable();
devices_.set(index, value.getNumber());
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param value The devices to add.
* @return This builder for chaining.
*/
public Builder addDevices(com.google.ads.googleads.v8.enums.DeviceEnum.Device value) {
if (value == null) {
throw new NullPointerException();
}
ensureDevicesIsMutable();
devices_.add(value.getNumber());
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param values The devices to add.
* @return This builder for chaining.
*/
public Builder addAllDevices(
java.lang.Iterable<? extends com.google.ads.googleads.v8.enums.DeviceEnum.Device> values) {
ensureDevicesIsMutable();
for (com.google.ads.googleads.v8.enums.DeviceEnum.Device value : values) {
devices_.add(value.getNumber());
}
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return This builder for chaining.
*/
public Builder clearDevices() {
devices_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @return A list containing the enum numeric values on the wire for devices.
*/
public java.util.List<java.lang.Integer>
getDevicesValueList() {
return java.util.Collections.unmodifiableList(devices_);
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of devices at the given index.
*/
public int getDevicesValue(int index) {
return devices_.get(index);
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of devices at the given index.
* @return This builder for chaining.
*/
public Builder setDevicesValue(
int index, int value) {
ensureDevicesIsMutable();
devices_.set(index, value);
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param value The enum numeric value on the wire for devices to add.
* @return This builder for chaining.
*/
public Builder addDevicesValue(int value) {
ensureDevicesIsMutable();
devices_.add(value);
onChanged();
return this;
}
/**
* <pre>
* If not specified, all devices will be included in this exclusion.
* Otherwise, only the specified targeted devices will be included in this
* exclusion.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.DeviceEnum.Device devices = 9;</code>
* @param values The enum numeric values on the wire for devices to add.
* @return This builder for chaining.
*/
public Builder addAllDevicesValue(
java.lang.Iterable<java.lang.Integer> values) {
ensureDevicesIsMutable();
for (int value : values) {
devices_.add(value);
}
onChanged();
return this;
}
private com.google.protobuf.LazyStringList campaigns_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureCampaignsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
campaigns_ = new com.google.protobuf.LazyStringArrayList(campaigns_);
bitField0_ |= 0x00000002;
}
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @return A list containing the campaigns.
*/
public com.google.protobuf.ProtocolStringList
getCampaignsList() {
return campaigns_.getUnmodifiableView();
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @return The count of campaigns.
*/
public int getCampaignsCount() {
return campaigns_.size();
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param index The index of the element to return.
* @return The campaigns at the given index.
*/
public java.lang.String getCampaigns(int index) {
return campaigns_.get(index);
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param index The index of the value to return.
* @return The bytes of the campaigns at the given index.
*/
public com.google.protobuf.ByteString
getCampaignsBytes(int index) {
return campaigns_.getByteString(index);
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param index The index to set the value at.
* @param value The campaigns to set.
* @return This builder for chaining.
*/
public Builder setCampaigns(
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureCampaignsIsMutable();
campaigns_.set(index, value);
onChanged();
return this;
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param value The campaigns to add.
* @return This builder for chaining.
*/
public Builder addCampaigns(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureCampaignsIsMutable();
campaigns_.add(value);
onChanged();
return this;
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param values The campaigns to add.
* @return This builder for chaining.
*/
public Builder addAllCampaigns(
java.lang.Iterable<java.lang.String> values) {
ensureCampaignsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, campaigns_);
onChanged();
return this;
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @return This builder for chaining.
*/
public Builder clearCampaigns() {
campaigns_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* The data exclusion will apply to the campaigns listed when the scope of
* this exclusion is CAMPAIGN. The maximum number of campaigns per event is
* 2000.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated string campaigns = 10 [(.google.api.resource_reference) = { ... }</code>
* @param value The bytes of the campaigns to add.
* @return This builder for chaining.
*/
public Builder addCampaignsBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureCampaignsIsMutable();
campaigns_.add(value);
onChanged();
return this;
}
private java.util.List<java.lang.Integer> advertisingChannelTypes_ =
java.util.Collections.emptyList();
private void ensureAdvertisingChannelTypesIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
advertisingChannelTypes_ = new java.util.ArrayList<java.lang.Integer>(advertisingChannelTypes_);
bitField0_ |= 0x00000004;
}
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return A list containing the advertisingChannelTypes.
*/
public java.util.List<com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType> getAdvertisingChannelTypesList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType>(advertisingChannelTypes_, advertisingChannelTypes_converter_);
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return The count of advertisingChannelTypes.
*/
public int getAdvertisingChannelTypesCount() {
return advertisingChannelTypes_.size();
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param index The index of the element to return.
* @return The advertisingChannelTypes at the given index.
*/
public com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType getAdvertisingChannelTypes(int index) {
return advertisingChannelTypes_converter_.convert(advertisingChannelTypes_.get(index));
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param index The index to set the value at.
* @param value The advertisingChannelTypes to set.
* @return This builder for chaining.
*/
public Builder setAdvertisingChannelTypes(
int index, com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType value) {
if (value == null) {
throw new NullPointerException();
}
ensureAdvertisingChannelTypesIsMutable();
advertisingChannelTypes_.set(index, value.getNumber());
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param value The advertisingChannelTypes to add.
* @return This builder for chaining.
*/
public Builder addAdvertisingChannelTypes(com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType value) {
if (value == null) {
throw new NullPointerException();
}
ensureAdvertisingChannelTypesIsMutable();
advertisingChannelTypes_.add(value.getNumber());
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param values The advertisingChannelTypes to add.
* @return This builder for chaining.
*/
public Builder addAllAdvertisingChannelTypes(
java.lang.Iterable<? extends com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType> values) {
ensureAdvertisingChannelTypesIsMutable();
for (com.google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType value : values) {
advertisingChannelTypes_.add(value.getNumber());
}
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return This builder for chaining.
*/
public Builder clearAdvertisingChannelTypes() {
advertisingChannelTypes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @return A list containing the enum numeric values on the wire for advertisingChannelTypes.
*/
public java.util.List<java.lang.Integer>
getAdvertisingChannelTypesValueList() {
return java.util.Collections.unmodifiableList(advertisingChannelTypes_);
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of advertisingChannelTypes at the given index.
*/
public int getAdvertisingChannelTypesValue(int index) {
return advertisingChannelTypes_.get(index);
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param index The index of the value to return.
* @return The enum numeric value on the wire of advertisingChannelTypes at the given index.
* @return This builder for chaining.
*/
public Builder setAdvertisingChannelTypesValue(
int index, int value) {
ensureAdvertisingChannelTypesIsMutable();
advertisingChannelTypes_.set(index, value);
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param value The enum numeric value on the wire for advertisingChannelTypes to add.
* @return This builder for chaining.
*/
public Builder addAdvertisingChannelTypesValue(int value) {
ensureAdvertisingChannelTypesIsMutable();
advertisingChannelTypes_.add(value);
onChanged();
return this;
}
/**
* <pre>
* The data_exclusion will apply to all the campaigns under the listed
* channels retroactively as well as going forward when the scope of this
* exclusion is CHANNEL.
* The supported advertising channel types are DISPLAY, SEARCH and SHOPPING.
* Note: a data exclusion with both advertising_channel_types and
* campaign_ids is not supported.
* </pre>
*
* <code>repeated .google.ads.googleads.v8.enums.AdvertisingChannelTypeEnum.AdvertisingChannelType advertising_channel_types = 11;</code>
* @param values The enum numeric values on the wire for advertisingChannelTypes to add.
* @return This builder for chaining.
*/
public Builder addAllAdvertisingChannelTypesValue(
java.lang.Iterable<java.lang.Integer> values) {
ensureAdvertisingChannelTypesIsMutable();
for (int value : values) {
advertisingChannelTypes_.add(value);
}
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.resources.BiddingDataExclusion)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v8.resources.BiddingDataExclusion)
private static final com.google.ads.googleads.v8.resources.BiddingDataExclusion DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v8.resources.BiddingDataExclusion();
}
public static com.google.ads.googleads.v8.resources.BiddingDataExclusion getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BiddingDataExclusion>
PARSER = new com.google.protobuf.AbstractParser<BiddingDataExclusion>() {
@java.lang.Override
public BiddingDataExclusion parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BiddingDataExclusion(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<BiddingDataExclusion> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BiddingDataExclusion> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v8.resources.BiddingDataExclusion getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| apache-2.0 |
Vertabelo/mobiorm-demo-android | app/src/main/java/com/vertabelo/mobileorm/myplaces/orm/runtime/query/RightOperatorLExp.java | 1447 | package com.vertabelo.mobileorm.myplaces.orm.runtime.query;
import java.util.List;
import java.util.Objects;
/**
* Represents logical expression created from another logical expression and logical operator
*/
/**
 * Logical expression composed of an operand expression followed by an
 * operator token, e.g. {@code <exp> IS NULL}: the operator text is
 * appended verbatim after the rendered operand.
 */
public class RightOperatorLExp extends LExp {

    /** Operand rendered before the operator (may be null; see equals/hashCode). */
    private final Expression exp;

    /** Operator text appended verbatim after the operand. */
    private final String operator;

    /**
     * Creates an expression that renders {@code exp} followed by {@code operator}.
     *
     * @param exp operand expression
     * @param operator operator text appended after the operand
     */
    public RightOperatorLExp(Expression exp, String operator) {
        this.exp = exp;
        this.operator = operator;
    }

    /**
     * Renders this expression: first the operand, then the operator text.
     *
     * @param sb target buffer receiving the SQL text
     * @param objects collects values emitted while rendering the operand
     */
    @Override
    public void build(StringBuilder sb, List<Value> objects) {
        exp.build(sb, objects);
        sb.append(operator);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RightOperatorLExp that = (RightOperatorLExp) o;
        // Objects.equals replaces the hand-rolled null-safe comparisons.
        return Objects.equals(exp, that.exp)
                && Objects.equals(operator, that.operator);
    }

    @Override
    public int hashCode() {
        // Null-safe and consistent with equals(): both fields participate.
        return Objects.hash(exp, operator);
    }

    @Override
    public String toString() {
        return "RightOperatorLExp{"
                + "exp=" + exp
                + ", operator='" + operator + '\''
                + '}';
    }
}
| apache-2.0 |
tfisher1226/ARIES | aries/ariel-compiler/src/main/java/org/aries/ast/AriesASTContext.java | 1175 | package org.aries.ast;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import nam.model.Operation;
import nam.model.Result;
import org.aries.ast.node.ImportNode;
import org.aries.ast.node.MessageNode;
import org.aries.ast.node.MethodNode;
import org.aries.ast.node.NetworkNode;
import org.aries.ast.node.ParticipantNode;
import org.aries.ast.node.ReceiveNode;
public class AriesASTContext {

    /** Import nodes collected while walking the AST. */
    public static List<ImportNode> importNodes;

    /** All network nodes encountered so far. */
    public static List<NetworkNode> networkNodes;

    // Most recently visited node of each kind (mutable walk state).
    public static NetworkNode networkNode;
    public static ParticipantNode participantNode;
    public static ReceiveNode receiveNode;
    public static MethodNode methodNode;
    public static MessageNode messageNode;

    /** Operation currently being processed. */
    public static Operation operation;

    /**
     * Variables currently in scope, keyed by name. Initialized eagerly so the
     * accessors below no longer throw a NullPointerException when called
     * before the first {@link #clearActiveVariablesInScope()}.
     */
    private static Map<String, Result> activeVariablesInScope = new HashMap<String, Result>();

    /** Resets the active-variable scope to empty. */
    public static void clearActiveVariablesInScope() {
        activeVariablesInScope = new HashMap<String, Result>();
    }

    /**
     * Looks up a variable by name.
     *
     * @param name variable name
     * @return the matching result, or null if no such variable is in scope
     */
    public static Result getActiveVariableInScope(String name) {
        return activeVariablesInScope.get(name);
    }

    /**
     * Registers a variable in the current scope, replacing any previous
     * entry with the same name.
     *
     * @param result variable to register; its name is used as the map key
     */
    public static void addActiveVariableInScope(Result result) {
        activeVariablesInScope.put(result.getName(), result);
    }
}
| apache-2.0 |
3dcitydb/importer-exporter | impexp-client-gui/src/main/java/org/citydb/gui/operation/preferences/preferences/RootPreferencesEntry.java | 1472 | /*
* 3D City Database - The Open Source CityGML Database
* https://www.3dcitydb.org/
*
* Copyright 2013 - 2021
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.lrg.tum.de/gis/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* Virtual City Systems, Berlin <https://vc.systems/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.gui.operation.preferences.preferences;
import org.citydb.config.i18n.Language;
import org.citydb.gui.operation.common.DefaultPreferencesEntry;
import org.citydb.gui.operation.common.NullComponent;
public class RootPreferencesEntry extends DefaultPreferencesEntry {

    /** Resource-bundle key for the preferences tree root label. */
    private static final String TITLE_KEY = "pref.tree.root";

    /** Creates the root entry; it renders no settings panel of its own. */
    public RootPreferencesEntry() {
        super(NullComponent.getInstance());
    }

    /** Returns the localized label shown for the root of the preferences tree. */
    @Override
    public String getLocalizedTitle() {
        return Language.I18N.getString(TITLE_KEY);
    }
}
| apache-2.0 |
google/graphicsfuzz | generator/src/main/java/com/graphicsfuzz/generator/semanticspreserving/StructificationMutationFinder.java | 4466 | /*
* Copyright 2018 The GraphicsFuzz Project Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphicsfuzz.generator.semanticspreserving;
import com.graphicsfuzz.common.ast.TranslationUnit;
import com.graphicsfuzz.common.ast.stmt.DeclarationStmt;
import com.graphicsfuzz.common.ast.stmt.ForStmt;
import com.graphicsfuzz.common.ast.type.BasicType;
import com.graphicsfuzz.common.ast.type.QualifiedType;
import com.graphicsfuzz.common.ast.type.StructNameType;
import com.graphicsfuzz.common.ast.type.Type;
import com.graphicsfuzz.common.ast.visitors.StandardVisitor;
import com.graphicsfuzz.common.util.IRandom;
import com.graphicsfuzz.common.util.IdGenerator;
import com.graphicsfuzz.generator.mutateapi.MutationFinderBase;
import com.graphicsfuzz.generator.util.GenerationParams;
import com.graphicsfuzz.util.Constants;
import java.util.HashSet;
import java.util.Set;
public class StructificationMutationFinder extends MutationFinderBase<StructificationMutation> {

  /** Generates fresh struct-name ids, skipping ids already present in the tu. */
  private final IdGenerator idGenerator;

  /** Randomness source passed through to each mutation. */
  private final IRandom random;

  /** Generation parameters passed through to each mutation. */
  private final GenerationParams generationParams;

  public StructificationMutationFinder(TranslationUnit tu,
                                       IRandom random,
                                       GenerationParams generationParams) {
    super(tu);
    this.idGenerator = new IdGenerator(getIdsAlreadyUsedForStructification());
    this.random = random;
    this.generationParams = generationParams;
  }

  /**
   * Looks through the translation unit for all ids that have already been used for naming in
   * structification, and yields the set of all such ids.
   * @return The set of all ids that have been used in structification.
   */
  private Set<Integer> getIdsAlreadyUsedForStructification() {
    final Set<Integer> result = new HashSet<>();
    new StandardVisitor() {
      @Override
      public void visitStructNameType(StructNameType structNameType) {
        super.visitStructNameType(structNameType);
        if (structNameType.getName().startsWith(Constants.STRUCTIFICATION_STRUCT_PREFIX)) {
          // Integer.valueOf replaces the deprecated boxed constructor
          // new Integer(...) (deprecated since Java 9).
          result.add(Integer.valueOf(
              StructificationMutation.getIdFromGeneratedStructName(structNameType)));
        }
      }
    }.visit(getTranslationUnit());
    return result;
  }

  @Override
  public void visitDeclarationStmt(DeclarationStmt declarationStmt) {
    super.visitDeclarationStmt(declarationStmt);
    // Currently we only structify solo declarations
    if (declarationStmt.getVariablesDeclaration().getNumDecls() != 1) {
      return;
    }
    // We don't currently structify arrays
    if (declarationStmt.getVariablesDeclaration().getDeclInfo(0).hasArrayInfo()) {
      return;
    }
    final Type baseType = declarationStmt.getVariablesDeclaration().getBaseType();
    if (hasQualifiers(baseType)) {
      return;
    }
    // TODO: For simplicity, at present we do not structify non-basic types. The issue is that
    // if a struct S is to be structified, we need to declare the structs that enclose S *after*
    // S is declared, which is a bit fiddly (currently they all go at the top of the translation
    // unit).
    if (!(baseType.getWithoutQualifiers() instanceof BasicType)) {
      return;
    }
    addMutation(new StructificationMutation(declarationStmt, currentBlock(),
        getTranslationUnit(), idGenerator, random, generationParams));
  }

  /** @return true iff the type carries at least one qualifier. */
  private boolean hasQualifiers(Type type) {
    return type instanceof QualifiedType
        && ((QualifiedType) type).hasQualifiers();
  }

  @Override
  public void visitForStmt(ForStmt forStmt) {
    if (!getTranslationUnit().getShadingLanguageVersion().restrictedForLoops()
        || !getTranslationUnit().getShadingLanguageVersion().isWebGl()) {
      super.visitForStmt(forStmt);
    } else {
      // GLSL 1.00 + WebGL does not allow us to structify for loop guards, so we skip the for loop
      // header.
      visitForStmtBodyOnly(forStmt);
    }
  }
}
| apache-2.0 |
llarreta/larretasources | Commons/src/main/java/ar/com/larreta/commons/AppState.java | 414 | package ar.com.larreta.commons;
public class AppState extends AppObjectImpl {

    /** Lazily-created singleton; guarded by the class lock (see getInstance). */
    private static AppState INSTANCE;

    /** Monotonically increasing lifecycle level; 0 means "initializing". */
    private Integer stateLevel = 0;

    /**
     * Returns the singleton instance, creating it on first use.
     * Synchronized so two threads racing on first access cannot observe or
     * create two distinct instances (the original check-then-act was unsafe).
     */
    public static synchronized AppState getInstance() {
        if (INSTANCE == null) {
            INSTANCE = new AppState();
        }
        return INSTANCE;
    }

    /** Advances the application to the next lifecycle level. */
    public void advanceLevel() {
        stateLevel++;
    }

    /** @return true while the application is still at its initial level (0) */
    public Boolean isInitializing() {
        return stateLevel == 0;
    }
}
| apache-2.0 |
paulseawa/p4ic4idea | plugin/src/net/groboclown/idea/p4ic/server/exceptions/P4SSLFingerprintException.java | 1548 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.groboclown.idea.p4ic.server.exceptions;
import com.perforce.p4java.exception.ConnectionException;
import com.perforce.p4java.exception.TrustException;
import net.groboclown.idea.p4ic.P4Bundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Indicates that the host server reported a different SSL fingerprint
* than the one declared by either the trust file, or the user.
*/
public class P4SSLFingerprintException extends P4SSLException {

    /**
     * @param serverFingerprint fingerprint expected by the user or trust file; may be null
     * @param e trust failure carrying the fingerprint actually reported by the server
     */
    public P4SSLFingerprintException(@Nullable final String serverFingerprint, @NotNull final TrustException e) {
        super(P4Bundle.message("exception.ssl.fingerprint", serverFingerprint, e.getFingerprint()), e);
    }

    /**
     * @param serverFingerprint fingerprint expected by the user or trust file; may be null
     * @param e connection failure; its message is reported in place of a server fingerprint
     */
    public P4SSLFingerprintException(@Nullable final String serverFingerprint, @NotNull final ConnectionException e) {
        super(P4Bundle.message("exception.ssl.fingerprint", serverFingerprint, e.getMessage()), e);
    }
}
| apache-2.0 |
ox-it/cucm-http-api | src/main/java/com/cisco/axl/api/_8/RCcdAdvertisingService.java | 5222 |
package com.cisco.axl.api._8;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for RCcdAdvertisingService complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="RCcdAdvertisingService">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence minOccurs="0">
* <element name="name" type="{http://www.cisco.com/AXL/API/8.0}String50" minOccurs="0"/>
* <element name="description" type="{http://www.cisco.com/AXL/API/8.0}String50" minOccurs="0"/>
* <element name="isActivated" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/>
* <element name="hostDnGroup" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/>
* <element name="safSipTrunk" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/>
* <element name="safH323Trunk" type="{http://www.cisco.com/AXL/API/8.0}XFkType" minOccurs="0"/>
* </sequence>
* <attribute name="uuid" type="{http://www.cisco.com/AXL/API/8.0}XUUID" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): JAXB-generated binding class (AXL 8.0 schema). Prefer
// regenerating from the schema over hand-editing. All elements are optional
// per the schema, so every getter may return null.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "RCcdAdvertisingService", propOrder = {
    "name",
    "description",
    "isActivated",
    "hostDnGroup",
    "safSipTrunk",
    "safH323Trunk"
})
public class RCcdAdvertisingService {

    // Fields are bound directly by JAXB (XmlAccessType.FIELD).
    protected String name;
    protected String description;
    protected String isActivated;
    protected XFkType hostDnGroup;
    protected XFkType safSipTrunk;
    protected XFkType safH323Trunk;
    @XmlAttribute
    protected String uuid;

    /** @return the service name (schema type String50), or null if unset */
    public String getName() {
        return name;
    }

    /** @param value the service name (String50); may be null */
    public void setName(String value) {
        this.name = value;
    }

    /** @return the description (String50), or null if unset */
    public String getDescription() {
        return description;
    }

    /** @param value the description (String50); may be null */
    public void setDescription(String value) {
        this.description = value;
    }

    /** @return the activation flag as a schema boolean string, or null if unset */
    public String getIsActivated() {
        return isActivated;
    }

    /** @param value the activation flag as a schema boolean string; may be null */
    public void setIsActivated(String value) {
        this.isActivated = value;
    }

    /** @return the hosted DN group foreign-key reference, or null if unset */
    public XFkType getHostDnGroup() {
        return hostDnGroup;
    }

    /** @param value the hosted DN group foreign-key reference; may be null */
    public void setHostDnGroup(XFkType value) {
        this.hostDnGroup = value;
    }

    /** @return the SAF SIP trunk foreign-key reference, or null if unset */
    public XFkType getSafSipTrunk() {
        return safSipTrunk;
    }

    /** @param value the SAF SIP trunk foreign-key reference; may be null */
    public void setSafSipTrunk(XFkType value) {
        this.safSipTrunk = value;
    }

    /** @return the SAF H.323 trunk foreign-key reference, or null if unset */
    public XFkType getSafH323Trunk() {
        return safH323Trunk;
    }

    /** @param value the SAF H.323 trunk foreign-key reference; may be null */
    public void setSafH323Trunk(XFkType value) {
        this.safH323Trunk = value;
    }

    /** @return the XML uuid attribute (schema type XUUID), or null if unset */
    public String getUuid() {
        return uuid;
    }

    /** @param value the XML uuid attribute (XUUID); may be null */
    public void setUuid(String value) {
        this.uuid = value;
    }
}
| apache-2.0 |
StevenLeRoux/warp10-platform | warp10/src/main/java/io/warp10/script/functions/PACK.java | 6325 | //
// Copyright 2016 Cityzen Data
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.script.functions;
import io.warp10.continuum.gts.UnsafeString;
import io.warp10.script.NamedWarpScriptFunction;
import io.warp10.script.WarpScriptException;
import io.warp10.script.WarpScriptStack;
import io.warp10.script.WarpScriptStackFunction;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
/**
* Pack a list of numeric or boolean values according to a specified format
*/
public class PACK extends NamedWarpScriptFunction implements WarpScriptStackFunction {

  public PACK(String name) {
    super(name);
  }

  /**
   * Expects [ values_list format_string ] on the stack and pushes the packed
   * byte array. Format tokens, as implemented by the parser below:
   *   '<L' / '>L' little/big endian long, optional decimal bit length (default and max 64)
   *   '<D' / '>D' little/big endian IEEE-754 double bit pattern (always 64 bits)
   *   'B'          single bit boolean (non-zero -> 1)
   *   's' / 'S'    skip bits filled with 0s ('s') or 1s ('S'), decimal length follows
   * Consumes one value from the list per non-skip token.
   */
  @Override
  public Object apply(WarpScriptStack stack) throws WarpScriptException {
    Object o = stack.pop();

    if (!(o instanceof String)) {
      throw new WarpScriptException(getName() + " expects a format string on top of the stack.");
    }

    String fmt = o.toString();

    o = stack.pop();

    if (!(o instanceof List)) {
      throw new WarpScriptException(getName() + " operates on a list of numeric or boolean values.");
    }

    List<Object> values = (List<Object>) o;

    for (Object value: values) {
      if (!(value instanceof Number) && !(value instanceof Boolean)) {
        throw new WarpScriptException(getName() + " operates on a list of numeric or boolean values.");
      }
    }

    //
    // Parse the format
    //

    int idx = 0;

    // Parallel lists: token type string and its bit length.
    List<String> types = new ArrayList<String>();
    List<Integer> lengths = new ArrayList<Integer>();

    // Total number of bits the packed output will contain.
    int totalbits = 0;

    while(idx < fmt.length()) {
      // Two-character lookahead: endianness prefix plus type letter.
      String type = new String(UnsafeString.substring(fmt, idx, idx + 2));
      char prefix = fmt.charAt(idx++);
      if (idx > fmt.length()) {
        throw new WarpScriptException(getName() + " encountered an invalid format specification.");
      }
      int len = 0;
      if ('<' == prefix || '>' == prefix) {
        char t = fmt.charAt(idx++);
        // nolen is true for 'D': doubles are always 64 bits, no length suffix.
        boolean nolen = false;
        if ('L' == t) {
          len = 64;
        } else if ('D' == t) {
          len = 64;
          nolen = true;
        } else {
          throw new WarpScriptException(getName() + " encountered an invalid format specification '" + prefix + t + "'.");
        }
        // Check if we have a length
        if (!nolen && idx < fmt.length()) {
          if (fmt.charAt(idx) <= '9' && fmt.charAt(idx) >= '0') {
            len = 0;
            while (idx < fmt.length() && fmt.charAt(idx) <= '9' && fmt.charAt(idx) >= '0') {
              len *= 10;
              len += (int) (fmt.charAt(idx++) - '0');
            }
          }
        }
        if (len > 64) {
          throw new WarpScriptException(getName() + " encountered an invalid length for 'L', max length is 64.");
        }
      } else if ('S' == prefix || 's' == prefix) {
        type = "" + prefix;
        if (idx >= fmt.length()) {
          throw new WarpScriptException(getName() + " encountered an invalid Skip specification.");
        }
        if (fmt.charAt(idx) <= '9' && fmt.charAt(idx) >= '0') {
          len = 0;
          while (idx < fmt.length() && fmt.charAt(idx) <= '9' && fmt.charAt(idx) >= '0') {
            len *= 10;
            len += (int) (fmt.charAt(idx++) - '0');
          }
        }
      } else if ('B' == prefix) {
        type = "" + prefix;
        len = 1;
      } else {
        throw new WarpScriptException(getName() + " encountered an invalid format specification '" + prefix + "'.");
      }

      types.add(type);
      lengths.add(len);
      totalbits += len;
    }

    //
    // Now encode the various values
    //

    ByteArrayOutputStream baos = new ByteArrayOutputStream(((totalbits + 7) / 8));

    int nbits = 0;     // bits emitted so far
    int vidx = 0;      // index of the next value to consume
    long curbyte = 0;  // bit accumulator, flushed every 8 bits

    for (int i = 0; i < types.size(); i++) {
      int len = lengths.get(i);
      long value = 0L;
      boolean bigendian = true;
      if ("s".equals(types.get(i))) {
        // Skip filled with 0 bits; consumes no value.
        value = 0L;
        bigendian = false;
      } else if ("S".equals(types.get(i))) {
        // Skip filled with 1 bits; consumes no value.
        value = 0xFFFFFFFFFFFFFFFFL;
        bigendian = false;
      } else {
        Object v = values.get(vidx++);
        if (v instanceof Boolean) {
          if (Boolean.TRUE.equals(v)) {
            v = 1L;
          } else {
            v = 0L;
          }
        }
        if ("<D".equals(types.get(i))) {
          bigendian = false;
          value = Double.doubleToRawLongBits(((Number) v).doubleValue());
        } else if (">D".equals(types.get(i))) {
          bigendian = true;
          value = Double.doubleToRawLongBits(((Number) v).doubleValue());
        } else if ("<L".equals(types.get(i))) {
          bigendian = false;
          value = ((Number) v).longValue();
        } else if (">L".equals(types.get(i))) {
          bigendian = true;
          value = ((Number) v).longValue();
        } else if ("B".equals(types.get(i))) {
          bigendian = false;
          value = 0 != ((Number) v).longValue() ? 1L : 0L;
        }
      }

      if (bigendian) {
        // The emit loop below shifts the LOW bit of 'value' out first, so for
        // big endian output reverse the 64 bits, then right-align the len
        // meaningful bits.
        value = Long.reverse(value);
        if (len < 64) {
          value >>>= (64 - len);
        }
      }

      // Emit 'len' bits, flushing the accumulator every full byte.
      for (int k = 0; k < len; k++) {
        curbyte <<= 1;
        curbyte |= (value & 0x1L);
        value >>= 1;
        nbits++;
        if (0 == nbits % 8) {
          baos.write((int) (curbyte & 0xFFL));
          curbyte = 0L;
        }
      }
    }

    // Pad the trailing partial byte with 0 bits on the right.
    if (0 != nbits % 8) {
      curbyte <<= 8 - (nbits % 8);
      baos.write((int) (curbyte & 0xFFL));
    }

    stack.push(baos.toByteArray());

    return stack;
  }
}
| apache-2.0 |
taboola/amphtml | validator/java/src/main/java/dev/amp/validator/ParsedValidatorRules.java | 37623 | /*
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
/*
* Changes to the original project are Copyright 2019, Verizon Media Inc..
*/
package dev.amp.validator;
import dev.amp.validator.exception.TagValidationException;
import dev.amp.validator.exception.ValidatorException;
import dev.amp.validator.utils.AttributeSpecUtils;
import dev.amp.validator.utils.DispatchKeyUtils;
import dev.amp.validator.utils.TagSpecUtils;
import org.xml.sax.Attributes;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This wrapper class provides access to the validation rules.
*
* @author nhant01
* @author GeorgeLuo
*/
public class ParsedValidatorRules {
/**
* Constructor.
*
* @param htmlFormat the HTML format.
* @param ampValidatorManager the AMPValidatorManager instance.
*/
public ParsedValidatorRules(@Nonnull final ValidatorProtos.HtmlFormat.Code htmlFormat,
                            @Nonnull final AMPValidatorManager ampValidatorManager) {
    this.ampValidatorManager = ampValidatorManager;
    this.htmlFormat = htmlFormat;
    this.parsedTagSpecById = new HashMap<>();
    this.tagSpecByTagName = new HashMap<>();
    this.mandatoryTagSpecs = new ArrayList<>();
    // Regex-pattern caches, keyed by the (escaped) regex source string.
    this.fullMatchRegexes = new HashMap<>();
    this.fullMatchCaseiRegexes = new HashMap<>();
    this.partialMatchCaseiRegexes = new HashMap<>();
    // Map used as a set of recognized AMP type identifiers; the "⚡" variants
    // (with and without the emoji variation selector) alias "amp" etc.
    this.typeIdentifiers = new HashMap<>();
    typeIdentifiers.put("\u26a1", 0);
    typeIdentifiers.put("\u26a1\ufe0f", 0);
    typeIdentifiers.put("amp", 0);
    typeIdentifiers.put("\u26a14ads", 0);
    typeIdentifiers.put("\u26a1\ufe0f4ads", 0);
    typeIdentifiers.put("amp4ads", 0);
    typeIdentifiers.put("\u26a14email", 0);
    typeIdentifiers.put("\u26a1\ufe0f4email", 0);
    typeIdentifiers.put("amp4email", 0);
    typeIdentifiers.put("actions", 0);
    typeIdentifiers.put("transformed", 0);
    typeIdentifiers.put("data-ampdevmode", 0);
    expandExtensionSpec();
    this.parsedAttrSpecs = new ParsedAttrSpecs(ampValidatorManager);
    // NOTE(review): this map is keyed by both spec ids (Integer) and spec
    // names (String) below — presumably intentional; confirm before changing
    // its declared key type.
    this.tagSpecIdsToTrack = new HashMap<>();
    // Index every tagspec that applies to this HTML format.
    final int numTags = this.ampValidatorManager.getRules().getTagsList().size();
    for (int tagSpecId = 0; tagSpecId < numTags; ++tagSpecId) {
        final ValidatorProtos.TagSpec tag = this.ampValidatorManager.getRules().getTags(tagSpecId);
        if (!this.isTagSpecCorrectHtmlFormat(tag)) {
            continue;
        }
        if (tag.hasSpecName()) {
            tagSpecNameToSpecId.put(tag.getSpecName(), tagSpecId);
        }
        // Track specs involved in "also requires" warnings, in either role.
        if (tag.getAlsoRequiresTagWarningList().size() > 0) {
            this.tagSpecIdsToTrack.put(tagSpecId, true);
        }
        for (String otherTag : tag.getAlsoRequiresTagWarningList()) {
            this.tagSpecIdsToTrack.put(otherTag, true);
        }
        if (!tag.getTagName().equals("$REFERENCE_POINT")) {
            if (!(tagSpecByTagName.containsKey(tag.getTagName()))) {
                this.tagSpecByTagName.put(tag.getTagName(), new TagSpecDispatch());
            }
            final TagSpecDispatch tagnameDispatch = this.tagSpecByTagName.get(tag.getTagName());
            if (tag.hasExtensionSpec()) {
                // This tag is an extension. Compute and register a dispatch key
                // for it.
                String dispatchKey = DispatchKeyUtils.makeDispatchKey(
                        ValidatorProtos.AttrSpec.DispatchKeyType.NAME_VALUE_DISPATCH,
                        AttributeSpecUtils.getExtensionNameAttribute(tag.getExtensionSpec()),
                        tag.getExtensionSpec().getName(), "");
                tagnameDispatch.registerDispatchKey(dispatchKey, tagSpecId);
            } else {
                String dispatchKey = this.ampValidatorManager.getDispatchKeyByTagSpecId(tagSpecId);
                if (dispatchKey == null) {
                    tagnameDispatch.registerTagSpec(tagSpecId);
                } else {
                    tagnameDispatch.registerDispatchKey(dispatchKey, tagSpecId);
                }
            }
        }
        if (tag.hasMandatory()) {
            this.mandatoryTagSpecs.add(tagSpecId);
        }
    }
    // Build the error-code metadata table: message format first, then merge
    // in each code's specificity.
    this.errorCodes = new HashMap<>();
    for (int i = 0; i < this.ampValidatorManager.getRules().getErrorFormatsList().size(); ++i) {
        final ValidatorProtos.ErrorFormat errorFormat =
                this.ampValidatorManager.getRules().getErrorFormats(i);
        if (errorFormat != null) {
            ErrorCodeMetadata errorCodeMetadata = new ErrorCodeMetadata();
            errorCodeMetadata.setFormat(errorFormat.getFormat());
            errorCodes.put(errorFormat.getCode(), errorCodeMetadata);
        }
    }
    for (int i = 0; i < this.ampValidatorManager.getRules().getErrorSpecificityList().size(); ++i) {
        final ValidatorProtos.ErrorSpecificity errorSpecificity =
                this.ampValidatorManager.getRules().getErrorSpecificity(i);
        if (errorSpecificity != null) {
            ErrorCodeMetadata errorCodeMetadata = errorCodes.get(errorSpecificity.getCode());
            if (errorCodeMetadata != null) {
                errorCodeMetadata.setSpecificity(errorSpecificity.getSpecificity());
            }
        }
    }
}
/**
 * Returns a Pattern that full-matches the given regex (anchored as
 * {@code ^(regex)$}), caching compiled patterns by their escaped source.
 *
 * <p>Fix: the previous implementation scanned the cache's keySet comparing
 * stored <em>escaped</em> keys against the <em>unescaped</em> input, so any
 * regex containing '{' missed the cache and was recompiled on every call;
 * it was also O(n) per lookup. A direct map lookup is both correct and O(1).
 *
 * @param regex the regex.
 * @return returns the full match regex pattern.
 */
public Pattern getFullMatchRegex(@Nonnull final String regex) {
    final String regexEscape = regex.replace("{", "\\{");
    final Pattern cached = this.fullMatchRegexes.get(regexEscape);
    if (cached != null) {
        return cached;
    }
    final Pattern pattern = Pattern.compile("^(" + regexEscape + ")$");
    this.fullMatchRegexes.put(regexEscape, pattern);
    return pattern;
}
/**
 * Returns a Pattern that full-matches the given case-insensitive regex
 * (anchored as {@code ^(caseiRegex)$}), caching by escaped source.
 * Uses a direct map lookup instead of the previous O(n) keySet scan.
 *
 * @param caseiRegex case insensitive regex.
 * @return returns the full match case insensitive regex pattern.
 */
public Pattern getFullMatchCaseiRegex(@Nonnull final String caseiRegex) {
    final String caseiRegexEscape = caseiRegex.replace("{", "\\{");
    final Pattern cached = this.fullMatchCaseiRegexes.get(caseiRegexEscape);
    if (cached != null) {
        return cached;
    }
    final Pattern pattern = Pattern.compile("^(" + caseiRegexEscape + ")$");
    this.fullMatchCaseiRegexes.put(caseiRegexEscape, pattern);
    return pattern;
}
/**
 * Returns an unanchored (partial-match) Pattern for the given
 * case-insensitive regex, caching by escaped source. Uses a direct map
 * lookup instead of the previous O(n) keySet scan.
 *
 * @param caseiRegex the regex.
 * @return returns the partial match case insensitive match regex pattern.
 */
public Pattern getPartialMatchCaseiRegex(@Nonnull final String caseiRegex) {
    final String caseiRegexEscape = caseiRegex.replace("{", "\\{");
    final Pattern cached = this.partialMatchCaseiRegexes.get(caseiRegexEscape);
    if (cached != null) {
        return cached;
    }
    final Pattern pattern = Pattern.compile(caseiRegexEscape);
    this.partialMatchCaseiRegexes.put(caseiRegexEscape, pattern);
    return pattern;
}
/**
* Computes the name for a given reference point.
* Used in generating error strings.
*
* @param referencePoint the reference point.
* @return returns the compute name for a given reference point.
* @throws TagValidationException the TagValidationException.
*/
public String getReferencePointName(@Nonnull final ValidatorProtos.ReferencePoint referencePoint)
        throws TagValidationException {
    // tagSpecName here is actually a number, which was replaced in
    // validator_gen_js.py from the name string, so this works.
    final int refPointTagSpecId =
            ampValidatorManager.getTagSpecIdByReferencePointTagSpecName(referencePoint.getTagSpecName());
    return TagSpecUtils.getTagSpecName(this.getByTagSpecId(refPointTagSpecId).getSpec());
}
/**
* Return the ParsedTagSpec given the reference point spec name.
*
* @param specName the spec name.
* @return return the ParsedTagSpec given the reference point spec name.
* @throws TagValidationException the TagValidationException.
*/
public ParsedTagSpec getByTagSpecId(final String specName) throws TagValidationException {
    // Resolve the reference-point spec name to its numeric id, then delegate.
    return this.getByTagSpecId(
            this.ampValidatorManager.getTagSpecIdByReferencePointTagSpecName(specName));
}
/**
* Returns the spec id by spec name.
*
* @param specName the spec name.
* @return returns the spec id if exists.
*/
public Integer getTagSpecIdBySpecName(@Nonnull final String specName) {
    // Returns null when no tagspec with that name was indexed for this format.
    return tagSpecNameToSpecId.get(specName);
}
/**
* Returns the ParsedTagSpec given the tag spec id.
*
* @param id tag spec id.
* @return returns the ParsedTagSpec.
* @throws TagValidationException the TagValidationException.
*/
public ParsedTagSpec getByTagSpecId(final int id) throws TagValidationException {
    // Fast path: previously parsed and cached.
    final ParsedTagSpec cached = this.parsedTagSpecById.get(id);
    if (cached != null) {
        return cached;
    }
    // Slow path: parse the raw TagSpec and memoize the result.
    final ValidatorProtos.TagSpec tag = this.ampValidatorManager.getRules().getTags(id);
    if (tag == null) {
        throw new TagValidationException("TagSpec is null for tag spec id " + id);
    }
    final ParsedTagSpec freshlyParsed = new ParsedTagSpec(
            this.parsedAttrSpecs,
            TagSpecUtils.shouldRecordTagspecValidated(tag, id, this.tagSpecIdsToTrack),
            tag,
            id);
    this.parsedTagSpecById.put(id, freshlyParsed);
    return freshlyParsed;
}
/**
* Returns the tag spec id by reference point tag spec name.
*
* @param tagName the reference point tag name.
* @return returns the tag spec id by reference point tag spec name.
* @throws TagValidationException the TagValidationException.
*/
public int getTagSpecIdByReferencePointTagSpecName(@Nonnull final String tagName) throws TagValidationException {
    // Thin delegation to the rule manager's reference-point index.
    return this.ampValidatorManager.getTagSpecIdByReferencePointTagSpecName(tagName);
}
/**
* Returns true iff resultA is a better result than resultB.
*
* @param resultA a validation result.
* @param resultB a validation result.
* @return returns true iff resultA is a better result than resultB.
* @throws ValidatorException the ValidatorException.
*/
public boolean betterValidationResultThan(@Nonnull final ValidatorProtos.ValidationResult.Builder resultA,
                                          @Nonnull final ValidatorProtos.ValidationResult.Builder resultB)
        throws ValidatorException {
    // Tie-break cascade; the order of these checks is significant.
    // 1. Overall status (PASS beats FAIL etc., per betterValidationStatusThan).
    if (resultA.getStatus() != resultB.getStatus()) {
        return this.betterValidationStatusThan(resultA.getStatus(), resultB.getStatus());
    }
    // 2. If one of the error sets by error.code is a subset of the other
    // error set's error.codes, use the subset one. It's essentially saying, if
    // you fix these errors that we both complain about, then you'd be passing
    // for my tagspec, but not the other one, regardless of specificity.
    if (this.isErrorSubset(resultB.getErrorsList(), resultA.getErrorsList())) {
        return true;
    }
    if (this.isErrorSubset(resultA.getErrorsList(), resultB.getErrorsList())) {
        return false;
    }
    // 3. Prefer the most specific error found in either set.
    if (this.maxSpecificity(resultA.getErrorsList())
            > this.maxSpecificity(resultB.getErrorsList())) {
        return true;
    }
    if (this.maxSpecificity(resultB.getErrorsList())
            > this.maxSpecificity(resultA.getErrorsList())) {
        return false;
    }
    // 4. Prefer the attempt with the fewest errors if the most specific errors
    // are the same.
    if (resultA.getErrorsCount() < resultB.getErrorsCount()) {
        return true;
    }
    if (resultB.getErrorsCount() < resultA.getErrorsCount()) {
        return false;
    }
    // Equal, so not better than.
    return false;
}
/**
* Checks if maybeTypeIdentifier is contained in rules' typeIdentifiers.
*
* @param maybeTypeIdentifier identifier to check
* @return true iff maybeTypeIdentifier is in typeIdentifiers.
*/
public boolean isTypeIdentifier(@Nonnull final String maybeTypeIdentifier) {
    // typeIdentifiers is a Map used purely as a set of known identifiers.
    return this.typeIdentifiers.containsKey(maybeTypeIdentifier);
}
/**
* Validates type identifiers within a set of attributes, adding
* ValidationErrors as necessary, and sets type identifiers on
* ValidationResult.typeIdentifier.
*
* @param attrs sax Attributes object from tag.
* @param formatIdentifiers html formats
* @param context global context of document validation
* @param validationResult status of document validation
*/
public void validateTypeIdentifiers(@Nonnull final Attributes attrs,
                                    @Nonnull final List<String> formatIdentifiers, @Nonnull final Context context,
                                    @Nonnull final ValidatorProtos.ValidationResult.Builder validationResult) {
    boolean hasMandatoryTypeIdentifier = false;
    for (int i = 0; i < attrs.getLength(); i++) {
        // Verify this attribute is a type identifier. Other attributes are
        // validated in validateAttributes.
        if (this.isTypeIdentifier(attrs.getLocalName(i))) {
            // Verify this type identifier is allowed for this format.
            if (formatIdentifiers.contains(attrs.getLocalName(i))) {
                // Only add the type identifier once per representation. That is, both
                // "⚡" and "amp", which represent the same type identifier.
                // Both emoji spellings (with and without variation selector U+FE0F)
                // are canonicalized to the ASCII form.
                final String typeIdentifier = attrs.getLocalName(i).replace("\u26a1\ufe0f", "amp")
                        .replace("\u26a1", "amp");
                if (!validationResult.getTypeIdentifierList().contains(typeIdentifier)) {
                    validationResult.addTypeIdentifier(typeIdentifier);
                    context.recordTypeIdentifier(typeIdentifier);
                }
                // The type identifier "actions" and "transformed" are not considered
                // mandatory unlike other type identifiers.
                if (!typeIdentifier.equals("actions")
                        && !typeIdentifier.equals("transformed")
                        && !typeIdentifier.equals("data-ampdevmode")) {
                    hasMandatoryTypeIdentifier = true;
                }
                // The type identifier "transformed" has restrictions on it's value.
                // It must be \w+;v=\d+ (e.g. google;v=1).
                if ((typeIdentifier.equals("transformed") && !(attrs.getValue(i).equals("")))) {
                    Matcher reResult = TRANSFORMED_VALUE_REGEX.matcher(attrs.getValue(i));
                    if (reResult.matches()) {
                        // Group 1 is assumed to capture the numeric version suffix.
                        validationResult.setTransformerVersion(Integer.parseInt(reResult.group(1)));
                    } else {
                        final List<String> params = new ArrayList<>();
                        params.add(attrs.getLocalName(i));
                        params.add("html");
                        params.add(attrs.getValue(i));
                        context.addError(
                                ValidatorProtos.ValidationError.Code.INVALID_ATTR_VALUE,
                                context.getLineCol(),
                                /*params=*/params,
                                "https://amp.dev/documentation/guides-and-tutorials/learn/spec/amphtml#required-markup",
                                validationResult);
                    }
                }
                if (typeIdentifier.equals("data-ampdevmode")) {
                    // https://github.com/ampproject/amphtml/issues/20974
                    // We always emit an error for this type identifier, but it
                    // suppresses other errors later in the document.
                    context.addError(
                            ValidatorProtos.ValidationError.Code.DEV_MODE_ONLY,
                            context.getLineCol(), /*params=*/new ArrayList<>(), /*url*/ "",
                            validationResult);
                }
            } else {
                // Known type identifier, but not valid for this HTML format.
                final List<String> params = new ArrayList<>();
                params.add(attrs.getLocalName(i));
                params.add("html");
                context.addError(
                        ValidatorProtos.ValidationError.Code.DISALLOWED_ATTR,
                        context.getLineCol(), /*params=*/params,
                        "https://amp.dev/documentation/guides-and-tutorials/learn/spec/amphtml#required-markup",
                        validationResult);
            }
        }
    }
    if (!hasMandatoryTypeIdentifier) {
        // Missing mandatory type identifier (any AMP variant but "actions" or
        // "transformed").
        final List<String> params = new ArrayList<>();
        params.add(formatIdentifiers.get(0));
        params.add("html");
        context.addError(
                ValidatorProtos.ValidationError.Code.MANDATORY_ATTR_MISSING,
                context.getLineCol(), /*params=*/params,
                "https://amp.dev/documentation/guides-and-tutorials/learn/spec/amphtml#required-markup",
                validationResult);
    }
}
  /**
   * Validates the HTML tag for type identifiers.
   *
   * <p>Dispatches to the type-identifier validation with the identifier list that matches this
   * rules object's HTML format (AMP, AMP4ADS, AMP4EMAIL or ACTIONS). For the ACTIONS format an
   * additional MANDATORY_ATTR_MISSING error is emitted when the "actions" type identifier was not
   * recorded on the tag. Any other format performs no validation.
   *
   * @param htmlTag the html tag to validate.
   * @param context global context of document validation
   * @param validationResult status of document validation
   */
  public void validateHtmlTag(@Nonnull final ParsedHtmlTag htmlTag,
                              @Nonnull final Context context,
                              @Nonnull final ValidatorProtos.ValidationResult.Builder validationResult) {
    switch (this.htmlFormat) {
      case AMP:
        this.validateTypeIdentifiers(
                htmlTag.attrs(), TagSpecUtils.AMP_IDENTIFIERS, context, validationResult);
        break;
      case AMP4ADS:
        this.validateTypeIdentifiers(
                htmlTag.attrs(), TagSpecUtils.AMP4ADS_IDENTIFIERS, context, validationResult);
        break;
      case AMP4EMAIL:
        this.validateTypeIdentifiers(
                htmlTag.attrs(), TagSpecUtils.AMP4EMAIL_IDENTIFIERS, context, validationResult);
        break;
      case ACTIONS:
        this.validateTypeIdentifiers(
                htmlTag.attrs(), TagSpecUtils.ACTIONS_IDENTIFIERS, context, validationResult);
        // "actions" is not treated as a mandatory identifier inside
        // validateTypeIdentifiers, so the ACTIONS format enforces its presence here.
        if (!validationResult.getTypeIdentifierList().contains("actions")) {
          final List<String> params = new ArrayList<>();
          params.add("actions");
          params.add("html");
          context.addError(
                  ValidatorProtos.ValidationError.Code.MANDATORY_ATTR_MISSING,
                  context.getLineCol(), /* params */params,
                  /* url */"", validationResult);
        }
        break;
      default:
        // Unrecognized format: nothing to validate.
    }
  }
/**
* Returns the error code specificity.
*
* @param errorCode the validation error code.
* @return returns the error code specificity.
*/
public int specificity(@Nonnull final ValidatorProtos.ValidationError.Code errorCode) {
return this.errorCodes.get(errorCode).getSpecificity();
}
/**
* A helper function which allows us to compare two candidate results
* in validateTag to report the results which have the most specific errors.
*
* @param errors a list of validation errors.
* @return returns maximum value of specificity found in all errors.
* @throws ValidatorException the TagValidationException.
*/
public int maxSpecificity(@Nonnull final List<ValidatorProtos.ValidationError> errors) throws ValidatorException {
int max = 0;
for (final ValidatorProtos.ValidationError error : errors) {
if (error.getCode() == null) {
throw new ValidatorException("Validation error code is null");
}
max = Math.max(this.specificity(error.getCode()), max);
}
return max;
}
/**
* Returns true iff the error codes in errorsB are a subset of the error
* codes in errorsA.
*
* @param errorsA a list of validation errors.
* @param errorsB a list of validation errors.
* @return returns true iff the error codes in errorsB are a subset of the error
* codes in errorsA.
*/
public boolean isErrorSubset(@Nonnull final List<ValidatorProtos.ValidationError> errorsA,
@Nonnull final List<ValidatorProtos.ValidationError> errorsB) {
Map<ValidatorProtos.ValidationError.Code, Integer> codesA = new HashMap<>();
for (final ValidatorProtos.ValidationError error : errorsA) {
codesA.put(error.getCode(), 1);
}
Map<ValidatorProtos.ValidationError.Code, Integer> codesB = new HashMap<>();
for (final ValidatorProtos.ValidationError error : errorsB) {
codesB.put(error.getCode(), 1);
if (!codesA.containsKey(error.getCode())) {
return false;
}
}
// Every code in B is also in A. If they are the same, not a subset.
return codesA.size() > codesB.size();
}
/**
* Returns true iff statusA is a better status than statusB.
*
* @param statusA validation result status.
* @param statusB validation result status.
* @return returns true iff statusA is a better status than statusB.
* @throws ValidatorException the ValidatorException.
*/
public boolean betterValidationStatusThan(@Nonnull final ValidatorProtos.ValidationResult.Status statusA,
@Nonnull final ValidatorProtos.ValidationResult.Status statusB)
throws ValidatorException {
// Equal, so not better than.
if (statusA == statusB) {
return false;
}
// PASS > FAIL > UNKNOWN
if (statusA == ValidatorProtos.ValidationResult.Status.PASS) {
return true;
}
if (statusB == ValidatorProtos.ValidationResult.Status.PASS) {
return false;
}
if (statusA == ValidatorProtos.ValidationResult.Status.FAIL) {
return true;
}
if (statusA == ValidatorProtos.ValidationResult.Status.UNKNOWN) {
throw new ValidatorException("Status unknown");
}
return false;
}
/**
* Returns a TagSpecDispatch for a give tag name.
*
* @param tagName the tag name.
* @return returns a TagSpecDispatch if found.
*/
public TagSpecDispatch dispatchForTagName(@Nonnull final String tagName) {
return this.tagSpecByTagName.get(tagName);
}
/**
* Returns a styles spec url.
*
* @return returns a styles spec url.
*/
public String getStylesSpecUrl() {
return this.ampValidatorManager.getRules().getStylesSpecUrl();
}
/**
* Returns a template spec url.
*
* @return returns a template spec url.
*/
public String getTemplateSpecUrl() {
return this.ampValidatorManager.getRules().getTemplateSpecUrl();
}
/**
* Returns the script spec url.
*
* @return returns the script spec url.
*/
public String getScriptSpecUrl() {
return this.ampValidatorManager.getRules().getScriptSpecUrl();
}
/**
* Returns the list of Css length spec.
*
* @return returns the list of Css length spec.
*/
public List<ValidatorProtos.CssLengthSpec> getCssLengthSpec() {
return this.ampValidatorManager.getRules().getCssLengthSpecList();
}
/**
* Returns the descendant tag lists.
*
* @return returns the descendant tag lists.
*/
public List<ValidatorProtos.DescendantTagList> getDescendantTagLists() {
return ampValidatorManager.getDescendantTagLists();
}
/**
* Returns a combined black listed regex.
*
* @param tagSpecId tag spec id.
* @return returns a combined black listed regex.
*/
public String getCombinedBlacklistedCdataRegex(final int tagSpecId) {
return ampValidatorManager.getCombinedBlacklistedCdataRegex(tagSpecId);
}
/**
* Emits any validation errors which require a global view
* (mandatory tags, tags required by other tags, mandatory alternatives).
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitGlobalTagValidationErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
this.maybeEmitMandatoryTagValidationErrors(context, validationResult);
this.maybeEmitAlsoRequiresTagValidationErrors(context, validationResult);
this.maybeEmitMandatoryAlternativesSatisfiedErrors(
context, validationResult);
this.maybeEmitCssLengthSpecErrors(context, validationResult);
this.maybeEmitValueSetMismatchErrors(context, validationResult);
}
/**
* Emits errors when there is a ValueSetRequirement with no matching
* ValueSetProvision in the document.
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitValueSetMismatchErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
final Set<String> providedKeys = context.valueSetsProvided();
for (final String requiredKey : context.valueSetsRequired().keySet()) {
if (!providedKeys.contains(requiredKey)) {
context.valueSetsRequired().get(requiredKey);
for (final ValidatorProtos.ValidationError error : context.valueSetsRequired().get(requiredKey)) {
context.addBuiltError(error, validationResult);
}
}
}
}
/**
* Emits errors for css size limitations across entire document.
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitCssLengthSpecErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
// Only emit an error if there have been inline styles used. Otherwise
// if there was to be an error it would have been caught by
// CdataMatcher::Match().
if (context.getInlineStyleByteSize() == 0) {
return;
}
final int bytesUsed =
context.getInlineStyleByteSize() + context.getStyleAmpCustomByteSize();
for (final ValidatorProtos.CssLengthSpec cssLengthSpec : getCssLengthSpec()) {
if (!this.isCssLengthSpecCorrectHtmlFormat(cssLengthSpec)) {
continue;
}
if (cssLengthSpec.hasMaxBytes() && bytesUsed > cssLengthSpec.getMaxBytes()) {
final List<String> params = new ArrayList<>();
params.add(String.valueOf(bytesUsed));
params.add(String.valueOf(cssLengthSpec.getMaxBytes()));
context.addError(
ValidatorProtos.ValidationError.Code
.STYLESHEET_AND_INLINE_STYLE_TOO_LONG,
context.getLineCol(), /* params */
params,
/* specUrl */ cssLengthSpec.getSpecUrl(), validationResult);
}
}
}
/**
* Emits errors for tags that are specified as mandatory alternatives.
* Returns false iff context.Progress(result).complete.
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitMandatoryAlternativesSatisfiedErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
final List<String> satisfied = context.getMandatoryAlternativesSatisfied();
/** @type {!Array<string>} */
final List<String> missing = new ArrayList<>();
Map<String, String> specUrlsByMissing = new HashMap<>();
for (final ValidatorProtos.TagSpec tagSpec : this.ampValidatorManager.getRules().getTagsList()) {
if (!tagSpec.hasMandatoryAlternatives() || !this.isTagSpecCorrectHtmlFormat(tagSpec)) {
continue;
}
final String alternative = tagSpec.getMandatoryAlternatives();
if (satisfied.indexOf(alternative) == -1) {
if (!missing.contains(alternative)) {
missing.add(alternative);
specUrlsByMissing.put(alternative, TagSpecUtils.getTagSpecUrl(tagSpec));
}
}
}
//sortAndUniquify(missing);
for (final String tagMissing : missing) {
final List<String> params = new ArrayList<>();
params.add(tagMissing);
context.addError(
ValidatorProtos.ValidationError.Code.MANDATORY_TAG_MISSING,
context.getLineCol(),
params,
/* specUrl */ specUrlsByMissing.get(tagMissing),
validationResult);
}
}
/**
* Emits errors for tags that are specified to be mandatory.
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitMandatoryTagValidationErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
for (int tagSpecId : this.mandatoryTagSpecs) {
final ParsedTagSpec parsedTagSpec = this.getByTagSpecId(tagSpecId);
// Skip TagSpecs that aren't used for these type identifiers.
if (!parsedTagSpec.isUsedForTypeIdentifiers(
context.getTypeIdentifiers())) {
continue;
}
if (!context.getTagspecsValidated().containsKey(tagSpecId)) {
final ValidatorProtos.TagSpec spec = parsedTagSpec.getSpec();
final List<String> params = new ArrayList<>();
params.add(TagSpecUtils.getTagSpecName(spec));
context.addError(
ValidatorProtos.ValidationError.Code.MANDATORY_TAG_MISSING,
context.getLineCol(),
params,
TagSpecUtils.getTagSpecUrl(spec),
validationResult);
}
}
}
/**
* Emits errors for tags that specify that another tag is also required or
* a condition is required to be satisfied.
* Returns false iff context.Progress(result).complete.
*
* @param context the Context.
* @param validationResult the ValidationResult.
* @throws TagValidationException the TagValidationException.
*/
public void maybeEmitAlsoRequiresTagValidationErrors(@Nonnull final Context context,
@Nonnull final ValidatorProtos.ValidationResult.Builder validationResult)
throws TagValidationException {
for (final int tagSpecId : context.getTagspecsValidated().keySet()) {
final ParsedTagSpec parsedTagSpec = this.getByTagSpecId(tagSpecId);
// Skip TagSpecs that aren't used for these type identifiers.
if (!parsedTagSpec.isUsedForTypeIdentifiers(
context.getTypeIdentifiers())) {
continue;
}
for (final String condition : parsedTagSpec.requires()) {
if (!context.satisfiesCondition(condition)) {
final List<String> params = new ArrayList<>();
params.add(condition);
params.add(TagSpecUtils.getTagSpecName(parsedTagSpec.getSpec()));
context.addError(
ValidatorProtos.ValidationError.Code.TAG_REQUIRED_BY_MISSING,
context.getLineCol(),
params,
TagSpecUtils.getTagSpecUrl(parsedTagSpec.getSpec()),
validationResult);
}
}
for (final String condition : parsedTagSpec.excludes()) {
if (context.satisfiesCondition(condition)) {
final List<String> params = new ArrayList<>();
params.add(TagSpecUtils.getTagSpecName(parsedTagSpec.getSpec()));
params.add(condition);
context.addError(
ValidatorProtos.ValidationError.Code.TAG_EXCLUDED_BY_TAG,
context.getLineCol(),
params,
TagSpecUtils.getTagSpecUrl(parsedTagSpec.getSpec()),
validationResult);
}
}
for (final String requiresTagWarning : parsedTagSpec.getAlsoRequiresTagWarning()) {
final Integer tagSpecIdObj = getTagSpecIdBySpecName(requiresTagWarning);
if (tagSpecIdObj == null || !context.getTagspecsValidated().containsKey(tagSpecIdObj)) {
final ParsedTagSpec alsoRequiresTagspec = this.getByTagSpecId(tagSpecIdObj);
final List<String> params = new ArrayList<>();
params.add(TagSpecUtils.getTagSpecName(alsoRequiresTagspec.getSpec()));
params.add(TagSpecUtils.getTagSpecName(parsedTagSpec.getSpec()));
context.addWarning(
ValidatorProtos.ValidationError.Code.WARNING_TAG_REQUIRED_BY_MISSING,
context.getLineCol(),
params,
TagSpecUtils.getTagSpecUrl(parsedTagSpec.getSpec()),
validationResult);
}
}
}
final ExtensionsContext extensionsCtx = context.getExtensions();
final List<String> unusedRequired = extensionsCtx.unusedExtensionsRequired();
for (final String unusedExtensionName : unusedRequired) {
final List<String> params = new ArrayList<>();
params.add(unusedExtensionName);
context.addError(
ValidatorProtos.ValidationError.Code.EXTENSION_UNUSED,
context.getLineCol(),
params,
/* specUrl */ "", validationResult);
}
}
/**
* Returns true if Css length spec's html format is equal to this html format.
*
* @param cssLengthSpec the CssLengthSpec.
* @return returns true of Css length spec's html format is same as this html format.
*/
private boolean isCssLengthSpecCorrectHtmlFormat(@Nonnull final ValidatorProtos.CssLengthSpec cssLengthSpec) {
return cssLengthSpec.hasHtmlFormat() ? cssLengthSpec.getHtmlFormat() == htmlFormat : false;
}
/**
* Returns true if TagSpec's html format is the same as this html format.
*
* @param tagSpec the TagSpec.
* @return returns true if TagSpec's html format is the same as this html format.
*/
private boolean isTagSpecCorrectHtmlFormat(@Nonnull final ValidatorProtos.TagSpec tagSpec) {
for (final ValidatorProtos.HtmlFormat.Code htmlFormatCode : tagSpec.getHtmlFormatList()) {
if (htmlFormatCode == htmlFormat) {
return true;
}
}
return false;
}
/**
* For every tagspec that contains an ExtensionSpec, we add several TagSpec
* fields corresponding to the data found in the ExtensionSpec.
*/
private void expandExtensionSpec() {
final int numTags = this.ampValidatorManager.getRules().getTagsList().size();
for (int tagSpecId = 0; tagSpecId < numTags; ++tagSpecId) {
ValidatorProtos.TagSpec tagSpec = this.ampValidatorManager.getRules().getTags(tagSpecId);
if (!tagSpec.hasExtensionSpec()) {
continue;
}
ValidatorProtos.TagSpec.Builder tagSpecBuilder = ValidatorProtos.TagSpec.newBuilder();
tagSpecBuilder.mergeFrom(tagSpec);
if (!tagSpec.hasSpecName()) {
tagSpecBuilder.setSpecName(tagSpec.getTagName() + " extension .js script");
}
tagSpecBuilder.setMandatoryParent("HEAD");
if (tagSpec.getExtensionSpec().hasDeprecatedAllowDuplicates()) {
tagSpecBuilder.setUniqueWarning(true);
} else {
tagSpecBuilder.setUnique(true);
}
ValidatorProtos.CdataSpec cdataSpec = ValidatorProtos.CdataSpec.getDefaultInstance();
cdataSpec = cdataSpec.toBuilder().setWhitespaceOnly(true).build();
tagSpecBuilder.setCdata(cdataSpec);
this.ampValidatorManager.getRules().setTags(tagSpecId, tagSpecBuilder.build());
}
}
/**
* @return {!ParsedAttrSpecs}
*/
public ParsedAttrSpecs getParsedAttrSpecs() {
return this.parsedAttrSpecs;
}
  /**
   * Manager that owns the validator rules these helpers read and mutate.
   */
  private AMPValidatorManager ampValidatorManager;
  /**
   * The HTML format (AMP, AMP4ADS, AMP4EMAIL or ACTIONS) these rules validate.
   */
  private ValidatorProtos.HtmlFormat.Code htmlFormat;
  /**
   * ParsedTagSpecs in id order.
   */
  private Map<Integer, ParsedTagSpec> parsedTagSpecById;
  /**
   * ParsedTagSpecs keyed by tag name.
   */
  private Map<String, TagSpecDispatch> tagSpecByTagName;
  /**
   * Tag spec ids that are mandatory for a document to legally validate.
   */
  private List<Integer> mandatoryTagSpecs;
  /**
   * A cache for full-match regex instantiations, keyed by regex source.
   */
  private Map<String, Pattern> fullMatchRegexes;
  /**
   * A cache for full-match case-insensitive regex instantiations, keyed by regex source.
   */
  private Map<String, Pattern> fullMatchCaseiRegexes;
  /**
   * A cache for partial-match case-insensitive regex instantiations, keyed by regex source.
   */
  private Map<String, Pattern> partialMatchCaseiRegexes;
  /**
   * Type identifiers which are used to determine the set of validation
   * rules to be applied.
   */
  private Map<String, Integer> typeIdentifiers;
  /**
   * A ParsedAttrSpecs object shared by the parsed tag specs.
   */
  private ParsedAttrSpecs parsedAttrSpecs;
  /**
   * Tag spec ids to track.
   */
  private Map<Object, Boolean> tagSpecIdsToTrack;
  /**
   * ErrorCodeMetadata (e.g. specificity) keyed by error code.
   */
  private Map<ValidatorProtos.ValidationError.Code, ErrorCodeMetadata> errorCodes;
  /**
   * Tag spec name to spec id.
   */
  private Map<String, Integer> tagSpecNameToSpecId = new HashMap<>();
  /**
   * Matches "transformed" type identifier values of the form w+;v=d+ (e.g. google;v=1);
   * group 1 captures the transformer version number.
   */
  private static final Pattern TRANSFORMED_VALUE_REGEX = Pattern.compile("^\\w+;v=(\\d+)$");
}
| apache-2.0 |
googleapis/java-bigtable-hbase | bigtable-hbase-1.x-parent/bigtable-hbase-1.x-tools/src/main/java/com/google/cloud/bigtable/hbase/tools/HBaseSchemaTranslator.java | 21474 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.hbase.tools;
import com.google.bigtable.repackaged.com.google.api.core.InternalApi;
import com.google.bigtable.repackaged.com.google.common.annotations.VisibleForTesting;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.bigtable.repackaged.com.google.gson.Gson;
import com.google.bigtable.repackaged.com.google.gson.reflect.TypeToken;
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
import com.google.cloud.bigtable.hbase.BigtableOptionsFactory;
import com.google.cloud.bigtable.hbase.tools.ClusterSchemaDefinition.TableSchemaDefinition;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Type;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A utility to create tables in Cloud Bigtable based on the tables in an HBase cluster.
*
* <p>Execute the following command to copy the schema from HBase to Cloud Bigtable:
*
* <pre>
* java -jar bigtable-hbase-1.x-tools-1.20.0-SNAPSHOT-jar-with-dependencies.jar com.google.cloud.bigtable.hbase.tools.HBaseSchemaTranslator \
* -Dhbase.zookeeper.quorum=$ZOOKEEPER_QUORUM \
* -Dhbase.zookeeper.property.clientPort=$ZOOKEEPER_PORT \
* -Dgoogle.bigtable.table.filter=$TABLE_NAME_REGEX \
* -Dgoogle.bigtable.project.id=$PROJECT_ID \
* -Dgoogle.bigtable.instance.id=$INSTANCE_ID
* </pre>
*
* <p>There are 2 ways to run this tool. If you can connect to both HBase and Cloud Bigtable, you
* can use the above method to create tables in Cloud Bigtable directly. However, if HBase master is
* in a private VPC or can't connect to internet, you can dump HBase schema in a file and create
* tables in Cloud Bigtable using that file.
*
* <p>Run the tool from a host that can connect to HBase. Store HBase schema in a file:
*
* <pre>
* java -jar bigtable-hbase-1.x-tools-1.20.0-SNAPSHOT-jar-with-dependencies.jar com.google.cloud.bigtable.hbase.tools.HBaseSchemaTranslator \
* -Dhbase.zookeeper.quorum=$ZOOKEEPER_QUORUM \
* -Dhbase.zookeeper.property.clientPort=$ZOOKEEPER_PORT \
* -Dgoogle.bigtable.table.filter=$TABLE_NAME_REGEX \
* -Dgoogle.bigtable.output.filepath=$SCHEMA_FILE_PATH
* </pre>
*
* <p>Copy the schema file to a host which can connect to Google Cloud. Create tables in Cloud
* Bigtable using the schema file:
*
* <pre>
* java -jar bigtable-hbase-1.x-tools-1.20.0-SNAPSHOT-jar-with-dependencies.jar com.google.cloud.bigtable.hbase.tools.HBaseSchemaTranslator \
* -Dgoogle.bigtable.input.filepath=$SCHEMA_FILE_PATH \
* -Dgoogle.bigtable.project.id=$PROJECT_ID \
* -Dgoogle.bigtable.instance.id=$INSTANCE_ID
* </pre>
*/
@InternalApi
public class HBaseSchemaTranslator {
  /** System property key: destination Bigtable project id. */
  public static final String PROJECT_ID_KEY = "google.bigtable.project.id";
  /** System property key: destination Bigtable instance id. */
  public static final String INSTANCE_ID_KEY = "google.bigtable.instance.id";
  /** System property key: source HBase ZooKeeper quorum. */
  public static final String ZOOKEEPER_QUORUM_KEY = "hbase.zookeeper.quorum";
  /** System property key: source HBase ZooKeeper client port. */
  public static final String ZOOKEEPER_PORT_KEY = "hbase.zookeeper.property.clientPort";
  /** System property key: path of a JSON schema file to read instead of HBase. */
  public static final String INPUT_FILE_KEY = "google.bigtable.input.filepath";
  /** System property key: path of a JSON schema file to write instead of Bigtable. */
  public static final String OUTPUT_FILE_KEY = "google.bigtable.output.filepath";
  /** System property key: regex selecting which HBase tables to read. */
  public static final String TABLE_NAME_FILTER_KEY = "google.bigtable.table.filter";
  /** System property key: path of a JSON file mapping source to destination table names. */
  public static final String SCHEMA_MAPPING_FILEPATH = "google.bigtable.schema.mapping.filepath";

  private static final Logger LOG = LoggerFactory.getLogger(HBaseSchemaTranslator.class);

  // Pipeline: read schema -> transform (e.g. rename tables) -> write schema.
  private final SchemaReader schemaReader;
  private final SchemaTransformer schemaTransformer;
  private final SchemaWriter schemaWriter;
  /** Configuration options parsed from system properties. */
  @VisibleForTesting
  static class SchemaTranslationOptions {

    // Destination (Cloud Bigtable) coordinates; mutually exclusive with outputFilePath.
    @Nullable String projectId;
    @Nullable String instanceId;
    // Source (HBase ZooKeeper) coordinates; mutually exclusive with inputFilePath.
    @Nullable String zookeeperQuorum;
    @Nullable Integer zookeeperPort;
    @Nullable String inputFilePath;
    @Nullable String outputFilePath;
    @Nullable String tableNameFilter;
    @Nullable String schemaMappingFilePath;

    @VisibleForTesting
    SchemaTranslationOptions() {}

    /**
     * Validates that exactly one schema source (HBase or input file) and exactly one destination
     * (Bigtable or output file) are configured; throws IllegalArgumentException otherwise.
     */
    @VisibleForTesting
    void validateOptions() {
      if (outputFilePath != null) {
        Preconditions.checkArgument(
            projectId == null && instanceId == null,
            INSTANCE_ID_KEY + "/" + PROJECT_ID_KEY + " can not be set when output file is set.");
      } else {
        Preconditions.checkArgument(
            projectId != null && instanceId != null, "Schema destination not specified.");
      }
      if (inputFilePath != null) {
        Preconditions.checkArgument(
            zookeeperPort == null && zookeeperQuorum == null,
            ZOOKEEPER_PORT_KEY
                + "/"
                + ZOOKEEPER_QUORUM_KEY
                + " can not be set when input file is set.");
        // The table filter only applies when reading from HBase, i.e. when dumping the
        // schema to a file or writing it straight to Bigtable.
        Preconditions.checkArgument(
            tableNameFilter == null,
            TABLE_NAME_FILTER_KEY
                + " is not supported for reading the schema from a table. "
                + "TableFilter should be used when writing the schema to the file.");
      } else {
        Preconditions.checkArgument(
            zookeeperQuorum != null && zookeeperPort != null, "Schema source not specified. ");
      }
    }

    /**
     * Builds options from the {@code google.bigtable.*} and {@code hbase.zookeeper.*} system
     * properties and validates them, printing usage help before rethrowing on invalid input.
     *
     * @return the validated options.
     */
    public static SchemaTranslationOptions loadOptionsFromSystemProperties() {
      SchemaTranslationOptions options = new SchemaTranslationOptions();
      options.projectId = System.getProperty(PROJECT_ID_KEY);
      options.instanceId = System.getProperty(INSTANCE_ID_KEY);
      options.outputFilePath = System.getProperty(OUTPUT_FILE_KEY);
      options.inputFilePath = System.getProperty(INPUT_FILE_KEY);
      options.zookeeperQuorum = System.getProperty(ZOOKEEPER_QUORUM_KEY);
      if (System.getProperty(ZOOKEEPER_PORT_KEY) != null) {
        options.zookeeperPort = Integer.parseInt(System.getProperty(ZOOKEEPER_PORT_KEY));
      }
      options.tableNameFilter = System.getProperty(TABLE_NAME_FILTER_KEY);
      options.schemaMappingFilePath = System.getProperty(SCHEMA_MAPPING_FILEPATH);

      // Ensure that the options are set properly.
      // TODO It is possible to validate the options without creating the object, but it's less
      //  readable. See if we can make it readable and validate before calling the constructor.
      try {
        options.validateOptions();
      } catch (RuntimeException e) {
        usage(e.getMessage());
        throw e;
      }

      return options;
    }
  }
  /** Interface for reading HBase schema. */
  private interface SchemaReader {
    /**
     * Reads the schema of the source cluster.
     *
     * @return the cluster schema definition.
     * @throws IOException if the schema source can not be read.
     */
    ClusterSchemaDefinition readSchema() throws IOException;
  }
/**
* Reads HBase schema from a JSON file. JSON file should be representation of a {@link
* ClusterSchemaDefinition} object.
*/
@VisibleForTesting
static class FileBasedSchemaReader implements SchemaReader {
private final String schemaFilePath;
public FileBasedSchemaReader(String schemaFilePath) {
this.schemaFilePath = schemaFilePath;
}
@Override
public ClusterSchemaDefinition readSchema() throws IOException {
Reader jsonReader = new FileReader(schemaFilePath);
return new Gson().fromJson(jsonReader, ClusterSchemaDefinition.class);
}
}
  /** Reads the HBase schema by connecting to an HBase cluster. */
  @VisibleForTesting
  static class HBaseSchemaReader implements SchemaReader {

    // Regex selecting which tables to read; ".*" when no filter was supplied.
    private final String tableFilterPattern;
    private final Admin hbaseAdmin;

    /**
     * Connects to HBase via ZooKeeper and prepares an admin client.
     *
     * <p>NOTE(review): the underlying {@link Connection} stays open for the lifetime of the
     * process and is never closed explicitly — confirm this is acceptable for embedding callers.
     *
     * @param zookeeperQuorum ZooKeeper quorum of the source HBase cluster.
     * @param zookeeperPort ZooKeeper client port.
     * @param tableFilterPattern optional regex restricting which tables are read.
     * @throws IOException if the HBase connection can not be established.
     */
    public HBaseSchemaReader(
        String zookeeperQuorum, int zookeeperPort, @Nullable String tableFilterPattern)
        throws IOException {
      // If no filter is provided, use `.*` to match all the tables.
      this.tableFilterPattern = tableFilterPattern == null ? ".*" : tableFilterPattern;
      // Create the HBase admin client.
      Configuration conf = HBaseConfiguration.create();
      conf.setInt(ZOOKEEPER_PORT_KEY, zookeeperPort);
      conf.set(ZOOKEEPER_QUORUM_KEY, zookeeperQuorum);
      Connection connection = ConnectionFactory.createConnection(conf);
      this.hbaseAdmin = connection.getAdmin();
    }

    /** Test-only constructor that takes a pre-built admin client. */
    @VisibleForTesting
    HBaseSchemaReader(Admin admin, @Nullable String tableFilterPattern) {
      this.hbaseAdmin = admin;
      // If no filter is provided, use `.*` to match all the tables.
      this.tableFilterPattern = tableFilterPattern == null ? ".*" : tableFilterPattern;
    }

    /**
     * Lists the tables matching the filter.
     *
     * @return the matching table descriptors; empty (never null) when none match.
     * @throws IOException if the table listing fails.
     */
    private List<HTableDescriptor> getTables() throws IOException {
      // Read the table definitions
      HTableDescriptor[] tables = hbaseAdmin.listTables(tableFilterPattern);
      if (tables == null) {
        LOG.info(" Found no tables");
        return new LinkedList<>();
      }
      return Arrays.asList(tables);
    }

    /**
     * Collects region start keys to use as splits for the destination table.
     *
     * @param table the table whose regions are inspected.
     * @return the region start keys, excluding the empty first-region key.
     * @throws IOException if the region listing fails.
     */
    private byte[][] getSplits(TableName table) throws IOException {
      List<HRegionInfo> regions = hbaseAdmin.getTableRegions(table);
      if (regions == null || regions.isEmpty()) {
        return new byte[0][];
      }
      List<byte[]> splits = new ArrayList<>();
      for (HRegionInfo region : regions) {
        if (Arrays.equals(region.getStartKey(), HConstants.EMPTY_START_ROW)) {
          // CBT client does not accept an empty row as a split.
          continue;
        }
        splits.add(region.getStartKey());
      }
      LOG.debug("Found {} splits for table {}.", splits.size(), table.getNameAsString());
      return splits.toArray(new byte[0][]);
    }

    /**
     * Reads every matching table's descriptor and splits into a {@link ClusterSchemaDefinition}.
     *
     * @return the schema of the source cluster.
     * @throws IOException if HBase can not be queried.
     */
    @Override
    public ClusterSchemaDefinition readSchema() throws IOException {
      LOG.info("Reading schema from HBase.");
      ClusterSchemaDefinition schemaDefinition = new ClusterSchemaDefinition();
      List<HTableDescriptor> tables = getTables();
      for (HTableDescriptor table : tables) {
        LOG.debug("Found table {} in HBase.", table.getNameAsString());
        LOG.trace("Table details: {}", table);
        schemaDefinition.addTableSchemaDefinition(table, getSplits(table.getTableName()));
      }
      return schemaDefinition;
    }
  }
  /**
   * Interface for writing the HBase schema represented by a {@link ClusterSchemaDefinition} object.
   */
  private interface SchemaWriter {
    /**
     * Writes the given schema to the destination (a file or Cloud Bigtable).
     *
     * @param schemaDefinition the schema to write.
     * @throws IOException if the destination can not be written.
     */
    void writeSchema(ClusterSchemaDefinition schemaDefinition) throws IOException;
  }
/**
* Writes the HBase schema into a file. File contains the JSON representation of the {@link
* ClusterSchemaDefinition} object.
*/
@VisibleForTesting
static class FileBasedSchemaWriter implements SchemaWriter {
private final String outputFilePath;
public FileBasedSchemaWriter(String outputFilePath) {
this.outputFilePath = outputFilePath;
}
@Override
public void writeSchema(ClusterSchemaDefinition schemaDefinition) throws IOException {
Preconditions.checkNotNull(schemaDefinition, "SchemaDefinitions can not be null.");
try (Writer writer = new FileWriter(outputFilePath)) {
new Gson().toJson(schemaDefinition, writer);
LOG.info("Wrote schema to file " + outputFilePath);
}
}
}
/**
* Creates tables in Cloud Bigtable based on the schema provided by the {@link
* ClusterSchemaDefinition} object.
*/
@VisibleForTesting
static class BigtableSchemaWriter implements SchemaWriter {
private final Admin btAdmin;
public BigtableSchemaWriter(String projectId, String instanceId) throws IOException {
Configuration btConf = BigtableConfiguration.configure(projectId, instanceId);
btConf.set(BigtableOptionsFactory.CUSTOM_USER_AGENT_KEY, "HBaseSchemaTranslator");
this.btAdmin = ConnectionFactory.createConnection(btConf).getAdmin();
}
@VisibleForTesting
BigtableSchemaWriter(Admin btAdmin) {
this.btAdmin = btAdmin;
}
@Override
public void writeSchema(ClusterSchemaDefinition schemaDefinition) {
Preconditions.checkNotNull(schemaDefinition, "SchemaDefinitions can not be null.");
List<String> failedTables = new ArrayList<>();
for (TableSchemaDefinition tableSchemaDefinition : schemaDefinition.tableSchemaDefinitions) {
String tableName = tableSchemaDefinition.name;
try {
btAdmin.createTable(
tableSchemaDefinition.getHbaseTableDescriptor(), tableSchemaDefinition.splits);
LOG.info("Created table {} in Bigtable.", tableName);
} catch (Exception e) {
failedTables.add(tableName);
LOG.error("Failed to create table {}.", e, tableName);
// Continue creating tables in BT. Skipping creation failures makes the script idempotent
// as BT will throw TableExistsException for a table that is already present.
}
}
if (!failedTables.isEmpty()) {
throw new RuntimeException(
"Failed to create some tables in Cloud Bigtable: " + failedTables);
}
}
}
public HBaseSchemaTranslator(SchemaTranslationOptions options) throws IOException {
Preconditions.checkNotNull(options, "SchemaTranslationOptions can not be null.");
if (options.inputFilePath != null) {
this.schemaReader = new FileBasedSchemaReader(options.inputFilePath);
} else {
this.schemaReader =
new HBaseSchemaReader(
options.zookeeperQuorum, options.zookeeperPort, options.tableNameFilter);
}
if (options.schemaMappingFilePath != null) {
this.schemaTransformer =
JsonBasedSchemaTransformer.newSchemaTransformerFromJsonFile(
options.schemaMappingFilePath);
} else {
this.schemaTransformer = new NoopSchemaTransformer();
}
if (options.outputFilePath != null) {
this.schemaWriter = new FileBasedSchemaWriter(options.outputFilePath);
} else {
this.schemaWriter = new BigtableSchemaWriter(options.projectId, options.instanceId);
}
}
  /**
   * Transforms the {@link ClusterSchemaDefinition} read by {@link SchemaReader} before writing it
   * to {@link SchemaWriter}.
   */
  private interface SchemaTransformer {
    /**
     * Applies the transformation to the given schema.
     *
     * @param originalSchema the schema as read from the source.
     * @return the transformed schema to write.
     * @throws IOException if the transformation requires I/O that fails.
     * @throws DeserializationException if a table descriptor can not be deserialized.
     */
    ClusterSchemaDefinition transform(ClusterSchemaDefinition originalSchema)
        throws IOException, DeserializationException;
  }
/** No-op implementation of {@link SchemaTransformer}. Returns the original schema definition. */
private static class NoopSchemaTransformer implements SchemaTransformer {
  @Override
  public ClusterSchemaDefinition transform(ClusterSchemaDefinition originalSchema) {
    // Identity transform: used when no schema-mapping file was supplied.
    return originalSchema;
  }
}
/**
 * Transforms the {@link ClusterSchemaDefinition} based on a provided JSON map. It can rename
 * tables before writing them to {@link SchemaWriter}.
 *
 * <p>JSON map should look like { "SourceTable": "DestinationTable",
 * "sourceTable-2":"DestinationTable-2"}
 */
@VisibleForTesting
static class JsonBasedSchemaTransformer implements SchemaTransformer {

  // Map from old-tableName -> new-tableName
  @VisibleForTesting Map<String, String> tableNameMappings;

  @VisibleForTesting
  JsonBasedSchemaTransformer(Map<String, String> tableNameMappings) {
    this.tableNameMappings = tableNameMappings;
    LOG.info("Creating SchemaTransformer with schema mapping: {}", tableNameMappings);
  }

  /**
   * Creates a transformer from a JSON file containing a single string-to-string object mapping
   * source table names to destination table names.
   *
   * @param mappingFilePath path of the JSON mapping file
   * @throws IOException if the file cannot be read
   * @throws IllegalStateException if the file does not contain a JSON object
   */
  public static JsonBasedSchemaTransformer newSchemaTransformerFromJsonFile(
      String mappingFilePath) throws IOException {
    Type mapType = new TypeToken<Map<String, String>>() {}.getType();
    Map<String, String> tableNameMappings;
    // FIX: decode the mapping file as UTF-8 explicitly. The previous FileReader used the
    // platform-default charset, which can corrupt non-ASCII table names (JSON is UTF-8 per
    // RFC 8259). Fully qualified names are used so the file's import list is untouched.
    try (Reader jsonReader =
        new java.io.InputStreamReader(
            new java.io.FileInputStream(mappingFilePath),
            java.nio.charset.StandardCharsets.UTF_8)) {
      tableNameMappings = new Gson().fromJson(jsonReader, mapType);
    }
    if (tableNameMappings == null) {
      throw new IllegalStateException(
          "SchemaMapping file does not contain valid schema mappings");
    }
    return new JsonBasedSchemaTransformer(tableNameMappings);
  }

  @Override
  public ClusterSchemaDefinition transform(ClusterSchemaDefinition originalSchema)
      throws DeserializationException, IOException {
    ClusterSchemaDefinition transformedSchema = new ClusterSchemaDefinition();
    // Apply the transformations.
    for (TableSchemaDefinition tableSchemaDefinition : originalSchema.tableSchemaDefinitions) {
      String newTableName = tableSchemaDefinition.name;
      HTableDescriptor tableDescriptor = tableSchemaDefinition.getHbaseTableDescriptor();
      HTableDescriptor newTableDescriptor = tableDescriptor;
      // Override the table name if it's present in the mapping file
      if (tableNameMappings.containsKey(newTableName)) {
        newTableName = tableNameMappings.get(newTableName);
        // Rename the table and copy all the other configs, including the column families.
        newTableDescriptor =
            new HTableDescriptor(TableName.valueOf(newTableName), tableDescriptor);
        LOG.info("Renaming table {} to {}.", tableSchemaDefinition.name, newTableName);
      }
      // finalize the transformed schema
      transformedSchema.addTableSchemaDefinition(
          newTableDescriptor, tableSchemaDefinition.splits);
    }
    return transformedSchema;
  }
}
/** Test-only constructor: translates the schema without applying any transformation. */
@VisibleForTesting
HBaseSchemaTranslator(SchemaReader schemaReader, SchemaWriter schemaWriter) {
  this(schemaReader, schemaWriter, new NoopSchemaTransformer());
}
/** Test-only constructor allowing the reader, writer and transformer to be injected. */
@VisibleForTesting
HBaseSchemaTranslator(
    SchemaReader schemaReader, SchemaWriter schemaWriter, SchemaTransformer schemaTransformer) {
  this.schemaReader = schemaReader;
  this.schemaWriter = schemaWriter;
  this.schemaTransformer = schemaTransformer;
}
/**
 * Runs the translation pipeline: reads the schema, applies the transformer, writes the result.
 *
 * @throws IOException if reading or writing the schema fails
 * @throws DeserializationException if a table descriptor cannot be deserialized
 */
public void translate() throws IOException, DeserializationException {
  ClusterSchemaDefinition schemaDefinition = schemaReader.readSchema();
  LOG.info("Read schema with {} tables.", schemaDefinition.tableSchemaDefinitions.size());
  this.schemaWriter.writeSchema(schemaTransformer.transform(schemaDefinition));
}
/**
 * Prints usage information to stderr, optionally preceded by an error message.
 *
 * @param errorMsg error message; may be null or empty, in which case it is omitted
 */
private static void usage(final String errorMsg) {
  // Print usage on system.err instead of logger.
  final java.io.PrintStream err = System.err;
  if (errorMsg != null && !errorMsg.isEmpty()) {
    err.println("ERROR: " + errorMsg);
  }
  String jarName;
  try {
    java.net.URI jarUri =
        HBaseSchemaTranslator.class.getProtectionDomain().getCodeSource().getLocation().toURI();
    jarName = new File(jarUri.getPath()).getName();
  } catch (URISyntaxException e) {
    // Fall back to a placeholder when the code source location is not a valid URI.
    jarName = "<jar>";
  }
  err.printf(
      "Usage: java -jar %s com.google.cloud.bigtable.hbase.tools.HBaseSchemaTranslator "
          + "<schema_source> <schema_destination> <table-name-regex> \n\n",
      jarName);
  err.println(" Schema Source can be 1 of the following:");
  err.println(
      " -D "
          + ZOOKEEPER_QUORUM_KEY
          + "=<zookeeper quorum> -D "
          + ZOOKEEPER_PORT_KEY
          + "=<zookeeper port>");
  err.println(" -D " + INPUT_FILE_KEY + "=<schema file path>");
  err.println(" Schema destination can be 1 of the following:");
  err.println(
      " -D "
          + PROJECT_ID_KEY
          + "=<bigtable project id> -D "
          + INSTANCE_ID_KEY
          + "=<bigtable instance id>");
  err.println(" -D " + OUTPUT_FILE_KEY + "=<schema file path>");
  err.println(" Additionally, you can filter tables to create when using HBase as source");
  err.println(" -D " + TABLE_NAME_FILTER_KEY + "=<table name regex>");
  err.println(
      " Optionally, the tables can be renamed by providing a JSON map. Example JSON "
          + "{\"source-table\": \"destination-table\", \"namespace:source-table2\": \"namespace-destination-table2\"}.");
  err.println(" -D " + SCHEMA_MAPPING_FILEPATH + "=/schema/mapping/file/path.json");
}
/**
 * Entry point: loads options from system properties, then translates the schema from the
 * configured source to the configured destination.
 */
public static void main(String[] args) throws IOException, DeserializationException {
  SchemaTranslationOptions options = SchemaTranslationOptions.loadOptionsFromSystemProperties();
  HBaseSchemaTranslator translator = new HBaseSchemaTranslator(options);
  translator.translate();
}
}
| apache-2.0 |
jdsjlzx/LRecyclerView | app/src/main/java/com/lzx/demo/ui/SectionLayoutActivity.java | 2594 | package com.lzx.demo.ui;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import com.lzx.demo.R;
import com.lzx.demo.fragment.DoubleHeaderFragment;
import com.lzx.demo.fragment.InlineStickyHeaderFragment;
import com.lzx.demo.fragment.StickyHeaderFragment;
/**
 * Hosts a {@link ViewPager} showing three section-header demo fragments: sticky, inline sticky
 * and double header.
 */
public class SectionLayoutActivity extends AppCompatActivity {

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_section_pager);
    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    // FIX: getSupportActionBar() is @Nullable; guard against an NPE when no action bar is
    // available instead of dereferencing it unconditionally.
    if (getSupportActionBar() != null) {
      getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }
    HeaderPagerAdapter adapter = new HeaderPagerAdapter(this.getSupportFragmentManager());
    ViewPager pager = (ViewPager) this.findViewById(R.id.pager);
    pager.setAdapter(adapter);
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    this.getMenuInflater().inflate(R.menu.menu_setion, menu);
    return true;
  }

  /**
   * Supplies the three demo fragments and their tab titles.
   *
   * <p>Made static: the adapter never touches the enclosing activity, and a non-static inner
   * class would otherwise hold a hidden reference that pins the Activity in memory.
   */
  static class HeaderPagerAdapter extends FragmentPagerAdapter {

    public HeaderPagerAdapter(FragmentManager fm) {
      super(fm);
    }

    @Override
    public Fragment getItem(int position) {
      switch (position) {
        case 0:
          return new StickyHeaderFragment();
        case 1:
          return new InlineStickyHeaderFragment();
        case 2:
          return new DoubleHeaderFragment();
        default:
          return null;
      }
    }

    @Override
    public int getCount() {
      // One page per fragment type above.
      return 3;
    }

    @Override
    public CharSequence getPageTitle(int position) {
      switch (position) {
        case 0:
          return "Sticky Header";
        case 1:
          return "Sticky Header - Inline";
        case 2:
          return "Double Header";
        default:
          return null;
      }
    }
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Handle the toolbar "up" arrow by closing this activity.
    if (item.getItemId() == android.R.id.home) {
      finish();
    }
    return true;
  }
}
| apache-2.0 |
google/copybara | java/com/google/copybara/git/GitEnvironment.java | 2407 | /*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.copybara.git;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.nio.file.FileSystems;
import java.util.Map;
/**
 * Immutable view of the environment used when invoking git, with optional suppression of git's
 * interactive credential prompting.
 */
public class GitEnvironment {

  // Stored as ImmutableMap (the constructor copies defensively) so getEnvironment() can hand it
  // out without another copy.
  private final ImmutableMap<String, String> environment;
  private final boolean noGitPrompt;

  public GitEnvironment(Map<String, String> environment) {
    this(environment, /*noGitPrompt*/ false);
  }

  GitEnvironment(Map<String, String> environment, boolean noGitPrompt) {
    this.environment = ImmutableMap.copyOf(Preconditions.checkNotNull(environment));
    this.noGitPrompt = noGitPrompt;
  }

  /**
   * Returns the environment to run git with. When prompting is disabled, the returned map
   * additionally carries {@code GIT_TERMINAL_PROMPT=0}.
   */
  public ImmutableMap<String, String> getEnvironment() {
    if (!noGitPrompt) {
      // IMPROVED: the stored map is already immutable, so skip the needless
      // HashMap copy + re-wrap that the previous implementation performed on every call.
      return environment;
    }
    Map<String, String> env = Maps.newHashMap(environment);
    env.put("GIT_TERMINAL_PROMPT", "0");
    return ImmutableMap.copyOf(env);
  }

  /**
   * Returns a copy of this environment, setting explicitly to prevent Git from asking for
   * username/password and fail if the credentials cannot be resolved.
   */
  GitEnvironment withNoGitPrompt() {
    return new GitEnvironment(this.environment, true);
  }

  /**
   * Returns a String representing the git binary to be executed.
   *
   * <p>The env var {@code GIT_EXEC_PATH} determines where Git looks for its sub-programs, but also
   * the regular git binaries (git, git-upload-pack, etc) are duplicated in {@code GIT_EXEC_PATH}.
   *
   * <p>If the env var is not set, then we will execute "git", that it will be resolved in the path
   * as usual.
   */
  public String resolveGitBinary() {
    if (environment.containsKey("GIT_EXEC_PATH")) {
      return FileSystems.getDefault()
          .getPath(environment.get("GIT_EXEC_PATH"))
          .resolve("git")
          .toString();
    }
    return "git";
  }
}
| apache-2.0 |
gdickinson/alexa-smarthome-java | src/main/java/uk/co/gdickinson/smarthome/lambda/models/ApplianceType.java | 495 | package uk.co.gdickinson.smarthome.lambda.models;
import com.google.gson.annotations.SerializedName;
/**
 * Appliance categories exposed over JSON; each constant serializes to the string given by its
 * Gson {@code @SerializedName} annotation.
 */
public enum ApplianceType {
    @SerializedName("CAMERA")
    CAMERA,
    @SerializedName("LIGHT")
    LIGHT,
    @SerializedName("SMARTLOCK")
    SMARTLOCK,
    @SerializedName("SMARTPLUG")
    SMARTPLUG,
    @SerializedName("SWITCH")
    SWITCH,
    @SerializedName("THERMOSTAT")
    THERMOSTAT,
    @SerializedName("ACTIVITY_TRIGGER")
    ACTIVITY_TRIGGER,
    @SerializedName("SCENE_TRIGGER")
    SCENE_TRIGGER
}
| apache-2.0 |
iamfigo/redis-cluster-manager | redis-cluster-manager/src/main/java/tech/huit/redis/util/DataMigrationSingleDoubleWriteCheck.java | 8666 | package tech.huit.redis.util;
import redis.clients.jedis.*;
import java.util.Map;
/**
 * Usage: java -cp redis-cluster-manager-jar-with-dependencies.jar
 * tech.huit.redis.util.DataMigrationSingleDoubleWriteCheck args
 *
 * <p>Double-write consistency checker for a migration from one standalone Redis instance to
 * another: it attaches MONITOR to the source instance and verifies each observed write against
 * the target instance.
 *
 * <p>Known issues:
 * 1. Because the two instances are read at slightly different times, frequently-updated keys may
 * be reported as inconsistent by mistake; re-check them with the DataMigrationSingleValueCheck
 * tool to confirm whether they are actually in sync.
 *
 * <p>Input parameters:
 * redisHost=10.0.6.200 source Redis IP
 * redisPort=6380 source Redis port
 * redisPwd=mon.wanghai source Redis password
 * newRedisHost=10.0.6.200 target Redis IP
 * newRedisPort=6001 target Redis port
 * newRedisPwd=uElDG3IHZAnXhT22 target Redis password
 * ipFilter=10.0.9.133 only check commands issued from this client IP
 * keyFilter=dpm_ only check keys with this prefix
 * monitorTime=5 monitoring duration in seconds
 *
 * <p>Output: notSync or sync, e.g. sync->cmd:setnx key:dpm_accountInfo_200011420515_201
 *
 * <p>Created by huit on 2017/10/24.
 */
public class DataMigrationSingleDoubleWriteCheck {
  public static String redisHost, newRedisHost, ipFilter, keyFilter, redisPwd, newRedisPwd;
  public static int redisPort, newRedisPort, monitorTime;
  public static String helpInfo = "redisHost=10.6.1.53 redisPort=6379 redisPwd=mon.wanghai newRedisHost=10.6.1.23 newRedisPort=6481 newRedisPwd=uElDG3IHZAnXhT22 ipFilter= keyFilter=dpm_ monitorTime=500";
  static Jedis newRedis;
  static Jedis oldRedis;

  public static void main(String[] args) throws Exception {
    if (args.length == 0) {
      System.out.println("use default arg");
      args = helpInfo.split(" ");
    }
    ArgsParse.parseArgs(DataMigrationSingleDoubleWriteCheck.class, args, "newRedis", "oldRedis", "lastDbIndex");
    newRedis = new Jedis(newRedisHost, newRedisPort);
    if (null != newRedisPwd) {
      newRedis.auth(newRedisPwd);
    }
    oldRedis = new Jedis(redisHost, redisPort);
    if (null != redisPwd) {
      oldRedis.auth(redisPwd);
    }
    onlineMonitor();
  }

  /** Strips the surrounding quotes that MONITOR output adds and unescapes embedded quotes. */
  private static String trimValue(String value) {
    if (value.length() >= 2) {
      return value.substring(1, value.length() - 1).replace("\\\"", "\"");
    } else {
      return value;
    }
  }

  // Database index currently selected on both connections; avoids a SELECT per command.
  private static int lastDbIndex = 0;

  /**
   * Parses one line of Redis MONITOR output from the source instance and checks that the write
   * it describes is reflected on the target instance.
   */
  public static void compareData(String data) {
    if ("OK".equals(data)) {
      return;
    }
    int hostBegin = data.indexOf("[");
    int hostEnd = data.indexOf("]");
    int db = 0;
    String clientIp = null;
    String clientIpPort;
    String cmdDetail = null;
    String[] cmdInfo = null;
    // FIX: the second operand used to re-test hostBegin; it must validate hostEnd.
    if (hostBegin > 0 && hostEnd > 0) {
      db = Integer.valueOf(data.substring(hostBegin + 1, hostEnd).split(" ")[0]);
      clientIpPort = data.substring(hostBegin + 1, hostEnd).split(" ")[1];
      clientIp = clientIpPort.split(":")[0];
      cmdDetail = data.substring(hostEnd + 2);
      cmdInfo = cmdDetail.split(" ");
    }
    if (cmdInfo == null || clientIp == null) {
      // FIX: the line did not match the expected "[db ip:port] cmd args..." shape; the old
      // code fell through and threw a NullPointerException here.
      return;
    }
    if (null != ipFilter && !clientIp.startsWith(ipFilter)) {
      return;
    }
    if ("\"SELECT\"".equalsIgnoreCase(cmdInfo[0]) || cmdInfo.length < 2) {
      return;
    }
    String cmd = trimValue(cmdInfo[0]).toLowerCase();
    String key = cmdInfo[1].replace("\"", "");
    if (null != keyFilter && !key.startsWith(keyFilter)) {
      return;
    }
    if (lastDbIndex != db) {
      oldRedis.select(db);
      newRedis.select(db);
      lastDbIndex = db;
    }
    if ("hmset".equals(cmd)) {
      Map<String, String> newRedisValue = newRedis.hgetAll(key);
      for (int i = 2; i < cmdInfo.length; i += 2) {
        String oldValue = HexToCn.redisString(trimValue(cmdInfo[i + 1]));
        String newValue = newRedisValue.get(trimValue(cmdInfo[i]));
        if (!oldValue.equals(newValue)) {
          printSyncResult(clientIp, cmd, key, false, oldValue, newValue);
          return;
        }
      }
      printSyncResult(clientIp, cmd, key);
    } else if ("del".equals(cmd)) {
      boolean isEquals = false;
      // Retry a few times: replication to the target may lag slightly behind.
      for (int i = 0; i < 5; i++) {
        String newRedisValue = newRedis.type(key);
        if ("none".equals(newRedisValue)) {
          isEquals = true;
          break;
        }
        waitMillis(20);
      }
      printSyncResult(clientIp, cmd, key, isEquals, null, null);
    } else if ("set".equals(cmd) || "setnx".equals(cmd)) {
      boolean isEquals = false;
      String oldValue = HexToCn.redisString(trimValue(cmdInfo[2]));
      String newRedisValue = null;
      for (int i = 0; i < 5; i++) {
        newRedisValue = newRedis.get(key);
        if (oldValue.equals(newRedisValue)) {
          isEquals = true;
          break;
        } else {
          // Re-read the source value too: it may have been overwritten meanwhile.
          waitMillis(20);
          oldValue = oldRedis.get(key);
        }
      }
      printSyncResult(clientIp, cmd, key, isEquals, oldValue, newRedisValue);
    } else if ("expire".equals(cmd)) {
      Long newRedisValue = newRedis.ttl(key);
      String oldValue = trimValue(cmdInfo[2]);
      if (Long.valueOf(oldValue) - newRedisValue >= 5) { // a TTL lag this large is definitely abnormal
        // FIX: this branch detects an inconsistency, so report notSync (the old code passed
        // true, which printed "sync" and discarded the diagnostic values).
        printSyncResult(clientIp, cmd, key, false, oldValue, newRedisValue.toString());
      } else {
        printSyncResult(clientIp, cmd, key);
      }
    } else if ("zadd".equals(cmd)) {
      boolean isSync = true;
      for (int i = 2; i < cmdInfo.length; i += 2) {
        String oldValue = HexToCn.redisString(trimValue(cmdInfo[i + 1]));
        Double oldScore = Double.valueOf(trimValue(cmdInfo[i]));
        Double newRedisScore = newRedis.zscore(key, oldValue);
        // FIX: compare boxed Doubles by value; "!=" compared object references and therefore
        // flagged virtually every member as out of sync. equals(null) is false, so a member
        // missing on the target (zscore == null) is still reported.
        if (!oldScore.equals(newRedisScore)) {
          isSync = false;
          break;
        }
      }
      printSyncResult(clientIp, cmd, key, isSync, null, null);
    } else if ("sadd".equals(cmd)) {
      boolean isSync = true;
      for (int i = 2; i < cmdInfo.length; i++) {
        String oldValue = HexToCn.redisString(trimValue(cmdInfo[i]));
        // Double-check the source: under high concurrency the member may already have been
        // removed again, in which case its absence on the target is not an inconsistency.
        if (!newRedis.sismember(key, oldValue) && oldRedis.sismember(trimValue(cmdInfo[1]), oldValue)) {
          isSync = false;
          break;
        }
      }
      printSyncResult(clientIp, cmd, key, isSync, null, null);
    }
  }

  /** Sleeps for the given number of milliseconds, ignoring interrupts. */
  private static void waitMillis(int millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
    }
  }

  private static void printSyncResult(String clientIp, String cmd, String key) {
    printSyncResult(clientIp, cmd, key, true);
  }

  private static void printSyncResult(String clientIp, String cmd, String key, boolean isEquals) {
    printSyncResult(clientIp, cmd, key, isEquals, null, null);
  }

  /** Prints a sync/notSync line; the old/new values are only shown for notSync. */
  private static void printSyncResult(String clientIp, String cmd, String key, boolean isEquals, String oldValue, String newRedisValue) {
    if (isEquals) {
      System.out.println("sync->clientIp:" + clientIp + " cmd:" + cmd + " key:" + key);
    } else {
      // FIX: added the missing space before "newValue:" so the two values are separable.
      System.out.println("notSync->clientIp:" + clientIp + " cmd:" + cmd + " key:" + key + " oldValue:" + oldValue + " newValue:" + newRedisValue);
    }
  }

  /**
   * Attaches a MONITOR listener to the source Redis instance and checks every observed command,
   * exiting the JVM after {@code monitorTime} seconds.
   */
  public static void onlineMonitor() {
    Jedis jedis = new Jedis(redisHost, Integer.valueOf(redisPort));
    JedisMonitor monitor = new JedisMonitor() {
      @Override
      public void onCommand(String command) {
        compareData(command);
      }
    };
    final long beginTime = System.currentTimeMillis();
    new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          // FIX: multiply as long to avoid int overflow for very large monitorTime values.
          Thread.sleep(monitorTime * 1000L);
        } catch (InterruptedException e) {
          // Ignored: the finally block exits the JVM immediately anyway.
        } finally {
          System.out.println("useTime:" + (System.currentTimeMillis() - beginTime));
          System.exit(0);
        }
      }
    }, "monitorTimer").start();
    if (null != redisPwd) {
      jedis.auth(redisPwd);
    }
    // Blocks forever feeding MONITOR lines into compareData; the timer thread above ends it.
    jedis.monitor(monitor);
  }
}
| apache-2.0 |
tolo/JServer | samples/panel/src/RemoteTestEvent.java | 334 | import com.teletalk.jserver.rmi.remote.RemoteEvent;
/**
*
*/
public class RemoteTestEvent extends RemoteEvent
{
private final int id;
public RemoteTestEvent(String source, int id)
{
super(source);
this.id = id;
}
public String toString()
{
return "RemoteTestEvent( id: " + this.id + " )";
}
}
| apache-2.0 |