repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
AdoHe/Homework | Project01/src/com/xqbase/java/Blur.java | 3433 | package com.xqbase.java;
/* Blur.java */
/* DO NOT CHANGE THIS FILE. */
/* YOUR SUBMISSION MUST WORK CORRECTLY WITH _OUR_ COPY OF THIS FILE. */
/* You may wish to make temporary changes or insert println() statements */
/* while testing your code. When you're finished testing and debugging, */
/* though, make sure your code works with the original version of this file. */
/**
* The Blur class is a program that reads an image file in TIFF format, blurs
* it with a 3x3 box blurring kernel, writes the blurred image as a TIFF file,
* and displays both images.
*
* The Blur program takes up to two parameters. The first parameter is
* the name of the TIFF-format file to read. (The output image file is
* constructed by adding "blur_" to the beginning of the input filename.)
* An optional second parameter specifies the number of iterations of the
* box blurring operation. (The default is one iteration.) For example, if
* you run
*
* java Blur engine.tiff 5
*
* then Blur will read engine.tiff, perform 5 iterations of blurring, and
* write the blurred image to blur_engine.tiff .
*
* @author Joel Galenson and Jonathan Shewchuk
*/
public class Blur {
/**
* blurFile() reads a TIFF image file, blurs it, write the blurred image to
* a new TIFF image file, and displays both images.
*
* @param filename the name of the input TIFF image file.
* @param numIterations the number of iterations of blurring to perform.
*/
/**
 * blurFile() reads a TIFF image file, blurs it, writes the blurred image to
 * a new TIFF image file, and displays both images.
 *
 * Per the class contract, the output file is the input filename prefixed
 * with "blur_", so the original input image is never overwritten.
 *
 * @param filename the name of the input TIFF image file.
 * @param numIterations the number of iterations of blurring to perform.
 */
private static void blurFile(String filename, int numIterations) {
System.out.println("Reading image file " + filename);
PixImage image = ImageUtils.readTIFFPix(filename);
System.out.println("Blurring image file.");
PixImage blurred = image.boxBlur(numIterations);
// Bug fix: blurname was previously just `filename`, which made writeTIFF
// clobber the input file. The class javadoc and the usage text both state
// the output name is "blur_" + the input filename.
String blurname = "blur_" + filename;
System.out.println("Writing blurred image file " + blurname);
TIFFEncoder.writeTIFF(blurred, blurname);
System.out.println("Displaying input image and blurred image.");
System.out.println("Close the image to quit.");
ImageUtils.displayTIFFs(new PixImage[] { image, blurred });
}
/**
* main() reads the command-line arguments and initiates the blurring.
*
* The first command-line argument is the name of the image file.
* An optional second argument is number of iterations of blurring.
*
* @param args the usual array of command-line argument Strings.
*/
/**
 * main() reads the command-line arguments and initiates the blurring.
 *
 * The first command-line argument is the name of the image file.
 * An optional second argument is the number of iterations of blurring
 * (defaults to 1 when absent).
 *
 * @param args the usual array of command-line argument Strings.
 */
public static void main(String[] args) {
if (args.length == 0) {
System.out.println("usage: java Blur imagefile [iterations]");
System.out.println("  imagefile is an image in TIFF format.");
// Fixed typo in the user-facing usage text: "interations" -> "iterations".
System.out.println("  iterations is the number of blurring iterations" +
" (default 1).");
System.out.println("The blurred image is written to blur_imagefile.");
System.exit(0);
}
int numIterations = 1;
if (args.length > 1) {
try {
numIterations = Integer.parseInt(args[1]);
} catch (NumberFormatException ex) {
System.err.println("The second argument must be a number.");
System.exit(1);
}
}
blurFile(args[0], numIterations);
}
} | apache-2.0 |
spinnaker/clouddriver | clouddriver-kubernetes/src/main/java/com/netflix/spinnaker/clouddriver/kubernetes/op/handler/KubernetesCustomResourceHandler.java | 2205 | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.clouddriver.kubernetes.op.handler;
import static com.netflix.spinnaker.clouddriver.kubernetes.op.handler.KubernetesHandler.DeployPriority.LOWEST_PRIORITY;
import com.netflix.spinnaker.clouddriver.kubernetes.caching.agent.KubernetesCachingAgentFactory;
import com.netflix.spinnaker.clouddriver.kubernetes.caching.agent.KubernetesUnregisteredCustomResourceCachingAgent;
import com.netflix.spinnaker.clouddriver.kubernetes.description.SpinnakerKind;
import com.netflix.spinnaker.clouddriver.kubernetes.description.manifest.KubernetesKind;
import com.netflix.spinnaker.clouddriver.kubernetes.description.manifest.KubernetesManifest;
import com.netflix.spinnaker.clouddriver.kubernetes.model.Manifest;
import javax.annotation.Nonnull;
/**
 * Handler for Kubernetes custom-resource kinds that have no dedicated
 * handler. Deploys at the lowest priority, is unversioned, reports
 * {@link SpinnakerKind#UNCLASSIFIED}, always answers the default manifest
 * status, and caches via the unregistered-custom-resource caching agent.
 */
public class KubernetesCustomResourceHandler extends KubernetesHandler implements CanDelete {
  // The concrete custom-resource kind this handler instance is bound to.
  private final KubernetesKind kind;

  public KubernetesCustomResourceHandler(KubernetesKind kind) {
    this.kind = kind;
  }

  @Nonnull
  @Override
  public KubernetesKind kind() {
    return kind;
  }

  @Override
  public int deployPriority() {
    // Custom resources deploy after everything else.
    return LOWEST_PRIORITY.getValue();
  }

  @Override
  public boolean versioned() {
    return false;
  }

  @Nonnull
  @Override
  public SpinnakerKind spinnakerKind() {
    return SpinnakerKind.UNCLASSIFIED;
  }

  @Override
  public Manifest.Status status(KubernetesManifest manifest) {
    // No kind-specific status logic is available for arbitrary CRDs.
    return Manifest.Status.defaultStatus();
  }

  @Override
  protected KubernetesCachingAgentFactory cachingAgentFactory() {
    return KubernetesUnregisteredCustomResourceCachingAgent::new;
  }
}
| apache-2.0 |
Ariah-Group/Continuity | src/main/java/org/kuali/continuity/admin/dao/AdminTestModeDAO.java | 812 | //
// Copyright 2011 Kuali Foundation, Inc. Licensed under the
// Educational Community License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may
// obtain a copy of the License at
//
// http://www.opensource.org/licenses/ecl2.php
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an "AS IS"
// BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing
// permissions and limitations under the License.
//
package org.kuali.continuity.admin.dao;
/**
 * DAO for reading and writing a per-user "test mode" flag.
 */
public interface AdminTestModeDAO {
/**
 * Sets or clears test mode for the given user.
 *
 * @param isInTestMode true to enable test mode, false to disable it
 * @param userId identifier of the user whose flag is updated
 */
void setInTestMode(boolean isInTestMode, Integer userId);
/**
 * Returns whether the given user is currently in test mode.
 *
 * @param userId identifier of the user to check
 */
boolean isInTestMode(Integer userId);
}
| apache-2.0 |
dehora/outland | outland-feature-java/src/main/java/outland/feature/FeatureStore.java | 494 | package outland.feature;
import outland.feature.proto.Feature;
import outland.feature.proto.FeatureCollection;
/**
 * Storage abstraction for feature flags, keyed by group and feature key.
 * All operations may throw {@link FeatureException} on storage failure.
 */
interface FeatureStore {
// Stores (or replaces) a feature. Returns Void rather than void to keep
// the existing interface stable for implementors and callers.
Void put(Feature feature) throws FeatureException;
// Looks up a single feature record by its group and key.
FeatureRecord find(String group, String key) throws FeatureException;
// Returns every feature in the given group.
FeatureCollection findAll(String group) throws FeatureException;
// Removes one feature identified by group and feature key.
Void remove(String group, String featureKey) throws FeatureException;
// Removes every stored feature.
Void removeAll() throws FeatureException;
// Releases any resources held by the store.
void close() throws FeatureException;
}
| apache-2.0 |
JIGAsoftSTP/NICON | src/java/bean/ContaBancoBean.java | 5411 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bean;
import dao.ContaBancoDao;
import java.util.ArrayList;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.context.FacesContext;
import mensagem.Message;
import modelo.ComoBox;
import modelo.ContaBanco;
import org.primefaces.context.RequestContext;
import validacao.Validacao;
/**
 * JSF view-scoped backing bean for managing bank accounts ("contas banco"):
 * registration, search, document printing, and the combo-box option lists
 * (currencies, account types, chart-of-accounts entries and banks) that are
 * preloaded from database views when the view is created.
 *
 * @author ahmedjorge
 */
@ManagedBean
@ViewScoped
public class ContaBancoBean {
// The bank account currently being edited/registered.
private ContaBanco contaBanco;
// Result list shown in the accounts table.
private ArrayList<ContaBanco> listContaBanco;
// Combo-box options: account types, chart-of-accounts entries, banks, currencies.
private ArrayList<ComoBox> listTipoConta;
private ArrayList<ComoBox> listContaContaBilistica;
private ArrayList<ComoBox> listBanco;
private ArrayList<ComoBox> listMoeda;
// Last search string entered by the user (reused when printing).
private String pesquisaConta;
/**
 * Preloads all combo-box option lists from their database views and loads
 * the initial (unfiltered) account list.
 */
public ContaBancoBean() {
listMoeda = ComoBox.loadAllDados("VER_MOEDA", "ID", "MOEDA");
listTipoConta = ComoBox.loadAllDados("VER_TYPEACCOUNTBANK", "ID", "TYPECONTA");
listContaContaBilistica = ComoBox.loadAllDados("VER_ACCOUNT where STATE = 1", "ID", "NUMBER");
listBanco = ComoBox.loadAllDados("VER_BANK", "ID", "NOME");
listContaBanco = new ContaBancoDao().getListContaBanco(null);
}
// Lazily initializes so the JSF view never binds against null.
public ContaBanco getContaBanco() {
return (contaBanco == null) ? contaBanco = new ContaBanco() : contaBanco;
}
/**
 * Registers a new bank account. Reads the form values from the raw request
 * parameter map, delegates to {@link ContaBancoDao#regAccount}, and on
 * success clears the form, reloads the table and shows an info message;
 * on failure shows the DAO-provided error message. Always hides the
 * client-side progress modal when done.
 */
public void regContaBanco(){
FacesContext facesContext = FacesContext.getCurrentInstance();
contaBanco.setIdtipoConta(Int(facesContext.getExternalContext().getRequestParameterMap().get("tipoconta")));
contaBanco.setIdtipoContaContabilistica(Int(facesContext.getExternalContext().getRequestParameterMap().get("contacontabalistica")));
contaBanco.setIdbanco(Int(facesContext.getExternalContext().getRequestParameterMap().get("banco")));
contaBanco.setIdTipoMoeda(Int(facesContext.getExternalContext().getRequestParameterMap().get("tipomoeda")));
contaBanco.setNumConta(facesContext.getExternalContext().getRequestParameterMap().get("numconta"));
// DAO contract (as used here): stra[0] is "true"/"false", stra[1] carries the error text.
String stra[] = new ContaBancoDao().regAccount(contaBanco);
if(stra != null && "true".equals(stra[0])){
RequestContext.getCurrentInstance().execute("limparCBForm()");
listContaBanco = new ContaBancoDao().getListContaBanco(null);
Validacao.atualizar("accountTableForm", "tableCantaBanco");
Message.addInfoMsg("Nova Conta Banco registada com sucesso!","changePassword", "growlChangePassword");
}else if(stra != null && "false".equals(stra[0]))
{ Message.addErrorMsg(stra[1],"changePassword", "growlChangePassword"); }
RequestContext.getCurrentInstance().execute("$('.modalProcess').hide()");
}
/**
 * Prints the current search results as a document; the document type is
 * taken from the "docType" request parameter and the last search string
 * ({@link #pesquisaConta}) is reused as the filter.
 */
public void printDocCB(){
FacesContext facesContext = FacesContext.getCurrentInstance();
String typeDoc = facesContext.getExternalContext().getRequestParameterMap().get("docType");
new ContaBancoDao().resultPesqu(pesquisaConta, true, typeDoc);
RequestContext.getCurrentInstance().execute("$('.modalProcess').hide()");
}
/**
 * Runs a search using the "pesq" request parameter, stores the search
 * string for later printing, and refreshes the accounts table.
 */
public void pesquisaCB(){
FacesContext facesContext = FacesContext.getCurrentInstance();
pesquisaConta = facesContext.getExternalContext().getRequestParameterMap().get("pesq");
listContaBanco = new ContaBancoDao().resultPesqu(pesquisaConta, false, null);
Validacao.atualizar("accountTableForm", "tableCantaBanco");
RequestContext.getCurrentInstance().execute("$('.modalProcess').hide()");
}
// Parses a request parameter into an Integer. NOTE(review): throws
// NumberFormatException when the parameter is missing (null) or non-numeric.
public Integer Int(String var){ return Integer.valueOf(var); }
public void setContaBanco(ContaBanco contaBanco) {
this.contaBanco = contaBanco;
}
// --- getters/setters; getters lazily initialize so they never return null ---
public ArrayList<ContaBanco> getListContaBanco() {
return (listContaBanco == null) ? listContaBanco = new ArrayList<>() : listContaBanco;
}
public void setListContaBanco(ArrayList<ContaBanco> listContaBanco) {
this.listContaBanco = listContaBanco;
}
public ArrayList<ComoBox> getListTipoConta() {
return (listTipoConta == null) ? listTipoConta = new ArrayList<>(): listTipoConta;
}
public void setListTipoConta(ArrayList<ComoBox> listTipoConta) {
this.listTipoConta = listTipoConta;
}
public ArrayList<ComoBox> getListContaContaBilistica() {
return (listContaContaBilistica == null) ? listContaContaBilistica = new ArrayList<>() : listContaContaBilistica ;
}
public void setListContaContaBilistica(ArrayList<ComoBox> listContaContaBilistica) {
this.listContaContaBilistica = listContaContaBilistica;
}
public ArrayList<ComoBox> getListBanco() {
return (listBanco == null) ? listBanco = new ArrayList<>(): listBanco;
}
public void setListBanco(ArrayList<ComoBox> listBanco) {
this.listBanco = listBanco;
}
public ArrayList<ComoBox> getListMoeda() {
return (listMoeda == null) ? listMoeda = new ArrayList<>(): listMoeda;
}
public void setListMoeda(ArrayList<ComoBox> listMoeda) {
this.listMoeda = listMoeda;
}
public String getPesquisaConta() {
return pesquisaConta;
}
public void setPesquisaConta(String pesquisaConta) {
this.pesquisaConta = pesquisaConta;
}
}
| apache-2.0 |
CarlosIribarren/Ejemplos-Examples | Java/Spring/SpringFramework/Spring Core/00_HolaMundo_XML/src/main/java/com/curso/springcore/beans/Mundo.java | 209 | package com.curso.springcore.beans;
/**
 * Simple mutable bean holding a single greeting string ("saludo"),
 * typically populated via dependency injection.
 */
public class Mundo {
  // The greeting text; null until set.
  private String saludo;

  /** Returns the greeting, or null if none has been set. */
  public String getSaludo() {
    return this.saludo;
  }

  /** Replaces the stored greeting. */
  public void setSaludo(String saludo) {
    this.saludo = saludo;
  }
}
| apache-2.0 |
jprante/elasticsearch | core/src/main/java/org/elasticsearch/search/SearchService.java | 35814 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchTask;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.collapse.CollapseContext;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexEventListener;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.AggregationInitializationException;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.query.QuerySearchRequest;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
public class SearchService extends AbstractLifecycleComponent implements IndexEventListener {
// we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes
public static final Setting<TimeValue> DEFAULT_KEEPALIVE_SETTING =
Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), Property.NodeScope);
// How often the keep-alive reaper task runs to expire idle contexts.
public static final Setting<TimeValue> KEEPALIVE_INTERVAL_SETTING =
Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope);
/**
 * Enables low-level, frequent search cancellation checks. Enabling low-level checks will make long running searches to react
 * to the cancellation request faster. However, since it will produce more cancellation checks it might slow the search performance
 * down.
 */
public static final Setting<Boolean> LOW_LEVEL_CANCELLATION_SETTING =
Setting.boolSetting("search.low_level_cancellation", false, Property.Dynamic, Property.NodeScope);
// Sentinel (-1ms) meaning "no timeout"; the default for search.default_search_timeout.
public static final TimeValue NO_TIMEOUT = timeValueMillis(-1);
public static final Setting<TimeValue> DEFAULT_SEARCH_TIMEOUT_SETTING =
Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, Property.Dynamic, Property.NodeScope);
// Dependencies injected through the constructor.
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IndicesService indicesService;
private final ScriptService scriptService;
private final BigArrays bigArrays;
// Per-phase executors; dfsPhase is created here, queryPhase/fetchPhase in the constructor.
private final DfsPhase dfsPhase = new DfsPhase();
private final QueryPhase queryPhase;
private final FetchPhase fetchPhase;
// Keep-alive (millis) applied to contexts whose request carries no scroll keep-alive.
private final long defaultKeepAlive;
// volatile: updated at runtime through dynamic-settings consumers registered in the constructor.
private volatile TimeValue defaultSearchTimeout;
private volatile boolean lowLevelCancellation;
// Handle to the periodically scheduled Reaper task; cancelled in doClose().
private final Cancellable keepAliveReaper;
// Source of unique search-context ids (see createSearchContext).
private final AtomicLong idGenerator = new AtomicLong();
// All live search contexts, keyed by context id.
private final ConcurrentMapLong<SearchContext> activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
/**
 * Wires the service: stores dependencies, creates the query phase, reads the
 * keep-alive settings, schedules the periodic keep-alive reaper, and
 * registers dynamic-settings consumers for the default search timeout and
 * low-level cancellation flags.
 */
public SearchService(ClusterService clusterService, IndicesService indicesService,
ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, FetchPhase fetchPhase) {
super(clusterService.getSettings());
this.threadPool = threadPool;
this.clusterService = clusterService;
this.indicesService = indicesService;
this.scriptService = scriptService;
this.bigArrays = bigArrays;
this.queryPhase = new QueryPhase(settings);
this.fetchPhase = fetchPhase;
TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings);
this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis();
// Reaper runs on the calling thread pool slot (Names.SAME) at a fixed delay.
this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval, Names.SAME);
defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(DEFAULT_SEARCH_TIMEOUT_SETTING, this::setDefaultSearchTimeout);
lowLevelCancellation = LOW_LEVEL_CANCELLATION_SETTING.get(settings);
clusterService.getClusterSettings().addSettingsUpdateConsumer(LOW_LEVEL_CANCELLATION_SETTING, this::setLowLevelCancellation);
}
// Dynamic-settings consumer for search.default_search_timeout (registered in the constructor).
private void setDefaultSearchTimeout(TimeValue defaultSearchTimeout) {
this.defaultSearchTimeout = defaultSearchTimeout;
}
// Dynamic-settings consumer for search.low_level_cancellation (registered in the constructor).
private void setLowLevelCancellation(Boolean lowLevelCancellation) {
this.lowLevelCancellation = lowLevelCancellation;
}
// Frees all contexts of an index when it is deleted or closed, so no stale
// contexts hold on to its resources.
@Override
public void afterIndexRemoved(Index index, IndexSettings indexSettings, IndexRemovalReason reason) {
// once an index is removed due to deletion or closing, we can just clean up all the pending search context information
// if we then close all the contexts we can get some search failures along the way which are not expected.
// it's fine to keep the contexts open if the index is still "alive"
// unfortunately we don't have a clear way to signal today why an index is closed.
// to release memory and let references to the filesystem go etc.
if (reason == IndexRemovalReason.DELETED || reason == IndexRemovalReason.CLOSED) {
freeAllContextForIndex(index);
}
}
// Registers a context under its id; ids must never be reused while active
// (the assert guards against overwriting a live context).
protected void putContext(SearchContext context) {
final SearchContext previous = activeContexts.put(context.id(), context);
assert previous == null;
}
// Deregisters and returns the context for the given id, or null if not active.
protected SearchContext removeContext(long id) {
return activeContexts.remove(id);
}
// No start-up work is needed; contexts are created lazily per request.
@Override
protected void doStart() {
}
// Frees every still-active search context on stop.
@Override
protected void doStop() {
for (final SearchContext context : activeContexts.values()) {
freeContext(context.id());
}
}
// Stops the service (freeing all contexts) and cancels the keep-alive reaper task.
@Override
protected void doClose() {
doStop();
keepAliveReaper.cancel();
}
/**
 * Runs the DFS phase for a shard-level request. A new context is created and
 * registered, marked as processing (keep-alive timeout disabled), and on
 * success kept alive so the following query phase can find it; on failure
 * the context is freed via processFailure.
 *
 * @param request the shard-level search request
 * @param task the task this execution is attributed to
 * @return the shard's DFS result
 */
public DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchTask task) throws IOException {
final SearchContext context = createAndPutContext(request);
context.incRef();
try {
context.setTask(task);
contextProcessing(context);
dfsPhase.execute(context);
contextProcessedSuccessfully(context);
return context.dfsResult();
} catch (Exception e) {
logger.trace("Dfs phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
// cleanContext decRefs the context taken above and clears per-phase state.
cleanContext(context);
}
}
/**
 * Try to load the query results from the cache or execute the query phase directly if the cache cannot be used.
 */
private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final SearchContext context) throws Exception {
final boolean canCache = indicesService.canCache(request, context);
// Freeze the QueryShardContext before executing so it can no longer be
// mutated. NOTE(review): presumably required for cache-key stability — confirm.
context.getQueryShardContext().freezeContext();
if (canCache) {
indicesService.loadIntoContext(request, context, queryPhase);
} else {
queryPhase.execute(context);
}
}
/**
 * Runs the query phase for a shard-level request, possibly served from the
 * request cache. The context is freed immediately when the query produced no
 * hits and no scroll is active (there will be no fetch phase); otherwise it
 * is kept alive. For single-shard searches the fetch phase is executed right
 * away and a combined query+fetch result is returned.
 *
 * @return a QuerySearchResult, or a QueryFetchSearchResult when numberOfShards == 1
 */
public SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchTask task) throws IOException {
final SearchContext context = createAndPutContext(request);
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef();
try {
context.setTask(task);
operationListener.onPreQueryPhase(context);
long time = System.nanoTime();
contextProcessing(context);
loadOrExecuteQueryPhase(request, context);
if (context.queryResult().hasHits() == false && context.scrollContext() == null) {
// nothing to fetch later, release the context now
freeContext(context.id());
} else {
contextProcessedSuccessfully(context);
}
final long afterQueryTime = System.nanoTime();
operationListener.onQueryPhase(context, afterQueryTime - time);
if (request.numberOfShards() == 1) {
// single-shard optimization: run the fetch phase immediately
return executeFetchPhase(context, operationListener, afterQueryTime);
}
return context.queryResult();
} catch (Exception e) {
// execution exception can happen while loading the cache, strip it
if (e instanceof ExecutionException) {
e = (e.getCause() == null || e.getCause() instanceof Exception) ?
(Exception) e.getCause() : new ElasticsearchException(e.getCause());
}
operationListener.onFailedQueryPhase(context);
logger.trace("Query phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
cleanContext(context);
}
}
/**
 * Runs the fetch phase on a context whose query phase already completed,
 * notifying the operation listener around it. The context is freed unless a
 * still-live scroll requires it (see fetchPhaseShouldFreeContext).
 *
 * @param afterQueryTime nanoTime captured at the end of the query phase,
 *        used as the start timestamp for fetch-phase timing
 */
private QueryFetchSearchResult executeFetchPhase(SearchContext context, SearchOperationListener operationListener,
long afterQueryTime) {
operationListener.onPreFetchPhase(context);
try {
// translate the query-phase top docs into doc ids to load
shortcutDocIdsToLoad(context);
fetchPhase.execute(context);
if (fetchPhaseShouldFreeContext(context)) {
freeContext(context.id());
} else {
contextProcessedSuccessfully(context);
}
} catch (Exception e) {
operationListener.onFailedFetchPhase(context);
throw ExceptionsHelper.convertToRuntime(e);
}
operationListener.onFetchPhase(context, System.nanoTime() - afterQueryTime);
return new QueryFetchSearchResult(context.queryResult(), context.fetchResult());
}
/**
 * Runs the query phase for a scroll continuation. The context is looked up
 * by the id carried in the request (it must still be alive), the scroll
 * state is advanced via processScroll, and the context stays registered for
 * subsequent scroll rounds.
 *
 * @throws SearchContextMissingException if the context id is no longer active
 */
public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id());
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef();
try {
context.setTask(task);
operationListener.onPreQueryPhase(context);
long time = System.nanoTime();
contextProcessing(context);
processScroll(request, context);
queryPhase.execute(context);
contextProcessedSuccessfully(context);
operationListener.onQueryPhase(context, System.nanoTime() - time);
return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget());
} catch (Exception e) {
operationListener.onFailedQueryPhase(context);
logger.trace("Query phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
cleanContext(context);
}
}
/**
 * Runs the query phase of a DFS-then-query search: the existing context
 * (created during the DFS phase) is looked up by id, the aggregated DFS
 * statistics from the request are installed on its searcher, and the query
 * executes with them. As in the non-DFS variant, the context is freed when
 * there are no hits and no scroll is active.
 *
 * @throws SearchContextMissingException if the context id is no longer active
 */
public QuerySearchResult executeQueryPhase(QuerySearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id());
context.setTask(task);
IndexShard indexShard = context.indexShard();
SearchOperationListener operationListener = indexShard.getSearchOperationListener();
context.incRef();
try {
contextProcessing(context);
// apply the distributed term statistics gathered in the DFS phase
context.searcher().setAggregatedDfs(request.dfs());
operationListener.onPreQueryPhase(context);
long time = System.nanoTime();
queryPhase.execute(context);
if (context.queryResult().hasHits() == false && context.scrollContext() == null) {
// no hits, we can release the context since there will be no fetch phase
freeContext(context.id());
} else {
contextProcessedSuccessfully(context);
}
operationListener.onQueryPhase(context, System.nanoTime() - time);
return context.queryResult();
} catch (Exception e) {
operationListener.onFailedQueryPhase(context);
logger.trace("Query phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
cleanContext(context);
}
}
/**
 * Decides whether a context can be released after the fetch phase: yes for
 * plain (non-scroll) searches, and for scroll searches whose scroll was not
 * extended for another round.
 */
private boolean fetchPhaseShouldFreeContext(SearchContext context) {
    final ScrollContext scrollContext = context.scrollContext();
    // No scroll at all, or a scroll that was not renewed -> safe to free.
    return scrollContext == null || scrollContext.scroll == null;
}
/**
 * Runs a combined query+fetch round for a scroll continuation: looks up the
 * existing context by id, advances the scroll state, executes the query
 * phase and then the fetch phase, and returns both results wrapped with the
 * shard target.
 *
 * @throws SearchContextMissingException if the context id is no longer active
 */
public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id());
context.incRef();
try {
context.setTask(task);
contextProcessing(context);
SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
processScroll(request, context);
operationListener.onPreQueryPhase(context);
final long time = System.nanoTime();
try {
queryPhase.execute(context);
} catch (Exception e) {
// report the query failure separately from fetch failures
operationListener.onFailedQueryPhase(context);
throw ExceptionsHelper.convertToRuntime(e);
}
long afterQueryTime = System.nanoTime();
operationListener.onQueryPhase(context, afterQueryTime - time);
QueryFetchSearchResult fetchSearchResult = executeFetchPhase(context, operationListener, afterQueryTime);
return new ScrollQueryFetchSearchResult(fetchSearchResult,
context.shardTarget());
} catch (Exception e) {
logger.trace("Fetch phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
cleanContext(context);
}
}
/**
 * Runs the fetch phase for documents selected by an earlier query phase:
 * looks up the context by id, records the coordinating node's last emitted
 * doc (for scrolls), installs the doc ids to load, and executes the fetch.
 * The context is freed afterwards unless a live scroll still needs it.
 *
 * @throws SearchContextMissingException if the context id is no longer active
 */
public FetchSearchResult executeFetchPhase(ShardFetchRequest request, SearchTask task) {
final SearchContext context = findContext(request.id());
final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener();
context.incRef();
try {
context.setTask(task);
contextProcessing(context);
if (request.lastEmittedDoc() != null) {
// scroll bookkeeping: remember where the previous round left off
context.scrollContext().lastEmittedDoc = request.lastEmittedDoc();
}
context.docIdsToLoad(request.docIds(), 0, request.docIdsSize());
operationListener.onPreFetchPhase(context);
long time = System.nanoTime();
fetchPhase.execute(context);
if (fetchPhaseShouldFreeContext(context)) {
freeContext(request.id());
} else {
contextProcessedSuccessfully(context);
}
operationListener.onFetchPhase(context, System.nanoTime() - time);
return context.fetchResult();
} catch (Exception e) {
operationListener.onFailedFetchPhase(context);
logger.trace("Fetch phase failed", e);
processFailure(context, e);
throw ExceptionsHelper.convertToRuntime(e);
} finally {
cleanContext(context);
}
}
/**
 * Looks up an active search context by id.
 *
 * @throws SearchContextMissingException if no context with that id is active
 */
private SearchContext findContext(long id) throws SearchContextMissingException {
    final SearchContext context = activeContexts.get(id);
    if (context != null) {
        return context;
    }
    throw new SearchContextMissingException(id);
}
/**
 * Creates a context for the request and registers it in the active map,
 * notifying the shard's operation listener (including the scroll-specific
 * callback when the request carries a scroll). If any step after creation
 * fails, the context is freed so nothing leaks.
 */
final SearchContext createAndPutContext(ShardSearchRequest request) throws IOException {
SearchContext context = createContext(request, null);
boolean success = false;
try {
putContext(context);
if (request.scroll() != null) {
context.indexShard().getSearchOperationListener().onNewScrollContext(context);
}
context.indexShard().getSearchOperationListener().onNewContext(context);
success = true;
return context;
} finally {
if (!success) {
// undo registration and release the context on listener failure
freeContext(context.id());
}
}
}
/**
 * Builds a fully initialized search context for the request: parses the
 * source, applies default from/size, pre-processes all three phases, and
 * computes the keep-alive (request scroll keep-alive, else the node
 * default). The context is closed on any initialization failure.
 *
 * @param searcher an engine searcher to reuse, or null to acquire a new one
 */
final SearchContext createContext(ShardSearchRequest request, @Nullable Engine.Searcher searcher) throws IOException {
final DefaultSearchContext context = createSearchContext(request, defaultSearchTimeout, searcher);
try {
if (request.scroll() != null) {
context.scrollContext(new ScrollContext());
context.scrollContext().scroll = request.scroll();
}
parseSource(context, request.source());
// if the from and size are still not set, default them
if (context.from() == -1) {
context.from(0);
}
if (context.size() == -1) {
context.size(10);
}
// pre process
dfsPhase.preProcess(context);
queryPhase.preProcess(context);
fetchPhase.preProcess(context);
// compute the context keep alive
long keepAlive = defaultKeepAlive;
if (request.scroll() != null && request.scroll().keepAlive() != null) {
keepAlive = request.scroll().keepAlive().millis();
}
context.keepAlive(keepAlive);
context.lowLevelCancellation(lowLevelCancellation);
} catch (Exception e) {
context.close();
throw ExceptionsHelper.convertToRuntime(e);
}
return context;
}
/**
 * Constructs the raw DefaultSearchContext: resolves index and shard,
 * acquires an engine searcher (unless one was supplied), assigns a fresh
 * context id from idGenerator, and rewrites the request against a clone of
 * the query shard context. On failure the context (and its searcher) is
 * closed so nothing leaks.
 *
 * @param searcher an engine searcher to reuse, or null to acquire one
 */
public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout, @Nullable Engine.Searcher searcher)
throws IOException {
IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
IndexShard indexShard = indexService.getShard(request.shardId().getId());
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), indexShard.shardId());
Engine.Searcher engineSearcher = searcher == null ? indexShard.acquireSearcher("search") : searcher;
final DefaultSearchContext searchContext = new DefaultSearchContext(idGenerator.incrementAndGet(), request, shardTarget,
engineSearcher, indexService, indexShard, bigArrays, threadPool.estimatedTimeInMillisCounter(), timeout, fetchPhase);
boolean success = false;
try {
// we clone the query shard context here just for rewriting otherwise we
// might end up with incorrect state since we are using now() or script services
// during rewrite and normalized / evaluate templates etc.
request.rewrite(new QueryShardContext(searchContext.getQueryShardContext()));
assert searchContext.getQueryShardContext().isCachable();
success = true;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(searchContext);
}
}
return searchContext;
}
/**
 * Releases every active search context that belongs to the given index.
 *
 * @param index the index whose contexts should be freed; must not be null
 */
private void freeAllContextForIndex(Index index) {
    assert index != null;
    for (SearchContext candidate : activeContexts.values()) {
        if (!index.equals(candidate.indexShard().shardId().getIndex())) {
            continue;
        }
        freeContext(candidate.id());
    }
}
/**
 * Frees the search context with the given id, removing it from the active set.
 * Listener callbacks are fired before the context is closed, and {@code close()}
 * runs even when a listener throws.
 *
 * @param id the id of the context to free
 * @return {@code true} if a context with that id existed and was freed, {@code false} otherwise
 */
public boolean freeContext(long id) {
    final SearchContext context = removeContext(id);
    if (context == null) {
        return false;
    }
    assert context.refCount() > 0 : " refCount must be > 0: " + context.refCount();
    try {
        context.indexShard().getSearchOperationListener().onFreeContext(context);
        if (context.scrollContext() != null) {
            context.indexShard().getSearchOperationListener().onFreeScrollContext(context);
        }
    } finally {
        context.close();
    }
    return true;
}
/**
 * Releases every active context that was opened for a scroll request.
 */
public void freeAllScrollContexts() {
    for (SearchContext candidate : activeContexts.values()) {
        if (candidate.scrollContext() == null) {
            continue;
        }
        freeContext(candidate.id());
    }
}
/**
 * Marks the context as in-use so the idle-timeout reaper skips it while a
 * search phase is executing (-1 is the "being processed" sentinel checked in Reaper).
 */
private void contextProcessing(SearchContext context) {
// disable timeout while executing a search
context.accessed(-1);
}
/**
 * Re-arms the idle timeout after a phase completes by stamping the current
 * relative time as the context's last access time.
 */
private void contextProcessedSuccessfully(SearchContext context) {
context.accessed(threadPool.relativeTimeInMillis());
}
/**
 * Per-request cleanup: releases phase-scoped resources and detaches the task,
 * then (in the finally block) drops the reference taken for this request.
 */
private void cleanContext(SearchContext context) {
try {
context.clearReleasables(Lifetime.PHASE);
context.setTask(null);
} finally {
context.decRef();
}
}
/**
 * Frees the context after a failed phase and, if the failure indicates Lucene
 * index corruption, fails the shard. Any secondary failure is logged (with the
 * original exception suppressed into it) rather than propagated.
 */
private void processFailure(SearchContext context, Exception e) {
freeContext(context.id());
try {
if (Lucene.isCorruptionException(e)) {
context.indexShard().failShard("search execution corruption failure", e);
}
} catch (Exception inner) {
inner.addSuppressed(e);
logger.warn("failed to process shard failure to (potentially) send back shard failure on corruption", inner);
}
}
/**
 * Applies the given {@link SearchSourceBuilder} to the search context, translating
 * each section of the request body (query, post_filter, sort, aggregations, suggest,
 * rescore, highlight, fields, search_after, slice, stored_fields, collapse, ...)
 * into its executable form on the context.
 *
 * NOTE(review): ordering matters here - e.g. the search_after validation below reads
 * context.from() and context.sort(), which are populated earlier in this method.
 *
 * @param context the per-shard search context being populated
 * @param source the parsed request body; may be null (nothing to do)
 * @throws SearchContextException if any section cannot be built
 */
private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchContextException {
// nothing to parse...
if (source == null) {
return;
}
QueryShardContext queryShardContext = context.getQueryShardContext();
context.from(source.from());
context.size(source.size());
// collect inner_hits declared inside the query and post_filter so they can be registered below
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
if (source.query() != null) {
InnerHitBuilder.extractInnerHits(source.query(), innerHitBuilders);
context.parsedQuery(queryShardContext.toQuery(source.query()));
}
if (source.postFilter() != null) {
InnerHitBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
}
if (innerHitBuilders.size() > 0) {
for (Map.Entry<String, InnerHitBuilder> entry : innerHitBuilders.entrySet()) {
try {
entry.getValue().build(context, context.innerHits());
} catch (IOException e) {
throw new SearchContextException(context, "failed to build inner_hits", e);
}
}
}
if (source.sorts() != null) {
try {
// an absent optional means "sort by relevance score"; nothing needs to be set then
Optional<SortAndFormats> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
if (optionalSort.isPresent()) {
context.sort(optionalSort.get());
}
} catch (IOException e) {
throw new SearchContextException(context, "failed to create sort elements", e);
}
}
context.trackScores(source.trackScores());
if (source.minScore() != null) {
context.minimumScore(source.minScore());
}
if (source.profile()) {
context.setProfilers(new Profilers(context.searcher()));
}
if (source.timeout() != null) {
context.timeout(source.timeout());
}
context.terminateAfter(source.terminateAfter());
if (source.aggregations() != null) {
try {
AggregatorFactories factories = source.aggregations().build(context, null);
factories.validate();
context.aggregations(new SearchContextAggregations(factories));
} catch (IOException e) {
throw new AggregationInitializationException("Failed to create aggregators", e);
}
}
if (source.suggest() != null) {
try {
context.suggest(source.suggest().build(queryShardContext));
} catch (IOException e) {
throw new SearchContextException(context, "failed to create SuggestionSearchContext", e);
}
}
if (source.rescores() != null) {
try {
for (RescoreBuilder<?> rescore : source.rescores()) {
context.addRescore(rescore.build(queryShardContext));
}
} catch (IOException e) {
throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
}
}
if (source.explain() != null) {
context.explain(source.explain());
}
if (source.fetchSource() != null) {
context.fetchSourceContext(source.fetchSource());
}
if (source.docValueFields() != null) {
context.docValueFieldsContext(new DocValueFieldsContext(source.docValueFields()));
}
if (source.highlighter() != null) {
HighlightBuilder highlightBuilder = source.highlighter();
try {
context.highlight(highlightBuilder.build(queryShardContext));
} catch (IOException e) {
throw new SearchContextException(context, "failed to create SearchContextHighlighter", e);
}
}
if (source.scriptFields() != null) {
for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
SearchScript searchScript = scriptService.search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH);
context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
}
}
if (source.ext() != null) {
for (SearchExtBuilder searchExtBuilder : source.ext()) {
context.addSearchExt(searchExtBuilder);
}
}
if (source.version() != null) {
context.version(source.version());
}
if (source.stats() != null) {
context.groupStats(source.stats());
}
// search_after is incompatible with scrolling and with a non-zero `from`
if (source.searchAfter() != null && source.searchAfter().length > 0) {
if (context.scrollContext() != null) {
throw new SearchContextException(context, "`search_after` cannot be used in a scroll context.");
}
if (context.from() > 0) {
throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used.");
}
FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
context.searchAfter(fieldDoc);
}
// slicing is only meaningful for scroll requests
if (source.slice() != null) {
if (context.scrollContext() == null) {
throw new SearchContextException(context, "`slice` cannot be used outside of a scroll context");
}
context.sliceBuilder(source.slice());
}
if (source.storedFields() != null) {
if (source.storedFields().fetchFields() == false) {
// disabling stored fields conflicts with features that need them
if (context.version()) {
throw new SearchContextException(context, "`stored_fields` cannot be disabled if version is requested");
}
if (context.sourceRequested()) {
throw new SearchContextException(context, "`stored_fields` cannot be disabled if _source is requested");
}
}
context.storedFieldsContext(source.storedFields());
}
if (source.collapse() != null) {
final CollapseContext collapseContext = source.collapse().build(context);
context.collapse(collapseContext);
}
}
/**
 * Shortcut ids to load, we load only "from" and up to "size". The phase controller
 * handles this as well since the result is always size * shards for Q_A_F.
 * Completion-suggest documents (if any) are always appended after the regular hits.
 */
private void shortcutDocIdsToLoad(SearchContext context) {
final int[] docIdsToLoad;
// write cursor into docIdsToLoad, shared by the hit loop and the suggest loop below
int docsOffset = 0;
final Suggest suggest = context.queryResult().suggest();
int numSuggestDocs = 0;
final List<CompletionSuggestion> completionSuggestions;
if (suggest != null && suggest.hasScoreDocs()) {
completionSuggestions = suggest.filter(CompletionSuggestion.class);
for (CompletionSuggestion completionSuggestion : completionSuggestions) {
numSuggestDocs += completionSuggestion.getOptions().size();
}
} else {
completionSuggestions = Collections.emptyList();
}
if (context.request().scroll() != null) {
// scroll: every hit of this page is fetched; paging is driven by the scroll cursor
TopDocs topDocs = context.queryResult().topDocs();
docIdsToLoad = new int[topDocs.scoreDocs.length + numSuggestDocs];
for (int i = 0; i < topDocs.scoreDocs.length; i++) {
docIdsToLoad[docsOffset++] = topDocs.scoreDocs[i].doc;
}
} else {
// regular search: fetch only the [from, from + size) window of the top docs
TopDocs topDocs = context.queryResult().topDocs();
if (topDocs.scoreDocs.length < context.from()) {
// no more docs...
docIdsToLoad = new int[numSuggestDocs];
} else {
int totalSize = context.from() + context.size();
docIdsToLoad = new int[Math.min(topDocs.scoreDocs.length - context.from(), context.size()) +
numSuggestDocs];
for (int i = context.from(); i < Math.min(totalSize, topDocs.scoreDocs.length); i++) {
docIdsToLoad[docsOffset++] = topDocs.scoreDocs[i].doc;
}
}
}
// append the suggest doc ids after the regular hits
for (CompletionSuggestion completionSuggestion : completionSuggestions) {
for (CompletionSuggestion.Entry.Option option : completionSuggestion.getOptions()) {
docIdsToLoad[docsOffset++] = option.getDoc().doc;
}
}
context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
}
/**
 * Advances the context to the next scroll page: moves `from` past the docs already
 * returned and refreshes the scroll and keep-alive settings from the incoming request.
 */
private void processScroll(InternalScrollSearchRequest request, SearchContext context) {
// process scroll
context.from(context.from() + context.size());
context.scrollContext().scroll = request.scroll();
// update the context keep alive based on the new scroll value
if (request.scroll() != null && request.scroll().keepAlive() != null) {
context.keepAlive(request.scroll().keepAlive().millis());
}
}
/**
 * Returns the number of currently active (open) search contexts tracked by this
 * SearchService.
 */
public int getActiveContexts() {
return this.activeContexts.size();
}
/**
 * Periodic task that frees search contexts whose keep-alive has expired.
 * Contexts marked with lastAccessTime == -1 (currently executing a phase, or
 * timeout disabled) are skipped.
 * NOTE(review): how this task is scheduled is not visible in this chunk.
 */
class Reaper implements Runnable {
@Override
public void run() {
final long time = threadPool.relativeTimeInMillis();
for (SearchContext context : activeContexts.values()) {
// Use the same value for both checks since lastAccessTime can
// be modified by another thread between checks!
final long lastAccessTime = context.lastAccessTime();
if (lastAccessTime == -1L) { // its being processed or timeout is disabled
continue;
}
if ((time - lastAccessTime > context.keepAlive())) {
logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time,
lastAccessTime, context.keepAlive());
freeContext(context.id());
}
}
}
}
/**
 * Builds the alias filter for the given index by delegating to the indices service.
 *
 * @param state the current cluster state (source of alias metadata)
 * @param index the concrete index name
 * @param expressions the requested index/alias expressions
 * @return the resolved alias filter
 */
public AliasFilter buildAliasFilter(ClusterState state, String index, String... expressions) {
return indicesService.buildAliasFilter(state, index, expressions);
}
}
| apache-2.0 |
rometools/rome | rome-core/src/main/java/com/rometools/rome/feed/synd/SyndContentImpl.java | 4921 | /*
* Copyright 2004 Sun Microsystems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.rometools.rome.feed.synd;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.rometools.rome.feed.CopyFrom;
import com.rometools.rome.feed.impl.CloneableBean;
import com.rometools.rome.feed.impl.CopyFromHelper;
import com.rometools.rome.feed.impl.EqualsBean;
import com.rometools.rome.feed.impl.ToStringBean;
/**
 * Bean for content of SyndFeedImpl entries.
 */
public class SyndContentImpl implements Serializable, SyndContent {
private static final long serialVersionUID = 1L;
// Helper used by copyFrom(); only "type" and "value" are registered below.
private static final CopyFromHelper COPY_FROM_HELPER;
// Content type (see getType: when null, 'text/plain' must be assumed).
private String type;
// The content payload itself; null if none.
private String value;
// Content mode; null if none.
// NOTE(review): "mode" is not registered in COPY_FROM_HELPER, so copyFrom()
// does not copy it - confirm whether that is intended.
private String mode;
static {
final Map<String, Class<?>> basePropInterfaceMap = new HashMap<String, Class<?>>();
basePropInterfaceMap.put("type", String.class);
basePropInterfaceMap.put("value", String.class);
final Map<Class<? extends CopyFrom>, Class<?>> basePropClassImplMap = Collections.<Class<? extends CopyFrom>, Class<?>> emptyMap();
COPY_FROM_HELPER = new CopyFromHelper(SyndContent.class, basePropInterfaceMap, basePropClassImplMap);
}
public SyndContentImpl() { }
/**
 * Creates a deep 'bean' clone of the object.
 * <p>
 *
 * @return a clone of the object.
 * @throws CloneNotSupportedException thrown if an element of the object cannot be cloned.
 *
 */
@Override
public Object clone() throws CloneNotSupportedException {
return CloneableBean.beanClone(this, Collections.<String>emptySet());
}
/**
 * Indicates whether some other object is "equal to" this one as defined by the Object equals()
 * method.
 * <p>
 *
 * @param other the reference object with which to compare.
 * @return <b>true</b> if 'this' object is equal to the 'other' object.
 *
 */
@Override
public boolean equals(final Object other) {
return EqualsBean.beanEquals(SyndContent.class, this, other);
}
/**
 * Returns a hashcode value for the object.
 * <p>
 * It follows the contract defined by the Object hashCode() method.
 * <p>
 *
 * @return the hashcode of the bean object.
 *
 */
@Override
public int hashCode() {
return EqualsBean.beanHashCode(this);
}
/**
 * Returns the String representation for the object.
 * <p>
 *
 * @return String representation for the object.
 *
 */
@Override
public String toString() {
return ToStringBean.toString(SyndContent.class, this);
}
/**
 * Returns the content type.
 * <p>
 * When used for the description of an entry, if <b>null</b> 'text/plain' must be assumed.
 * <p>
 *
 * @return the content type, <b>null</b> if none.
 *
 */
@Override
public String getType() {
return type;
}
/**
 * Sets the content type.
 * <p>
 * When used for the description of an entry, if <b>null</b> 'text/plain' must be assumed.
 * <p>
 *
 * @param type the content type to set, <b>null</b> if none.
 *
 */
@Override
public void setType(final String type) {
this.type = type;
}
/**
 * Returns the content mode.
 *
 * @return the content mode, <b>null</b> if none.
 *
 */
@Override
public String getMode() {
return mode;
}
/**
 * Sets the content mode.
 *
 * @param mode the content mode to set, <b>null</b> if none.
 *
 */
@Override
public void setMode(final String mode) {
this.mode = mode;
}
/**
 * Returns the content value.
 * <p>
 *
 * @return the content value, <b>null</b> if none.
 *
 */
@Override
public String getValue() {
return value;
}
/**
 * Sets the content value.
 * <p>
 *
 * @param value the content value to set, <b>null</b> if none.
 *
 */
@Override
public void setValue(final String value) {
this.value = value;
}
@Override
public Class<SyndContent> getInterface() {
return SyndContent.class;
}
/**
 * Copies the registered properties (type and value) from the given SyndContent.
 */
@Override
public void copyFrom(final CopyFrom obj) {
COPY_FROM_HELPER.copy(this, obj);
}
}
| apache-2.0 |
andredalton/bcc | 2013/mac0438/Beto/ep3/src/Pessoa.java | 3545 | import java.util.Random;
/**
 * A person in the elevator simulation. Each Pessoa is a thread that repeatedly:
 * waits a random number of rounds to be "born", picks random origin/destination
 * floors, waits to enter the elevator, rides it, leaves at the destination, and
 * starts over. Threads take turns using the Monitor's barrier counter plus a
 * semaphore (monitorLocal.s); each turn ends with incBarreira() + release().
 *
 * NOTE(review): the turn-taking uses busy-waiting (Main.busy()) and the
 * InterruptedException handlers only print the stack trace without restoring the
 * interrupt flag - kept as-is here because the lock-step barrier protocol depends
 * on this exact ordering.
 */
public class Pessoa extends Thread {
private int id;
private int andarOrigem;
private int andarDestino;
private int modulo; // Integer modulus used to generate the waiting time before being born
private int espera; // Waiting time (in rounds) for a thread to be "born"
private Random generator; // Random number generator
private final Monitor monitorLocal; // Reference to the Monitor
private boolean viva;
private boolean dentroElevador;
// Initialization
public Pessoa(int id, Monitor monitor) {
this.id = id;
this.viva = false;
this.andarOrigem = 0;
this.andarDestino = 0;
this.dentroElevador = false;
this.modulo = 4 * monitor.getNAndares();
monitorLocal = monitor;
this.generator = new Random();
this.espera = 1 * this.generator.nextInt(this.modulo) + 1;
start();
}
// Whether this person is currently "alive" (born and not yet done with the trip).
public boolean getViva() {
return this.viva;
}
// Whether this person is currently inside the elevator.
public boolean getDentroElevador() {
return this.dentroElevador;
}
public int getAndarDestino() {
return this.andarDestino;
}
public int getAndarOrigem() {
return this.andarOrigem;
}
public void run() {
while (true) {
// Keeps waiting (counting down rounds) to be born
while (this.espera > 0) {
while (monitorLocal.getBarreira() != this.id || monitorLocal.terminouThreads() == true) {
Main.busy();
}
try {
monitorLocal.s.acquire();
} catch (InterruptedException e) {
// NOTE(review): interrupt swallowed; only logged
e.printStackTrace();
}
this.espera--;
monitorLocal.incBarreira();
monitorLocal.s.release();
}
// Is born: picks random origin and destination floors
while (monitorLocal.getBarreira() != this.id || monitorLocal.terminouThreads() == true) {
Main.busy();
}
try {
monitorLocal.s.acquire();
} catch (InterruptedException e) {
// NOTE(review): interrupt swallowed; only logged
e.printStackTrace();
}
this.viva = true;
this.andarOrigem = this.generator.nextInt(monitorLocal.getNAndares()) + 1;
this.andarDestino = this.generator.nextInt(monitorLocal.getNAndares()) + 1;
while (this.andarDestino == this.andarOrigem) { // This way, origin != destination
this.andarDestino = this.generator.nextInt(monitorLocal.getNAndares()) + 1;
}
monitorLocal.incBarreira();
monitorLocal.s.release();
// Keeps waiting (one attempt per turn) to enter the elevator
while(true) {
while (monitorLocal.getBarreira() != this.id || monitorLocal.terminouThreads() == true) {
Main.busy();
}
try {
monitorLocal.s.acquire();
} catch (InterruptedException e) {
// NOTE(review): interrupt swallowed; only logged
e.printStackTrace();
}
if (monitorLocal.tentaEntrarPessoa(this.andarOrigem) ) {
break;
}
monitorLocal.incBarreira();
monitorLocal.s.release();
}
this.dentroElevador = true;
monitorLocal.entraPessoa();
monitorLocal.incBarreira();
monitorLocal.s.release();
// Waits (one attempt per turn) to leave the elevator at the destination floor
while(true) {
while (monitorLocal.getBarreira() != this.id || monitorLocal.terminouThreads() == true) {
Main.busy();
}
try {
monitorLocal.s.acquire();
} catch (InterruptedException e) {
// NOTE(review): interrupt swallowed; only logged
e.printStackTrace();
}
if (monitorLocal.tentaSairPessoa(this.andarDestino) ) {
break;
}
monitorLocal.incBarreira();
monitorLocal.s.release();
}
this.dentroElevador = false;
monitorLocal.saiPessoa();
this.viva = false;
// "dies" and schedules the next random wait before being reborn
this.espera = 1 * this.generator.nextInt(this.modulo) + 1;
monitorLocal.incBarreira();
monitorLocal.s.release();
}
}
}
| apache-2.0 |
cesarviana/grafos | src/astar/MapaImpl.java | 2069 | package astar;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
/**
 * Grid map for the A* search: holds the start/end positions, the grid size
 * (linhas x colunas, 1-based coordinates) and the set of wall positions.
 */
public class MapaImpl implements Mapa {

    private Posicao inicio;
    private Posicao fim;
    private int linhas;
    private int colunas;
    private Collection<Posicao> muro;

    public MapaImpl(Posicao inicio, Posicao fim, int linhas, int colunas,
            Collection<Posicao> muro) {
        this.inicio = inicio;
        this.fim = fim;
        this.linhas = linhas;
        this.colunas = colunas;
        this.muro = muro;
    }

    public int getLinhas() {
        return linhas;
    }

    public int getColunas() {
        return colunas;
    }

    @Override
    public Posicao getInicio() {
        return inicio;
    }

    @Override
    public Posicao getFim() {
        return fim;
    }

    /**
     * Returns the neighbours of the given position that are inside the grid
     * and not blocked: walls are flagged first, then unreachable candidates
     * are discarded.
     */
    @Override
    public Collection<Posicao> getVizinhosAcessiveisDe(Posicao posicao) {
        Collection<Posicao> candidatos = todosOsVizinhos(posicao);
        marcaParedes(candidatos);
        descartaInacessiveis(candidatos);
        return candidatos;
    }

    /** Lists the eight surrounding positions, clockwise starting from the top. */
    private ArrayList<Posicao> todosOsVizinhos(Posicao posicao) {
        ArrayList<Posicao> vizinhos = new ArrayList<>();
        vizinhos.add(posicao.cima());
        vizinhos.add(posicao.cimaDireita());
        vizinhos.add(posicao.direita());
        vizinhos.add(posicao.baixoDireita());
        vizinhos.add(posicao.baixo());
        vizinhos.add(posicao.baixoEsquerda());
        vizinhos.add(posicao.esquerda());
        vizinhos.add(posicao.cimaEsquerda());
        return vizinhos;
    }

    /** Flags every candidate whose coordinates coincide with a wall position. */
    private void marcaParedes(Collection<Posicao> candidatos) {
        for (Posicao candidato : candidatos) {
            for (Posicao parede : muro) {
                if (parede.coluna == candidato.coluna && parede.linha == candidato.linha) {
                    candidato.tipo = Posicao.Tipo.MURO;
                    break;
                }
            }
        }
    }

    /** Drops candidates that fall outside the grid or are not walkable. */
    private void descartaInacessiveis(Collection<Posicao> candidatos) {
        candidatos.removeIf(candidato -> foraDoMapa(candidato) || !candidato.isAcessivel());
    }

    /** A position is off the map when either coordinate is 0 or beyond the last row/column. */
    private boolean foraDoMapa(Posicao posicao) {
        return posicao.coluna == 0 || posicao.linha == 0
                || posicao.coluna > colunas || posicao.linha > linhas;
    }

    @Override
    public String toString() {
        return linhas + " linhas," + colunas + " colunas";
    }
}
| apache-2.0 |
weiwenqiang/GitHub | expert/fastjson/src/test/java/com/alibaba/json/bvt/bug/Bug_10.java | 698 | package com.alibaba.json.bvt.bug;
import junit.framework.TestCase;
import com.alibaba.fastjson.JSON;
public class Bug_10 extends TestCase {

    /** Parses a JSON object whose double-quoted value embeds an escaped wrap-jdbc URL. */
    public void test_0() throws Exception {
        String json = "{'jdbcUrl':\"jdbc:wrap-jdbc:filters=default:name=com.alibaba.dragoon.monitor:jdbc:mysql:\\/\\/10.20.129.167\\/dragoon_v25monitordb?useUnicode=true&characterEncoding=UTF-8\"}";
        JSON.parse(json);
    }

    /** Same payload as test_0, but with the URL value single-quoted. */
    public void test_1() throws Exception {
        String json = "{'jdbcUrl':'jdbc:wrap-jdbc:filters=default:name=com.alibaba.dragoon.monitor:jdbc:mysql:\\/\\/10.20.129.167\\/dragoon_v25monitordb?useUnicode=true&characterEncoding=UTF-8'}";
        JSON.parse(json);
    }
}
| apache-2.0 |
fishercoder1534/Leetcode | src/main/java/com/fishercoder/solutions/_1065.java | 1691 | package com.fishercoder.solutions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * LeetCode 1065: Index Pairs of a String.
 *
 * For every word, finds every inclusive index pair [i, j] such that
 * text.substring(i, j + 1) equals the word, and returns all pairs sorted by
 * first index, then by second index.
 */
public class _1065 {
    public static class Solution1 {
        /**
         * Returns all [start, end] (inclusive) index pairs at which any of the given
         * words occurs in {@code text}, sorted by start then end index.
         *
         * @param text  the text to scan
         * @param words the words to look for (may be empty)
         * @return the sorted index pairs, or an empty array when there are no matches
         */
        public int[][] indexPairs(String text, String[] words) {
            List<List<Integer>> pairs = new ArrayList<>();
            for (String word : words) {
                pairs.addAll(findAllMatchesForThisWord(word, text));
            }
            if (pairs.isEmpty()) {
                return new int[][]{};
            }
            // Contract-respecting comparator: the previous hand-rolled version returned -1
            // for fully equal pairs, violating the Comparator contract and risking
            // "Comparison method violates its general contract!" from TimSort.
            pairs.sort((a, b) -> {
                int byFirst = Integer.compare(a.get(0), b.get(0));
                return byFirst != 0 ? byFirst : Integer.compare(a.get(1), b.get(1));
            });
            int[][] result = new int[pairs.size()][2];
            for (int i = 0; i < pairs.size(); i++) {
                result[i][0] = pairs.get(i).get(0);
                result[i][1] = pairs.get(i).get(1);
            }
            return result;
        }

        /** Collects every [start, end] (inclusive) occurrence of {@code word} in {@code text}. */
        private List<List<Integer>> findAllMatchesForThisWord(String word, String text) {
            List<List<Integer>> matches = new ArrayList<>();
            for (int i = 0; i <= text.length() - word.length(); i++) {
                // startsWith avoids allocating a substring for every candidate position
                if (text.startsWith(word, i)) {
                    matches.add(Arrays.asList(i, i + word.length() - 1));
                }
            }
            return matches;
        }
    }
}
| apache-2.0 |
reynoldsm88/drools | drools-test-coverage/test-suite/src/test/java/org/drools/testcoverage/functional/oopath/OOPathSmokeTest.java | 3719 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.testcoverage.functional.oopath;
import java.util.Collection;
import org.assertj.core.api.Assertions;
import org.drools.testcoverage.common.model.Address;
import org.drools.testcoverage.common.model.Person;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.api.io.Resource;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
/**
* Tests basic usage of OOPath expressions.
*/
@RunWith(Parameterized.class)
public class OOPathSmokeTest {
private static final KieServices KIE_SERVICES = KieServices.Factory.get();
private static final ReleaseId RELEASE_ID = KIE_SERVICES.newReleaseId("org.drools.testcoverage.oopath", "marshalling-test", "1.0");
private KieSession kieSession;
private final KieBaseTestConfiguration kieBaseTestConfiguration;
public OOPathSmokeTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
this.kieBaseTestConfiguration = kieBaseTestConfiguration;
}
@Parameterized.Parameters(name = "KieBase type={0}")
public static Collection<Object[]> getParameters() {
return TestParametersUtil.getKieBaseConfigurations();
}
@After
public void disposeKieSession() {
if (this.kieSession != null) {
this.kieSession.dispose();
this.kieSession = null;
}
}
@Test
public void testBuildKieBase() {
final KieBase kieBase = KieBaseUtil.getKieBaseFromClasspathResources(this.getClass(), kieBaseTestConfiguration,
"oopath.drl");
Assertions.assertThat(kieBase).isNotNull();
}
@Test
public void testBuildTwoKieBases() {
final Resource drlResource = KIE_SERVICES.getResources().newUrlResource(this.getClass().getResource("oopath.drl"));
KieBaseUtil.getKieModuleAndBuildInstallModule(RELEASE_ID, KieBaseTestConfiguration.CLOUD_IDENTITY, drlResource);
// creating two KieContainers and KieBases may trigger deep cloning
for (int i = 0; i < 2; i++) {
final KieContainer kieContainer = KIE_SERVICES.newKieContainer(RELEASE_ID);
final KieBase kieBase = kieContainer.getKieBase();
Assertions.assertThat(kieBase).isNotNull();
}
}
@Test
public void testFireRule() {
final KieBase kieBase = KieBaseUtil.getKieBaseFromClasspathResources(this.getClass(), kieBaseTestConfiguration,
"oopath.drl");
this.kieSession = kieBase.newKieSession();
final Person person = new Person("Bruno", 21);
person.setAddress(new Address("Some Street", 10, "Beautiful City"));
this.kieSession.insert(person);
Assertions.assertThat(this.kieSession.fireAllRules()).isEqualTo(1);
}
}
| apache-2.0 |
valikir/vturbin | chapter_001/src/main/java/ru/job4j/array/ContainsSub.java | 737 | package ru.job4j.array;
/**
 * Find Substring in a string.
 *
 * @author Valentin Turbin
 * @version 1
 * @since 24.05.2017
 */
public class ContainsSub {

    /**
     * Checks whether {@code origin} contains {@code sub} as a contiguous substring.
     *
     * Fixes two defects in the previous version: the matching index was kept in a
     * static field (results depended on earlier calls and the class was not
     * thread-safe), and the scan never re-examined characters after a partial
     * match, so e.g. contains("aab", "ab") wrongly returned false. A leftover
     * debug println was removed, and an empty {@code sub} no longer throws.
     *
     * @param origin the string to search in
     * @param sub    the substring to look for; an empty substring is always found
     * @return true if {@code sub} occurs in {@code origin}
     */
    boolean contains(String origin, String sub) {
        if (sub.length() > origin.length()) {
            return false;
        }
        // Try every start position; compare characters until mismatch or full match.
        for (int start = 0; start <= origin.length() - sub.length(); start++) {
            int matched = 0;
            while (matched < sub.length() && origin.charAt(start + matched) == sub.charAt(matched)) {
                matched++;
            }
            if (matched == sub.length()) {
                return true;
            }
        }
        return false;
    }
} // | apache-2.0 |
alvinkwekel/camel | components/camel-jslt/src/generated/java/org/apache/camel/component/jslt/JsltComponentConfigurer.java | 3046 | /* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.jslt;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
 * Generated by camel build tools - do NOT edit this file!
 */
@SuppressWarnings("unchecked")
public class JsltComponentConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
// Option name -> option type, keyed case-insensitively (exposed via getAllOptions).
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("allowTemplateFromHeader", boolean.class);
map.put("lazyStartProducer", boolean.class);
map.put("basicPropertyBinding", boolean.class);
map.put("functions", java.util.Collection.class);
map.put("objectFilter", com.schibsted.spt.data.jslt.filters.JsonFilter.class);
ALL_OPTIONS = map;
}
// Sets a single component option by name. The all-lowercase case labels support
// the ignoreCase lookup; returns false for unknown option names.
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
JsltComponent target = (JsltComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowtemplatefromheader":
case "allowTemplateFromHeader": target.setAllowTemplateFromHeader(property(camelContext, boolean.class, value)); return true;
case "basicpropertybinding":
case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
case "functions": target.setFunctions(property(camelContext, java.util.Collection.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "objectfilter":
case "objectFilter": target.setObjectFilter(property(camelContext, com.schibsted.spt.data.jslt.filters.JsonFilter.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
// Reads a single component option by name; returns null for unknown option names.
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
JsltComponent target = (JsltComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "allowtemplatefromheader":
case "allowTemplateFromHeader": return target.isAllowTemplateFromHeader();
case "basicpropertybinding":
case "basicPropertyBinding": return target.isBasicPropertyBinding();
case "functions": return target.getFunctions();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "objectfilter":
case "objectFilter": return target.getObjectFilter();
default: return null;
}
}
}
| apache-2.0 |
Bella-Assistant/Bella-Android | app/src/main/java/com/example/android/Bella/AppCont.java | 2357 | package com.example.android.Bella;
import android.app.Application;
import android.text.TextUtils;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.Volley;
/**
 * Application singleton that owns the process-wide Volley {@link RequestQueue}.
 *
 * The previous version carried a commented-out copy of this exact implementation
 * at the top of the class; that dead code has been removed.
 */
public class AppCont extends Application {

    public static final String TAG = AppCont.class.getSimpleName();

    private RequestQueue mRequestQueue;
    private static AppCont mInstance;

    @Override
    public void onCreate() {
        super.onCreate();
        mInstance = this;
    }

    /** Returns the application singleton (set in {@link #onCreate()}). */
    public static synchronized AppCont getInstance() {
        return mInstance;
    }

    /** Lazily creates and returns the shared Volley request queue. */
    public RequestQueue getRequestQueue() {
        if (mRequestQueue == null) {
            mRequestQueue = Volley.newRequestQueue(getApplicationContext());
        }
        return mRequestQueue;
    }

    /**
     * Enqueues a request, tagging it with {@code tag} (or the default TAG when
     * {@code tag} is empty) so it can later be cancelled as a group.
     */
    public <T> void addToRequestQueue(Request<T> req, String tag) {
        req.setTag(TextUtils.isEmpty(tag) ? TAG : tag);
        getRequestQueue().add(req);
    }

    /** Enqueues a request under the default TAG. */
    public <T> void addToRequestQueue(Request<T> req) {
        req.setTag(TAG);
        getRequestQueue().add(req);
    }

    /** Cancels all pending requests carrying the given tag, if a queue exists. */
    public void cancelPendingRequests(Object tag) {
        if (mRequestQueue != null) {
            mRequestQueue.cancelAll(tag);
        }
    }
}
| apache-2.0 |
asakusafw/asakusafw-mapreduce | integration/src/integration-test/java/com/asakusafw/integration/mapreduce/Util.java | 2412 | /**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.integration.mapreduce;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.MessageFormat;
import java.util.Optional;
import java.util.stream.Stream;
/**
 * Helpers for locating integration-test template data and reading result files.
 */
final class Util {

    private static final Path PATH_OWNED = Paths.get("src/integration-test/data");

    private static final Path PATH_INHERITED = Paths.get("build/integration-test/data");

    private Util() {
        return;
    }

    /**
     * Resolves a template data path, preferring the project-owned copy over the
     * copy prepared under the build directory by "./gradlew prepareIntegrationTest".
     *
     * @throws IllegalStateException if the path exists under neither root
     */
    static Path data(String path) {
        for (Path base : new Path[] { PATH_OWNED, PATH_INHERITED }) {
            Path candidate = base.resolve(path);
            if (Files.exists(candidate)) {
                return candidate;
            }
        }
        throw new IllegalStateException(MessageFormat.format(
                "missing template data \"{0}\"",
                path));
    }

    /**
     * Streams the non-empty lines of a result file. Non-regular files and
     * Hadoop-style CRC side files (".name.crc") yield an empty stream.
     *
     * @throws IllegalStateException if the file cannot be read
     */
    static Stream<String> lines(Path file) {
        if (!Files.isRegularFile(file)) {
            return Stream.empty();
        }
        Path fileName = file.getFileName();
        if (fileName != null) {
            String name = fileName.toString();
            if (name.startsWith(".") && name.endsWith(".crc")) {
                return Stream.empty();
            }
        }
        try {
            return Files.readAllLines(file, StandardCharsets.UTF_8).stream()
                    .filter(line -> !line.isEmpty());
        } catch (IOException e) {
            throw new IllegalStateException(MessageFormat.format(
                    "error occurred while reading: {0}",
                    file), e);
        }
    }
}
| apache-2.0 |
chao-sun-kaazing/gateway | transport/wsn/src/test/java/org/kaazing/gateway/transport/wsn/handshake/WsxExtensionsNegotiationIT.java | 2999 | /**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.gateway.transport.wsn.handshake;
import static org.kaazing.test.util.ITUtil.createRuleChain;
import java.net.URI;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.kaazing.gateway.server.test.GatewayRule;
import org.kaazing.gateway.server.test.config.GatewayConfiguration;
import org.kaazing.gateway.server.test.config.builder.GatewayConfigurationBuilder;
import org.kaazing.k3po.junit.annotation.Specification;
import org.kaazing.k3po.junit.rules.K3poRule;
/**
 * Integration test verifying that WebSocket extensions are negotiated when the
 * handshake arrives as a wrapped request, against an "echo" service protected
 * by an "Application Token" HTTP challenge scheme.
 */
public class WsxExtensionsNegotiationIT {

    // K3po drives the scripted client side of the handshake.
    private final K3poRule robot = new K3poRule();

    // Embedded gateway: one "echo" service on ws://localhost:8001/echo8001,
    // guarded by the "auth-required" realm in challenge mode. YesLoginModule
    // accepts any credentials and grants the listed roles.
    private GatewayRule gateway = new GatewayRule() {
        {
            // @formatter:off
            GatewayConfiguration configuration =
                new GatewayConfigurationBuilder()
                    .service()
                        .accept(URI.create("ws://localhost:8001/echo8001"))
                        .type("echo")
                        .crossOrigin()
                            .allowOrigin("http://localhost:8000")
                        .done()
                        .realmName("auth-required")
                    .done()
                    .security()
                        .realm()
                            .name("auth-required")
                            .description("Kaazing WebSocket Gateway Demo")
                            .httpChallengeScheme("Application Token")
                            .authorizationMode("challenge")
                            .loginModule()
                                .type("class:org.kaazing.gateway.security.auth.YesLoginModule")
                                .success("requisite")
                                .option("roles", "AUTHORIZED, ADMINISTRATOR")
                            .done()
                        .done()
                    .done()
                .done();
            // @formatter:on
            init(configuration);
        }
    };

    // Gateway must start before the robot script runs; the chain enforces that.
    @Rule
    public TestRule chain = createRuleChain(gateway, robot);

    @Specification("should.negotiate.extensions.in.wrapped.request")
    @Test
    public void shouldNegotiateExtensionsInWrappedRequest() throws Exception {
        robot.finish();
    }
}
| apache-2.0 |
println/prownloader | src/main/java/proto/cederj/prownloader/persistence/jpa/DataManager.java | 3134 | /*
* Copyright 2015 Felipe Santos <live.proto at hotmail.com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package proto.cederj.prownloader.persistence.jpa;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
/**
*
* @author Felipe Santos <fralph at ic.uff.br>
*/
/**
 * Thread-safe facade over a single shared JPA {@link EntityManager}.
 * Every operation synchronizes on the EntityManager, so only one thread
 * at a time touches the underlying persistence context. Write operations
 * run inside their own transaction.
 */
public class DataManager {

    private final EntityManager em;

    public DataManager(EntityManager em) {
        this.em = em;
    }

    /** Persists a new entity inside its own transaction. */
    public void save(Object o) {
        synchronized (em) {
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            em.persist(o);
            tx.commit();
        }
    }

    /** Looks up an entity by primary key; null when not found. */
    public <T> T get(Class<T> entityType, Object key) {
        synchronized (em) {
            return em.find(entityType, key);
        }
    }

    /** Returns a lazy reference to the entity with the given key. */
    public <T> T getReference(Class<T> entityType, Object key) {
        synchronized (em) {
            return em.getReference(entityType, key);
        }
    }

    /** Merges the entity's state into the persistence context, transactionally. */
    public void update(Object o) {
        synchronized (em) {
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            em.merge(o);
            tx.commit();
        }
    }

    /** Removes the entity, merging it first if it is detached. */
    public void delete(Object o) {
        synchronized (em) {
            EntityTransaction tx = em.getTransaction();
            tx.begin();
            em.remove(em.contains(o) ? o : em.merge(o));
            tx.commit();
        }
    }

    /** Exposes the CriteriaBuilder for composing typed queries. */
    public CriteriaBuilder getCriteriaBuilder() {
        synchronized (em) {
            return em.getCriteriaBuilder();
        }
    }

    /** Runs the criteria query and returns all matching rows. */
    public <T> List<T> getQuery(CriteriaQuery<T> query) {
        synchronized (em) {
            return em.createQuery(query).getResultList();
        }
    }

    /** Runs the criteria query and returns the first row, or null when empty. */
    public <T> T getQuerySingle(CriteriaQuery<T> query) {
        synchronized (em) {
            List<T> rows = em.createQuery(query).getResultList();
            if (rows == null || rows.isEmpty()) {
                return null;
            }
            return rows.get(0);
        }
    }

    /** Builds a native SQL query mapped onto the given entity class. */
    public Query getQquery(String sql, Class klass) {
        synchronized (em) {
            return em.createNativeQuery(sql, klass);
        }
    }
}
| apache-2.0 |
FinishX/coolweather | gradle/gradle-2.8/src/model-core/org/gradle/model/internal/core/ModelActionRole.java | 2014 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.model.internal.core;
/**
* A hard-coded sequence of model actions that can be applied to a model element.
*
* <p>This is pretty much a placeholder for something more descriptive.
*/
public enum ModelActionRole {
    // Declaration order matters: callers may rely on ordinal ordering of the
    // lifecycle, so do not reorder these constants.
    DefineProjections(ModelNode.State.ProjectionsDefined, false), // Defines projections for the node
    Create(ModelNode.State.Created, false), // Initializes the node
    DefineRules(ModelNode.State.RulesDefined, true), // Defines rules for an element. Does not use the subject as input
    Defaults(ModelNode.State.DefaultsApplied, true), // Allows a mutation to setup default values for an element
    Initialize(ModelNode.State.Initialized, true), // Mutation action provided when an element is defined
    Mutate(ModelNode.State.Mutated, true), // Customisations
    Finalize(ModelNode.State.Finalized, true), // Post customisation default values
    Validate(ModelNode.State.SelfClosed, true); // Post mutation validations

    // Node state reached once the actions in this role have been applied.
    private final ModelNode.State target;
    // Whether actions in this role may take a view of the subject as input.
    private final boolean subjectViewAvailable;

    ModelActionRole(ModelNode.State target, boolean subjectViewAvailable) {
        this.target = target;
        this.subjectViewAvailable = subjectViewAvailable;
    }

    /** Returns the node state that actions in this role drive the element towards. */
    public ModelNode.State getTargetState() {
        return target;
    }

    /** Returns whether a view of the subject is available to actions in this role. */
    public boolean isSubjectViewAvailable() {
        return subjectViewAvailable;
    }
}
| apache-2.0 |
blusechen/venus | venus-commons/venus-common-base/src/main/java/com/meidusa/venus/util/OgnlUtil.java | 1837 | package com.meidusa.venus.util;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import ognl.Ognl;
import ognl.OgnlException;
/**
* Used for OgnlBasicValueHolder to do expression computation.
*
* @author lichencheng.daisy
* @since 1.0.0-SNAPSHOT
*
*/
public class OgnlUtil {

    /**
     * Cache of parsed OGNL expressions keyed by their source text.
     *
     * A ConcurrentHashMap replaces the previous HashMap guarded by
     * {@code synchronized (expr)}: locking on the expression String was
     * broken, because two distinct (non-interned) String instances with equal
     * content do not share a monitor, so concurrent callers could mutate the
     * unsynchronized HashMap at the same time and corrupt it.
     */
    private static final Map<String, Object> expressionMap = new ConcurrentHashMap<String, Object>();

    /**
     * Returns the parsed form of the given OGNL expression, parsing and
     * caching it on first use.
     *
     * @param expr the OGNL expression source text
     * @return the parsed expression tree, or null if the parser returned null
     * @throws OgnlException if the expression cannot be parsed
     */
    private static Object getExpression(String expr) throws OgnlException {
        Object parsedExpression = expressionMap.get(expr);
        if (parsedExpression == null) {
            // Parse outside any lock: a duplicate parse under contention is
            // harmless, and putIfAbsent makes the first stored value win so
            // every caller ends up sharing the same cached instance.
            parsedExpression = Ognl.parseExpression(expr);
            if (parsedExpression != null) {
                Object previous = expressionMap.putIfAbsent(expr, parsedExpression);
                if (previous != null) {
                    parsedExpression = previous;
                }
            }
        }
        return parsedExpression;
    }

    /** Writes {@code value} into {@code root} at the location named by {@code expr}. */
    public static void setValue(Object root, String expr, Object value) throws OgnlException {
        Ognl.setValue(getExpression(expr), root, value);
    }

    /** Evaluates {@code expr} against {@code root} and returns the result as a String. */
    public static String findString(Object root, String expr) throws OgnlException {
        return String.valueOf(Ognl.getValue(getExpression(expr), root));
    }

    /** Evaluates {@code expr} against {@code root}. */
    public static Object findValue(Object root, String expr) throws OgnlException {
        return Ognl.getValue(getExpression(expr), root);
    }

    /** Evaluates {@code expr} against {@code root}, converting the result to {@code asType}. */
    public static Object findValue(Object root, String expr, Class<?> asType) throws OgnlException {
        return Ognl.getValue(getExpression(expr), root, asType);
    }
}
| apache-2.0 |
TheRingbearer/HAWKS | ode/bpel-epr/src/main/java/org/apache/ode/bpel/epr/WSDL11Endpoint.java | 5664 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.epr;
import org.apache.ode.utils.DOMUtils;
import org.apache.ode.utils.Namespaces;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import javax.xml.namespace.QName;
import java.util.HashMap;
import java.util.Map;
/**
* A service endpoint represented as a wsdl11:service element.
*/
/**
 * A service endpoint represented as a wsdl11:service element.
 *
 * Internally holds a single DOM element ({@code _serviceElmt}) rooted under a
 * bpel "service-ref" wrapper; all accessors read from or rebuild that element.
 */
public class WSDL11Endpoint implements MutableEndpoint {
    // The wsdl11:service element backing this endpoint.
    private Element _serviceElmt;

    public WSDL11Endpoint() {
    }

    /**
     * Builds a service-ref/wsdl:service/wsdl:port/soap:address DOM structure.
     *
     * @param serviceQName qualified service name (may be null: name attributes omitted)
     * @param portName port name (may be null: name attribute omitted)
     * @param location soap:address location URL (may be null: attribute omitted)
     */
    public WSDL11Endpoint(QName serviceQName, String portName, String location) {
        Document doc = DOMUtils.newDocument();
        Element serviceRef = doc.createElementNS(
                SERVICE_REF_QNAME.getNamespaceURI(),
                SERVICE_REF_QNAME.getLocalPart());
        doc.appendChild(serviceRef);
        _serviceElmt = doc.createElementNS(Namespaces.WSDL_11, "service");
        serviceRef.appendChild(_serviceElmt);
        if (serviceQName != null) {
            _serviceElmt.setAttribute("name", serviceQName.getLocalPart());
            _serviceElmt.setAttribute("targetNamespace",
                    serviceQName.getNamespaceURI());
        }
        Element port = doc.createElementNS(Namespaces.WSDL_11, "port");
        if (portName != null) {
            port.setAttribute("name", portName);
        }
        // Binding is intentionally left blank; see the TODO in toMap().
        port.setAttribute("binding", "");
        Element address = doc.createElementNS(Namespaces.SOAP_NS, "address");
        if (location != null)
            address.setAttribute("location", location);
        _serviceElmt.appendChild(port);
        port.appendChild(address);
    }

    /** Convenience constructor with no address location. */
    public WSDL11Endpoint(QName serviceName, String portName) {
        this(serviceName, portName, null);
    }

    /**
     * Returns the endpoint URL from the first port's soap:address, falling
     * back to http:address.
     *
     * @throws IllegalArgumentException when neither address element is present
     */
    public String getUrl() {
        Element port = (Element) _serviceElmt.getElementsByTagNameNS(
                Namespaces.WSDL_11, "port").item(0);
        // get soap:address
        Element address = (Element) port.getElementsByTagNameNS(
                Namespaces.SOAP_NS, "address").item(0);
        // ... or the http:address
        if (address == null) {
            address = (Element) port.getElementsByTagNameNS(Namespaces.HTTP_NS,
                    "address").item(0);
        }
        if (address == null) {
            throw new IllegalArgumentException(
                    "soap:address and http:address element in element "
                            + DOMUtils.domToString(_serviceElmt)
                            + " is missing or in the wrong namespace.");
        }
        return address.getAttribute("location");
    }

    /** Reads the qualified service name back out of the service element's attributes. */
    public QName getServiceName() {
        return new QName(_serviceElmt.getAttribute("targetNamespace"),
                _serviceElmt.getAttribute("name"));
    }

    /**
     * Returns true when the node is a wsdl11:service element, optionally
     * wrapped in a bpel service-ref element.
     *
     * NOTE(review): getNamespaceURI() returns null for no-namespace elements,
     * so a no-namespace "service-ref" or "service" element would NPE here —
     * confirm callers only pass namespaced nodes.
     */
    public boolean accept(Node node) {
        if (node.getNodeType() == Node.ELEMENT_NODE) {
            Element elmt = (Element) node;
            if (elmt.getLocalName().equals("service-ref")
                    && (elmt.getNamespaceURI().equals(Namespaces.WS_BPEL_20_NS) || elmt
                            .getNamespaceURI().equals(
                                    Namespaces.WSBPEL2_0_FINAL_SERVREF)))
                elmt = DOMUtils.getFirstChildElement(elmt);
            if (elmt.getLocalName().equals("service")
                    && elmt.getNamespaceURI().equals(Namespaces.WSDL_11))
                return true;
        }
        return false;
    }

    /**
     * Adopts the given node as the backing service element, unwrapping a
     * service-ref wrapper when present.
     *
     * NOTE(review): also assumes node.getNamespaceURI() is non-null — verify
     * against accept() callers.
     */
    public void set(Node node) {
        if (node.getNamespaceURI().equals(SERVICE_REF_QNAME.getNamespaceURI()))
            _serviceElmt = DOMUtils.getFirstChildElement((Element) node);
        else
            _serviceElmt = (Element) node;
    }

    /**
     * Returns the owner document of the backing service element.
     * Note: the freshly built wrapper document is discarded; the returned
     * document is the one the service element already belongs to.
     */
    public Document toXML() {
        // Wrapping
        Document doc = DOMUtils.newDocument();
        Element serviceRef = doc.createElementNS(
                Namespaces.WSBPEL2_0_FINAL_SERVREF, "service-ref");
        doc.appendChild(serviceRef);
        serviceRef.appendChild(doc.importNode(_serviceElmt, true));
        return _serviceElmt.getOwnerDocument();
    }

    /** Flattens the endpoint into a map of ADDRESS, SERVICE_QNAME and PORT_NAME entries. */
    public Map toMap() {
        HashMap<String, Object> result = new HashMap<String, Object>(1);
        result.put(ADDRESS, getUrl());
        result.put(SERVICE_QNAME,
                new QName(_serviceElmt.getAttribute("targetNamespace"),
                        _serviceElmt.getAttribute("name")));
        Element port = DOMUtils.getFirstChildElement(_serviceElmt);
        result.put(PORT_NAME, port.getAttribute("name"));
        // TODO binding
        return result;
    }

    /** Rebuilds the backing DOM structure from a map produced by toMap(). */
    public void fromMap(Map eprMap) {
        Document doc = DOMUtils.newDocument();
        Element serviceRef = doc.createElementNS(
                SERVICE_REF_QNAME.getNamespaceURI(),
                SERVICE_REF_QNAME.getLocalPart());
        doc.appendChild(serviceRef);
        _serviceElmt = doc.createElementNS(Namespaces.WSDL_11, "service");
        serviceRef.appendChild(_serviceElmt);
        if (eprMap.get(SERVICE_QNAME) != null) {
            QName serviceQName = ((QName) eprMap.get(SERVICE_QNAME));
            _serviceElmt.setAttribute("name", serviceQName.getLocalPart());
            _serviceElmt.setAttribute("targetNamespace",
                    serviceQName.getNamespaceURI());
        }
        Element port = doc.createElementNS(Namespaces.WSDL_11, "port");
        if (eprMap.get(PORT_NAME) != null) {
            port.setAttribute("name", (String) eprMap.get(PORT_NAME));
        }
        port.setAttribute("binding", "");
        Element address = doc.createElementNS(Namespaces.SOAP_NS, "address");
        if (eprMap.get(ADDRESS) != null)
            address.setAttribute("location", (String) eprMap.get(ADDRESS));
        _serviceElmt.appendChild(port);
        port.appendChild(address);
    }
}
| apache-2.0 |
MikeFot/android-crossy-score | app/src/main/java/com/michaelfotiadis/crossyscore/utils/ImageUtils.java | 2401 | package com.michaelfotiadis.crossyscore.utils;
import android.content.Context;
import android.support.v4.content.ContextCompat;
import android.widget.ImageView;
import com.michaelfotiadis.crossyscore.R;
import java.util.regex.Pattern;
import co.uk.alt236.reflectivedrawableloader.ReflectiveDrawableLoader;
/**
*
*/
/**
 * Resolves drawable resources by name at runtime (via reflection) and loads
 * them into ImageViews, falling back to a placeholder when missing.
 */
public class ImageUtils {

    public static final int DEFAULT_IMAGE_PLACEHOLDER = R.drawable.ic_android_light_blue_300_18dp;

    private static final Pattern PATTERN_SPACE = Pattern.compile(" ");
    private static final Pattern PATTERN_DASH = Pattern.compile("-");
    private static final Pattern PATTERN_SPECIAL = Pattern.compile("[^()|\\- a-zA-Z0-9]");

    private final Context mContext;
    private final ReflectiveDrawableLoader mRefLoaderInstance;

    public ImageUtils(final Context context) {
        mContext = context;
        mRefLoaderInstance = ReflectiveDrawableLoader.getInstance(mContext);
        mRefLoaderInstance.setLogErrors(true);
    }

    /** Flavour of drawable lookup: plain drawable or list-item drawable. */
    public enum IMAGE_TYPE {
        NORMAL, LIST
    }

    /**
     * Normalises a display name into a resource-style identifier:
     * special characters, dashes and spaces become underscores, lowercased.
     */
    private static String sanitiseName(final String name) {
        String sanitised = PATTERN_SPECIAL.matcher(name).replaceAll("_");
        sanitised = PATTERN_DASH.matcher(sanitised).replaceAll("_");
        sanitised = PATTERN_SPACE.matcher(sanitised).replaceAll("_");
        return sanitised.toLowerCase();
    }

    /**
     * Resolves the drawable id for the given name via reflection.
     *
     * @param drawableName resource-style drawable name
     * @param type         lookup flavour; anything else (including null)
     *                     yields the generic default drawable
     * @return resolved drawable id, or a fallback when not found
     */
    public int getImageIdReflectively(final String drawableName, final IMAGE_TYPE type) {
        AppLog.d("Looking for drawable " + drawableName);
        if (IMAGE_TYPE.NORMAL == type) {
            return mRefLoaderInstance.
                    getDrawableId(drawableName, "", DEFAULT_IMAGE_PLACEHOLDER);
        }
        if (IMAGE_TYPE.LIST == type) {
            return mRefLoaderInstance.
                    getListDrawableId(drawableName, "", DEFAULT_IMAGE_PLACEHOLDER);
        }
        return R.drawable.ic_default;
    }

    /**
     * Loads the named drawable into the view, substituting the placeholder
     * when the name is null.
     */
    public void loadImageToViewReflectively(final ImageView view, final String drawableName, final IMAGE_TYPE type) {
        final int resourceId = (drawableName == null)
                ? DEFAULT_IMAGE_PLACEHOLDER
                : getImageIdReflectively(sanitiseName(drawableName), type);
        view.setImageDrawable(ContextCompat.getDrawable(mContext, resourceId));
    }
}
| apache-2.0 |
weld/core | tests-arquillian/src/test/java/org/jboss/weld/tests/extensions/lifecycle/processBean/passivationCapable/AddPassivationCapableBeanWithNullIdTest.java | 1874 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.extensions.lifecycle.processBean.passivationCapable;
import static org.junit.Assert.assertTrue;
import jakarta.enterprise.inject.spi.Extension;
import jakarta.inject.Inject;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.BeanArchive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.weld.test.util.Utils;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(Arquillian.class)
public class AddPassivationCapableBeanWithNullIdTest {
@Deployment
public static Archive<?> getDeployment() {
return ShrinkWrap.create(BeanArchive.class, Utils.getDeploymentNameAsHash(AddPassivationCapableBeanWithNullIdTest.class)).addPackage(AddPassivationCapableBeanWithNullIdTest.class.getPackage())
.addAsServiceProvider(Extension.class, SomeExtension.class);
}
@Inject
SomeExtension extension;
@Test
public void test() {
assertTrue(extension.isIAEthrown());
}
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/Class_458.java | 145 | package fr.javatronic.blog.massive.annotation1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
public class Class_458 {
    // Intentionally empty: one of many generated classes used to benchmark
    // annotation processing over a massive number of annotated types.
}
| apache-2.0 |
Davidwu123/coolweather | app/src/main/java/com/coolweather/app/model/CoolWeatherDB.java | 5554 | package com.coolweather.app.model;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import com.coolweather.app.db.CoolWeatherOpenHelper;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by huiyi on 2016/5/27.
 * Wraps the commonly used database operations.
 */
public class CoolWeatherDB {
    /*
     * Database name.
     * */
    public final static String DB_NAME="cool_weather";
    /*
     * Database schema version.
     * */
    public final static int VERSION=1;
    // Singleton instance of this helper.
    private static CoolWeatherDB sCoolWeatherDB;
    private SQLiteDatabase mSQLiteDatabase;
    // Private constructor: instances are only created through getInstance().
    private CoolWeatherDB(Context context){
        // Create (or open) the database.
        CoolWeatherOpenHelper dbHelper=new CoolWeatherOpenHelper
                (context,DB_NAME,null,VERSION);
        // Obtain a writable database handle.
        mSQLiteDatabase=dbHelper.getWritableDatabase();
    }
    // Synchronized so only one thread at a time can enter, guaranteeing the
    // singleton is created exactly once. External callers go through this
    // method, passing in the Context used by the constructor.
    public synchronized static CoolWeatherDB getInstance(Context context){
        if(sCoolWeatherDB==null){
            // Not yet instantiated: create the singleton now.
            sCoolWeatherDB =new CoolWeatherDB(context);
        }
        return sCoolWeatherDB;
    }
    /*
     * Stores a Province instance into the database.
     * */
    public void saveProvince(Province province){
        if (province!=null){
            // Column values for the new row.
            ContentValues valuesProvince=new ContentValues();
            valuesProvince.put("province_name",province.getProvinceName());
            valuesProvince.put("province_code",province.getProvinceCode());
            mSQLiteDatabase.insert("Province",null,valuesProvince);
        }
    }
    /*
     * Reads every province row (returns a list of Province objects).
     * */
    public List<Province> loadProvinces(){
        List<Province> listProvince=new ArrayList<Province>();
        // A Cursor plays a role similar to a JDBC result set.
        Cursor cursor=mSQLiteDatabase.query("Province",null,null,null,null,null,null);
        if (cursor.moveToFirst()){// iterate over all rows
            do {
                // Map each row to its own Province object, keyed by id.
                Province province=new Province();
                province.setId(cursor.getInt(cursor.getColumnIndex("id")));
                province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name")));
                province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code")));
                listProvince.add(province);
            }while (cursor.moveToNext());
        }
        return listProvince;
    }
    /*
     * Stores a City instance into the database.
     * */
    public void saveCity(City city){
        if (city!=null) {
            ContentValues valuesCity = new ContentValues();
            valuesCity.put("city_name", city.getCityName());
            valuesCity.put("city_code", city.getCityCode());
            valuesCity.put("province_id", city.getProvinceId());
            mSQLiteDatabase.insert("City", null, valuesCity);
        }
    }
    /*
     * Reads all cities of the province identified by provinceId.
     * */
    public List<City> loadCities(int provinceId){
        List<City> listCity=new ArrayList<City>();
        // Query filtered on the province id.
        Cursor cursor=mSQLiteDatabase.query("City",null,"province_id=?",
                new String[]{String.valueOf(provinceId)},null,null,null);
        if (cursor.moveToFirst()){
            do {
                City city=new City();
                city.setId(cursor.getInt(cursor.getColumnIndex("id")));
                city.setCityName(cursor.getString(cursor.getColumnIndex("city_name")));
                city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code")));
                city.setProvinceId(provinceId);
                listCity.add(city);
            }while (cursor.moveToNext());
        }
        return listCity;
    }
    /*
     * Stores a County instance into the database.
     * */
    public void saveCounty(County county){
        if (county!=null) {
            ContentValues valuesCounty = new ContentValues();
            valuesCounty.put("county_name", county.getCountyName());
            valuesCounty.put("county_code", county.getCountyCode());
            valuesCounty.put("city_id", county.getCityId());
            mSQLiteDatabase.insert("County", null, valuesCounty);
        }
    }
    /*
     * Reads all counties under the city identified by cityId.
     * */
    public List<County> loadCounties(int cityId){
        List<County> listCounty=new ArrayList<County>();
        Cursor cursor=mSQLiteDatabase.query("County",null,"city_id=?",
                new String[]{String.valueOf(cityId)},null,null,null);
        if (cursor.moveToFirst()){
            do {
                County county=new County();
                county.setId(cursor.getInt(cursor.getColumnIndex("id")));
                county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name")));
                county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code")));
                county.setCityId(cityId);
                listCounty.add(county);
            }while (cursor.moveToNext());
        }
        return listCounty;
    }
}
| apache-2.0 |
orientechnologies/orientdb | core/src/main/java/com/orientechnologies/orient/core/db/ODatabaseLifecycleListener.java | 2134 | /*
*
* * Copyright 2010-2016 OrientDB LTD (http://orientdb.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://orientdb.com
*
*/
package com.orientechnologies.orient.core.db;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OView;
import com.orientechnologies.orient.core.record.impl.ODocument;
/**
* Listener Interface to receive callbacks on database usage.
*
* @author Luca Garulli (l.garulli--(at)--orientdb.com)
*/
public interface ODatabaseLifecycleListener {
    /** Relative ordering applied to registered listeners. */
    enum PRIORITY {
        FIRST,
        EARLY,
        REGULAR,
        LATE,
        LAST
    }

    /** Priority of this listener; defaults to LAST. */
    default PRIORITY getPriority() {
        return PRIORITY.LAST;
    }

    /** Invoked when a database is created. */
    void onCreate(ODatabaseInternal iDatabase);

    /** Invoked when a database is opened. */
    void onOpen(ODatabaseInternal iDatabase);

    /** Invoked when a database is closed. */
    void onClose(ODatabaseInternal iDatabase);

    /** Invoked when a database is dropped. */
    void onDrop(ODatabaseInternal iDatabase);

    @Deprecated
    default void onCreateClass(ODatabaseInternal iDatabase, OClass iClass) {}

    @Deprecated
    default void onDropClass(ODatabaseInternal iDatabase, OClass iClass) {}

    /** Invoked when a view is created; no-op by default. */
    default void onCreateView(ODatabaseInternal database, OView view) {}

    /** Invoked when a view is dropped; no-op by default. */
    default void onDropView(ODatabaseInternal database, OView cls) {}

    /**
     * Event called during the retrieving of distributed configuration, usually at startup and when
     * the cluster shape changes. You can use this event to enrich the ODocument sent to the client
     * with custom properties.
     *
     * @param iConfiguration the configuration document that may be enriched in place
     */
    void onLocalNodeConfigurationRequest(ODocument iConfiguration);
}
| apache-2.0 |
crate/crate | server/src/main/java/io/crate/protocols/postgres/ConnectionProperties.java | 2581 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.protocols.postgres;
import io.crate.auth.Protocol;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import javax.annotation.Nullable;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import java.net.InetAddress;
import java.security.cert.Certificate;
/**
 * Immutable description of a client connection: remote address, wire
 * protocol, and optional SSL session details.
 */
public class ConnectionProperties {

    private static final Logger LOGGER = LogManager.getLogger(ConnectionProperties.class);

    private final InetAddress address;
    private final Protocol protocol;
    private final boolean hasSSL;

    @Nullable
    private final SSLSession sslSession;

    public ConnectionProperties(InetAddress address, Protocol protocol, @Nullable SSLSession sslSession) {
        this.address = address;
        this.protocol = protocol;
        this.sslSession = sslSession;
        this.hasSSL = sslSession != null;
    }

    /** True when the connection is encrypted with SSL/TLS. */
    public boolean hasSSL() {
        return hasSSL;
    }

    /** Remote address of the connected client. */
    public InetAddress address() {
        return address;
    }

    /** Protocol the client connected over. */
    public Protocol protocol() {
        return protocol;
    }

    /**
     * First certificate of the peer's chain, or null when the peer did not
     * present one (or the connection is not SSL at all).
     */
    public Certificate clientCert() {
        // Resolved lazily rather than in the constructor so that plain SSL
        // connections without (expected) client-certificate auth do not log.
        if (sslSession == null) {
            return null;
        }
        try {
            Certificate[] peerCertificates = sslSession.getPeerCertificates();
            return peerCertificates[0];
        } catch (ArrayIndexOutOfBoundsException | SSLPeerUnverifiedException e) {
            LOGGER.debug("Client certificate not available", e);
            return null;
        }
    }
}
| apache-2.0 |
wuzhendev/samples | EcoGalleryDemo/app/src/main/java/us/feras/ecogallery/EcoGalleryAdapterView.java | 36922 | package us.feras.ecogallery;
import android.content.Context;
import android.database.DataSetObserver;
import android.os.Parcelable;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.SoundEffectConstants;
import android.view.View;
import android.view.ViewDebug;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityEvent;
import android.widget.Adapter;
import android.widget.ListView;
public abstract class EcoGalleryAdapterView<T extends Adapter> extends ViewGroup {
/**
* The item view type returned by {@link Adapter#getItemViewType(int)} when
* the adapter does not want the item's view recycled.
*/
public static final int ITEM_VIEW_TYPE_IGNORE = -1;
/**
* The item view type returned by {@link Adapter#getItemViewType(int)} when
* the item is a header or footer.
*/
public static final int ITEM_VIEW_TYPE_HEADER_OR_FOOTER = -2;
/**
* The position of the first child displayed
*/
int mFirstPosition = 0;
/**
* The offset in pixels from the top of the AdapterView to the top of the
* view to select during the next layout.
*/
int mSpecificTop;
/**
* Position from which to start looking for mSyncRowId
*/
int mSyncPosition;
/**
* Row id to look for when data has changed
*/
long mSyncRowId = INVALID_ROW_ID;
/**
* Height of the view when mSyncPosition and mSyncRowId where set
*/
long mSyncHeight;
/**
* True if we need to sync to mSyncRowId
*/
boolean mNeedSync = false;
/**
* Indicates whether to sync based on the selection or position. Possible
* values are {@link #SYNC_SELECTED_POSITION} or
* {@link #SYNC_FIRST_POSITION}.
*/
int mSyncMode;
/**
* Our height after the last layout
*/
private int mLayoutHeight;
/**
* Sync based on the selected child
*/
static final int SYNC_SELECTED_POSITION = 0;
/**
* Sync based on the first child displayed
*/
static final int SYNC_FIRST_POSITION = 1;
/**
* Maximum amount of time to spend in {@link #findSyncPosition()}
*/
static final int SYNC_MAX_DURATION_MILLIS = 100;
/**
* Indicates that this view is currently being laid out.
*/
boolean mInLayout = false;
/**
* The listener that receives notifications when an item is selected.
*/
OnItemSelectedListener mOnItemSelectedListener;

/**
 * The listener that receives notifications when an item is clicked.
 */
OnItemClickListener mOnItemClickListener;

/**
 * The listener that receives notifications when an item is long clicked.
 */
OnItemLongClickListener mOnItemLongClickListener;

/**
 * True if the data has changed since the last layout.
 */
boolean mDataChanged;

/**
 * The position within the adapter's data set of the item to select during
 * the next layout.
 */
int mNextSelectedPosition = INVALID_POSITION;

/**
 * The item id of the item to select during the next layout.
 */
long mNextSelectedRowId = INVALID_ROW_ID;

/**
 * The position within the adapter's data set of the currently selected
 * item.
 */
int mSelectedPosition = INVALID_POSITION;

/**
 * The item id of the currently selected item.
 */
long mSelectedRowId = INVALID_ROW_ID;

/**
 * View to show if there are no items to show.
 */
private View mEmptyView;

/**
 * The number of items in the current adapter.
 */
int mItemCount;

/**
 * The number of items in the adapter before a data changed event occurred.
 */
int mOldItemCount;

/**
 * Represents an invalid position. All valid positions are in the range 0 to
 * 1 less than the number of items in the current adapter.
 */
public static final int INVALID_POSITION = -1;

/**
 * Represents an empty or invalid row id.
 */
public static final long INVALID_ROW_ID = Long.MIN_VALUE;

/**
 * The last selected position we used when notifying.
 */
int mOldSelectedPosition = INVALID_POSITION;

/**
 * The id of the last selected position we used when notifying.
 */
long mOldSelectedRowId = INVALID_ROW_ID;

/**
 * Indicates what focusable state is requested when calling setFocusable().
 * In addition to this, this view has other criteria for actually
 * determining the focusable state (such as whether it's empty or the text
 * filter is shown).
 *
 * @see #setFocusable(boolean)
 * @see #checkFocus()
 */
private boolean mDesiredFocusableState;
private boolean mDesiredFocusableInTouchModeState;

// Posted runnable used to deliver selection callbacks outside layout passes.
private SelectionNotifier mSelectionNotifier;

/**
 * When set to true, calls to requestLayout() will not propagate up the
 * parent hierarchy. This is used to layout the children during a layout
 * pass.
 */
boolean mBlockLayoutRequests = false;

/** Constructs the adapter view when created programmatically. */
public EcoGalleryAdapterView(Context context) {
    super(context);
}

/** Constructs the adapter view when inflated from XML. */
public EcoGalleryAdapterView(Context context, AttributeSet attrs) {
    super(context, attrs);
}

/** Constructs the adapter view when inflated from XML with a default style. */
public EcoGalleryAdapterView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
}
/**
 * Interface definition for a callback to be invoked when an item in this
 * AdapterView has been clicked.
 */
public interface OnItemClickListener {
    /**
     * Callback method to be invoked when an item in this AdapterView has
     * been clicked.
     * <p/>
     * Implementers can call getItemAtPosition(position) if they need to
     * access the data associated with the selected item.
     *
     * @param parent The AdapterView where the click happened.
     * @param view The view within the AdapterView that was clicked (this
     * will be a view provided by the adapter)
     * @param position The position of the view in the adapter.
     * @param id The row id of the item that was clicked.
     */
    void onItemClick(EcoGalleryAdapterView<?> parent, View view, int position, long id);
}

/**
 * Register a callback to be invoked when an item in this AdapterView has
 * been clicked.
 *
 * @param listener The callback that will be invoked.
 */
public void setOnItemClickListener(OnItemClickListener listener) {
    mOnItemClickListener = listener;
}

/**
 * @return The callback to be invoked when an item in this AdapterView has
 * been clicked, or null if no callback has been set.
 */
public final OnItemClickListener getOnItemClickListener() {
    return mOnItemClickListener;
}
/**
 * Invokes the registered OnItemClickListener, if any, playing the standard
 * click sound effect before the callback runs.
 *
 * @param view The view within the AdapterView that was clicked.
 * @param position The position of the view in the adapter.
 * @param id The row id of the item that was clicked.
 * @return True if a listener was registered and invoked, false otherwise.
 */
public boolean performItemClick(View view, int position, long id) {
    final OnItemClickListener listener = mOnItemClickListener;
    if (listener == null) {
        return false;
    }
    playSoundEffect(SoundEffectConstants.CLICK);
    listener.onItemClick(this, view, position, id);
    return true;
}
/**
 * Interface definition for a callback to be invoked when an item in this
 * view has been clicked and held.
 */
public interface OnItemLongClickListener {
    /**
     * Callback method to be invoked when an item in this view has been
     * clicked and held.
     * <p/>
     * Implementers can call getItemAtPosition(position) if they need to
     * access the data associated with the selected item.
     *
     * @param parent The AbsListView where the click happened
     * @param view The view within the AbsListView that was clicked
     * @param position The position of the view in the list
     * @param id The row id of the item that was clicked
     * @return true if the callback consumed the long click, false otherwise
     */
    boolean onItemLongClick(EcoGalleryAdapterView<?> parent, View view, int position, long id);
}

/**
 * Register a callback to be invoked when an item in this AdapterView has
 * been clicked and held.
 *
 * @param listener The callback that will run
 */
public void setOnItemLongClickListener(OnItemLongClickListener listener) {
    // Long presses are only delivered when the view itself is long clickable.
    if (!isLongClickable()) {
        setLongClickable(true);
    }
    mOnItemLongClickListener = listener;
}

/**
 * @return The callback to be invoked when an item in this AdapterView has
 * been clicked and held, or null if no callback has been set.
 */
public final OnItemLongClickListener getOnItemLongClickListener() {
    return mOnItemLongClickListener;
}

/**
 * Interface definition for a callback to be invoked when an item in this
 * view has been selected.
 */
public interface OnItemSelectedListener {
    /**
     * Callback method to be invoked when an item in this view has been
     * selected.
     * <p/>
     * Implementers can call getItemAtPosition(position) if they need to
     * access the data associated with the selected item.
     *
     * @param parent The AdapterView where the selection happened
     * @param view The view within the AdapterView that was clicked
     * @param position The position of the view in the adapter
     * @param id The row id of the item that is selected
     */
    void onItemSelected(EcoGalleryAdapterView<?> parent, View view, int position, long id);

    /**
     * Callback method to be invoked when the selection disappears from this
     * view. The selection can disappear for instance when touch is
     * activated or when the adapter becomes empty.
     *
     * @param parent The AdapterView that now contains no selected item.
     */
    void onNothingSelected(EcoGalleryAdapterView<?> parent);
}

/**
 * Register a callback to be invoked when an item in this AdapterView has
 * been selected.
 *
 * @param listener The callback that will run
 */
public void setOnItemSelectedListener(OnItemSelectedListener listener) {
    mOnItemSelectedListener = listener;
}

/** @return The currently registered selection callback, or null if none. */
public final OnItemSelectedListener getOnItemSelectedListener() {
    return mOnItemSelectedListener;
}
/**
 * Extra menu information provided to the
 * {@link OnCreateContextMenuListener#onCreateContextMenu(ContextMenu, View, ContextMenuInfo) }
 * callback when a context menu is brought up for this AdapterView.
 */
public static class AdapterContextMenuInfo implements ContextMenuInfo {

    /**
     * The child view for which the context menu is being displayed. This
     * will be one of the children of this AdapterView.
     */
    public View targetView;

    /**
     * The position in the adapter for which the context menu is being
     * displayed.
     */
    public int position;

    /**
     * The row id of the item for which the context menu is being displayed.
     */
    public long id;

    /** Captures the target child view plus its adapter position and row id. */
    public AdapterContextMenuInfo(View targetView, int position, long id) {
        this.targetView = targetView;
        this.position = position;
        this.id = id;
    }
}
/**
 * Returns the adapter currently associated with this widget.
 *
 * @return The adapter used to provide this view's content.
 */
public abstract T getAdapter();

/**
 * Sets the adapter that provides the data and the views to represent the
 * data in this widget.
 *
 * @param adapter The adapter to use to create this view's content.
 */
public abstract void setAdapter(T adapter);

// Children of an AdapterView are managed exclusively by its adapter, so every
// direct child-mutation method below is disabled and throws.

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param child Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void addView(View child) {
    throw new UnsupportedOperationException("addView(View) is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param child Ignored.
 * @param index Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void addView(View child, int index) {
    throw new UnsupportedOperationException("addView(View, int) is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param child Ignored.
 * @param params Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void addView(View child, LayoutParams params) {
    throw new UnsupportedOperationException("addView(View, LayoutParams) " + "is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param child Ignored.
 * @param index Ignored.
 * @param params Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void addView(View child, int index, LayoutParams params) {
    throw new UnsupportedOperationException("addView(View, int, LayoutParams) " + "is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param child Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void removeView(View child) {
    throw new UnsupportedOperationException("removeView(View) is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @param index Ignored.
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void removeViewAt(int index) {
    throw new UnsupportedOperationException("removeViewAt(int) is not supported in AdapterView");
}

/**
 * This method is not supported and throws an UnsupportedOperationException
 * when called.
 *
 * @throws UnsupportedOperationException Every time this method is invoked.
 */
@Override
public void removeAllViews() {
    throw new UnsupportedOperationException("removeAllViews() is not supported in AdapterView");
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
    // Cache the laid-out height; rememberSyncState() snapshots it as mSyncHeight.
    mLayoutHeight = getHeight();
}

/**
 * Return the position of the currently selected item within the adapter's
 * data set.
 *
 * @return int Position (starting at 0), or {@link #INVALID_POSITION} if
 * there is nothing selected.
 */
@ViewDebug.CapturedViewProperty
public int getSelectedItemPosition() {
    return mNextSelectedPosition;
}

/**
 * @return The id corresponding to the currently selected item, or
 * {@link #INVALID_ROW_ID} if nothing is selected.
 */
@ViewDebug.CapturedViewProperty
public long getSelectedItemId() {
    return mNextSelectedRowId;
}

/**
 * @return The view corresponding to the currently selected item, or null if
 * nothing is selected.
 */
public abstract View getSelectedView();
/**
 * @return The data corresponding to the currently selected item, or null if
 * there is nothing selected.
 */
public Object getSelectedItem() {
    final T adapter = getAdapter();
    final int selection = getSelectedItemPosition();
    // No adapter, empty adapter, or no selection -> nothing to return.
    if (adapter == null || adapter.getCount() <= 0 || selection < 0) {
        return null;
    }
    return adapter.getItem(selection);
}
/**
 * @return The number of items owned by the Adapter associated with this
 * AdapterView. (This is the number of data items, which may be
 * larger than the number of visible views.)
 */
@ViewDebug.CapturedViewProperty
public int getCount() {
    return mItemCount;
}

/**
 * Get the position within the adapter's data set for the view, where view
 * is an adapter item or a descendant of an adapter item.
 *
 * @param view an adapter item, or a descendant of an adapter item. This must
 * be visible in this AdapterView at the time of the call.
 * @return the position within the adapter's data set of the view, or
 * {@link #INVALID_POSITION} if the view does not correspond to a
 * list item (or it is not currently visible).
 */
public int getPositionForView(View view) {
    View listItem = view;
    // Walk up the parent chain until the direct child of this AdapterView is
    // reached; getParent() eventually returns a non-View (the window root),
    // at which point the cast fails and we know the view is not ours.
    try {
        View v;
        while (!(v = (View) listItem.getParent()).equals(this)) {
            listItem = v;
        }
    } catch (ClassCastException e) {
        // We made it up to the window without finding this list view
        return INVALID_POSITION;
    }
    // Search the children for the list item
    final int childCount = getChildCount();
    for (int i = 0; i < childCount; i++) {
        if (getChildAt(i).equals(listItem)) {
            return mFirstPosition + i;
        }
    }
    // Child not found!
    return INVALID_POSITION;
}

/**
 * Returns the position within the adapter's data set for the first item
 * displayed on screen.
 *
 * @return The position within the adapter's data set
 */
public int getFirstVisiblePosition() {
    return mFirstPosition;
}

/**
 * Returns the position within the adapter's data set for the last item
 * displayed on screen.
 *
 * @return The position within the adapter's data set
 */
public int getLastVisiblePosition() {
    return mFirstPosition + getChildCount() - 1;
}
/**
 * Sets the currently selected item. To support accessibility, subclasses
 * that override this method must invoke the overridden super method first.
 *
 * @param position Index (starting at 0) of the data item to be selected.
 */
public abstract void setSelection(int position);

/**
 * Sets the view to show if the adapter is empty.
 */
public void setEmptyView(View emptyView) {
    mEmptyView = emptyView;
    // Immediately sync visibility with the adapter's current emptiness.
    final T adapter = getAdapter();
    final boolean empty = ((adapter == null) || adapter.isEmpty());
    updateEmptyStatus(empty);
}

/**
 * When the current adapter is empty, the AdapterView can display a special
 * view called the empty view. The empty view is used to provide feedback to
 * the user that no data is available in this AdapterView.
 *
 * @return The view to show if the adapter is empty.
 */
public View getEmptyView() {
    return mEmptyView;
}

/**
 * Indicates whether this view is in filter mode. Filter mode can for
 * instance be enabled by a user when typing on the keyboard.
 *
 * @return True if the view is in filter mode, false otherwise.
 */
boolean isInFilterMode() {
    // Base implementation never filters; subclasses may override.
    return false;
}
/**
 * Records the client's requested focusability and grants actual focusability
 * only when the view has content (or the filter UI is showing).
 */
@Override
public void setFocusable(boolean focusable) {
    final T adapter = getAdapter();
    final boolean empty = adapter == null || adapter.getCount() == 0;
    mDesiredFocusableState = focusable;
    if (!focusable) {
        // Losing plain focusability also clears touch-mode focusability.
        mDesiredFocusableInTouchModeState = false;
    }
    boolean grantFocus = false;
    if (focusable) {
        grantFocus = !empty || isInFilterMode();
    }
    super.setFocusable(grantFocus);
}

/**
 * Records the client's requested touch-mode focusability and grants it only
 * when the view has content (or the filter UI is showing).
 */
@Override
public void setFocusableInTouchMode(boolean focusable) {
    final T adapter = getAdapter();
    final boolean empty = adapter == null || adapter.getCount() == 0;
    mDesiredFocusableInTouchModeState = focusable;
    if (focusable) {
        // Touch-mode focusability implies plain focusability.
        mDesiredFocusableState = true;
    }
    boolean grantFocus = false;
    if (focusable) {
        grantFocus = !empty || isInFilterMode();
    }
    super.setFocusableInTouchMode(grantFocus);
}
/**
 * Re-applies the focusability the client requested via setFocusable()/
 * setFocusableInTouchMode(), gated on whether there is any content to focus,
 * and refreshes the empty-view visibility.
 */
void checkFocus() {
    final T adapter = getAdapter();
    final boolean empty = adapter == null || adapter.getCount() == 0;
    final boolean focusable = !empty || isInFilterMode();
    // The order in which we set focusable in touch mode/focusable may
    // matter
    // for the client, see View.setFocusableInTouchMode() comments for more
    // details
    super.setFocusableInTouchMode(focusable && mDesiredFocusableInTouchModeState);
    super.setFocusable(focusable && mDesiredFocusableState);
    if (mEmptyView != null) {
        updateEmptyStatus((adapter == null) || adapter.isEmpty());
    }
}
/**
 * Update the status of the list based on the empty parameter. If empty is
 * true and we have an empty view, display it. In all the other cases, make
 * sure that the listview is VISIBLE and that the empty view is GONE (if
 * it's not null).
 */
private void updateEmptyStatus(boolean empty) {
    if (isInFilterMode()) {
        empty = false;
    }
    if (!empty) {
        // Content available: show the list, hide the empty view.
        if (mEmptyView != null) {
            mEmptyView.setVisibility(View.GONE);
        }
        setVisibility(View.VISIBLE);
        return;
    }
    if (mEmptyView != null) {
        mEmptyView.setVisibility(View.VISIBLE);
        setVisibility(View.GONE);
    } else {
        // If the caller just removed our empty view, make sure the list
        // view is visible
        setVisibility(View.VISIBLE);
    }
    // We are now GONE, so pending layouts will not be dispatched.
    // Force one here to make sure that the state of the list matches
    // the state of the adapter.
    if (mDataChanged) {
        this.onLayout(false, getLeft(), getTop(), getRight(), getBottom());
    }
}
/**
 * Gets the data associated with the specified position in the list.
 *
 * @param position Which data to get
 * @return The data associated with the specified position in the list,
 * or null when there is no adapter or the position is negative.
 */
public Object getItemAtPosition(int position) {
    final T adapter = getAdapter();
    if (adapter == null || position < 0) {
        return null;
    }
    return adapter.getItem(position);
}

/**
 * Gets the row id associated with the specified position in the list.
 *
 * @param position Which row id to get
 * @return The row id, or {@link #INVALID_ROW_ID} when there is no adapter
 * or the position is negative.
 */
public long getItemIdAtPosition(int position) {
    final T adapter = getAdapter();
    if (adapter == null || position < 0) {
        return INVALID_ROW_ID;
    }
    return adapter.getItemId(position);
}
@Override
public void setOnClickListener(OnClickListener l) {
    // Item clicks are delivered per position, not per view.
    throw new RuntimeException("Don't call setOnClickListener for an AdapterView. "
            + "You probably want setOnItemClickListener instead");
}

/**
 * Override to prevent freezing of any views created by the adapter.
 */
@Override
protected void dispatchSaveInstanceState(SparseArray<Parcelable> container) {
    dispatchFreezeSelfOnly(container);
}

/**
 * Override to prevent thawing of any views created by the adapter.
 */
@Override
protected void dispatchRestoreInstanceState(SparseArray<Parcelable> container) {
    dispatchThawSelfOnly(container);
}

/**
 * Observes the adapter and keeps this view's cached item counts and
 * selection state in sync when the data changes or becomes invalid.
 */
class AdapterDataSetObserver extends DataSetObserver {

    // Saved view state, kept so a repopulated stable-id cursor can restore it.
    private Parcelable mInstanceState = null;

    @Override
    public void onChanged() {
        mDataChanged = true;
        mOldItemCount = mItemCount;
        mItemCount = getAdapter().getCount();
        // Detect the case where a cursor that was previously invalidated
        // has
        // been repopulated with new data.
        if (EcoGalleryAdapterView.this.getAdapter().hasStableIds() && mInstanceState != null && mOldItemCount == 0
                && mItemCount > 0) {
            EcoGalleryAdapterView.this.onRestoreInstanceState(mInstanceState);
            mInstanceState = null;
        } else {
            rememberSyncState();
        }
        checkFocus();
        requestLayout();
    }

    @Override
    public void onInvalidated() {
        mDataChanged = true;
        if (EcoGalleryAdapterView.this.getAdapter().hasStableIds()) {
            // Remember the current state for the case where our hosting
            // activity is being
            // stopped and later restarted
            mInstanceState = EcoGalleryAdapterView.this.onSaveInstanceState();
        }
        // Data is invalid so we should reset our state
        mOldItemCount = mItemCount;
        mItemCount = 0;
        mSelectedPosition = INVALID_POSITION;
        mSelectedRowId = INVALID_ROW_ID;
        mNextSelectedPosition = INVALID_POSITION;
        mNextSelectedRowId = INVALID_ROW_ID;
        mNeedSync = false;
        checkFocus();
        requestLayout();
    }

    /** Drops any saved instance state. */
    public void clearSavedState() {
        mInstanceState = null;
    }
}

@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    // Drop any pending selection callback so it cannot fire after detach.
    removeCallbacks(mSelectionNotifier);
}

/**
 * Runnable that delivers the deferred selection callback once the view's
 * data is back in sync with the adapter; while out of sync it re-posts
 * itself until the adapter has been synched (or removed).
 */
private class SelectionNotifier implements Runnable {
    public void run() {
        if (mDataChanged) {
            // Data has changed between when this SelectionNotifier
            // was posted and now. We need to wait until the AdapterView
            // has been synched to the new data.
            if (getAdapter() != null) {
                post(this);
            }
        } else {
            fireOnSelected();
        }
    }
}
/**
 * Notifies the selection listener of a selection change, deferring the
 * callback via SelectionNotifier while a layout pass is in progress, and
 * emits an accessibility selection event when appropriate.
 */
void selectionChanged() {
    if (mOnItemSelectedListener != null) {
        if (mInLayout || mBlockLayoutRequests) {
            // If we are in a layout traversal, defer notification
            // by posting. This ensures that the view tree is
            // in a consistent state and is able to accommodate
            // new layout or invalidate requests.
            if (mSelectionNotifier == null) {
                mSelectionNotifier = new SelectionNotifier();
            }
            post(mSelectionNotifier);
        } else {
            fireOnSelected();
        }
    }
    // we fire selection events here not in View
    if (mSelectedPosition != ListView.INVALID_POSITION && isShown() && !isInTouchMode()) {
        sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_SELECTED);
    }
}
/**
 * Delivers the current selection (or "nothing selected") to the registered
 * OnItemSelectedListener, if any.
 */
private void fireOnSelected() {
    final OnItemSelectedListener listener = mOnItemSelectedListener;
    if (listener == null) {
        return;
    }
    final int selection = this.getSelectedItemPosition();
    if (selection >= 0) {
        View selectedView = getSelectedView();
        listener.onItemSelected(this, selectedView, selection, getAdapter().getItemId(selection));
    } else {
        listener.onNothingSelected(this);
    }
}
@Override
public boolean dispatchPopulateAccessibilityEvent(AccessibilityEvent event) {
    boolean populated = false;
    // This is an exceptional case which occurs when a window gets the
    // focus and sends a focus event via its focused child to announce
    // current focus/selection. AdapterView fires selection but not focus
    // events so we change the event type here.
    if (event.getEventType() == AccessibilityEvent.TYPE_VIEW_FOCUSED) {
        event.setEventType(AccessibilityEvent.TYPE_VIEW_SELECTED);
    }
    // we send selection events only from AdapterView to avoid
    // generation of such event for each child
    View selectedView = getSelectedView();
    if (selectedView != null) {
        populated = selectedView.dispatchPopulateAccessibilityEvent(event);
    }
    if (!populated) {
        // Selected child did not describe itself; fall back to describing
        // the selection ourselves.
        if (selectedView != null) {
            event.setEnabled(selectedView.isEnabled());
        }
        event.setItemCount(getCount());
        event.setCurrentItemIndex(getSelectedItemPosition());
    }
    return populated;
}

@Override
protected boolean canAnimate() {
    // Layout animations only make sense when there is at least one item.
    return super.canAnimate() && mItemCount > 0;
}
/**
 * Reconciles the selection after the adapter's data set has changed: first
 * tries to re-find the previously selected row id (when a sync was
 * requested), then falls back to keeping the same position clamped into
 * range, and finally clears the selection entirely if nothing selectable
 * exists.
 */
void handleDataChanged() {
    final int count = mItemCount;
    boolean found = false;
    if (count > 0) {
        int newPos;
        // Find the row we are supposed to sync to
        if (mNeedSync) {
            // Update this first, since setNextSelectedPositionInt inspects
            // it
            mNeedSync = false;
            // See if we can find a position in the new data with the same
            // id as the old selection
            newPos = findSyncPosition();
            if (newPos >= 0) {
                // Verify that new selection is selectable
                int selectablePos = lookForSelectablePosition(newPos, true);
                if (selectablePos == newPos) {
                    // Same row id is selected
                    setNextSelectedPositionInt(newPos);
                    found = true;
                }
            }
        }
        if (!found) {
            // Try to use the same position if we can't find matching data
            newPos = getSelectedItemPosition();
            // Pin position to the available range
            if (newPos >= count) {
                newPos = count - 1;
            }
            if (newPos < 0) {
                newPos = 0;
            }
            // Make sure we select something selectable -- first look down
            int selectablePos = lookForSelectablePosition(newPos, true);
            if (selectablePos < 0) {
                // Looking down didn't work -- try looking up
                selectablePos = lookForSelectablePosition(newPos, false);
            }
            if (selectablePos >= 0) {
                setNextSelectedPositionInt(selectablePos);
                checkSelectionChanged();
                found = true;
            }
        }
    }
    if (!found) {
        // Nothing is selected
        mSelectedPosition = INVALID_POSITION;
        mSelectedRowId = INVALID_ROW_ID;
        mNextSelectedPosition = INVALID_POSITION;
        mNextSelectedRowId = INVALID_ROW_ID;
        mNeedSync = false;
        checkSelectionChanged();
    }
}
/**
 * Fires selectionChanged() when the current selection differs from the one
 * last reported, then records the reported selection.
 */
void checkSelectionChanged() {
    final boolean positionMoved = mSelectedPosition != mOldSelectedPosition;
    final boolean rowIdMoved = mSelectedRowId != mOldSelectedRowId;
    if (positionMoved || rowIdMoved) {
        selectionChanged();
        mOldSelectedPosition = mSelectedPosition;
        mOldSelectedRowId = mSelectedRowId;
    }
}
/**
 * Searches the adapter for a position matching mSyncRowId. The search
 * starts at mSyncPosition and then alternates between moving up and moving
 * down until 1) we find the right position, or 2) we run out of time, or 3)
 * we have looked at every position
 *
 * @return Position of the row that matches mSyncRowId, or
 * {@link #INVALID_POSITION} if it can't be found
 */
int findSyncPosition() {
    int count = mItemCount;
    if (count == 0) {
        return INVALID_POSITION;
    }
    long idToMatch = mSyncRowId;
    int seed = mSyncPosition;
    // If there isn't a selection don't hunt for it
    if (idToMatch == INVALID_ROW_ID) {
        return INVALID_POSITION;
    }
    // Pin seed to reasonable values
    seed = Math.max(0, seed);
    seed = Math.min(count - 1, seed);
    // Give up after a bounded amount of wall-clock time.
    long endTime = SystemClock.uptimeMillis() + SYNC_MAX_DURATION_MILLIS;
    long rowId;
    // first position scanned so far
    int first = seed;
    // last position scanned so far
    int last = seed;
    // True if we should move down on the next iteration
    boolean next = false;
    // True when we have looked at the first item in the data
    boolean hitFirst;
    // True when we have looked at the last item in the data
    boolean hitLast;
    // Query item IDs straight from the adapter (instead of
    // getItemIdAtPosition), so we need the adapter itself.
    T adapter = getAdapter();
    if (adapter == null) {
        return INVALID_POSITION;
    }
    while (SystemClock.uptimeMillis() <= endTime) {
        rowId = adapter.getItemId(seed);
        if (rowId == idToMatch) {
            // Found it!
            return seed;
        }
        hitLast = last == count - 1;
        hitFirst = first == 0;
        if (hitLast && hitFirst) {
            // Looked at everything
            break;
        }
        if (hitFirst || (next && !hitLast)) {
            // Either we hit the top, or we are trying to move down
            last++;
            seed = last;
            // Try going up next time
            next = false;
        } else if (hitLast || (!next && !hitFirst)) {
            // Either we hit the bottom, or we are trying to move up
            first--;
            seed = first;
            // Try going down next time
            next = true;
        }
    }
    return INVALID_POSITION;
}
/**
 * Find a position that can be selected (i.e., is not a separator).
 *
 * @param position The starting position to look at.
 * @param lookDown Whether to look down for other positions.
 * @return The next selectable position starting at position and then
 * searching either up or down. Returns {@link #INVALID_POSITION} if
 * nothing can be found.
 */
int lookForSelectablePosition(int position, boolean lookDown) {
    // Base implementation: every position is selectable. Subclasses with
    // separators can override.
    return position;
}

/**
 * Utility to keep mSelectedPosition and mSelectedRowId in sync
 *
 * @param position Our current position
 */
void setSelectedPositionInt(int position) {
    mSelectedPosition = position;
    mSelectedRowId = getItemIdAtPosition(position);
}

/**
 * Utility to keep mNextSelectedPosition and mNextSelectedRowId in sync
 *
 * @param position Intended value for mSelectedPosition the next time we go
 * through layout
 */
void setNextSelectedPositionInt(int position) {
    mNextSelectedPosition = position;
    mNextSelectedRowId = getItemIdAtPosition(position);
    // If we are trying to sync to the selection, update that too
    if (mNeedSync && mSyncMode == SYNC_SELECTED_POSITION && position >= 0) {
        mSyncPosition = position;
        mSyncRowId = mNextSelectedRowId;
    }
}

/**
 * Remember enough information to restore the screen state when the data has
 * changed.
 */
void rememberSyncState() {
    if (getChildCount() > 0) {
        mNeedSync = true;
        mSyncHeight = mLayoutHeight;
        if (mSelectedPosition >= 0) {
            // Sync the selection state
            View v = getChildAt(mSelectedPosition - mFirstPosition);
            mSyncRowId = mNextSelectedRowId;
            mSyncPosition = mNextSelectedPosition;
            if (v != null) {
                mSpecificTop = v.getTop();
            }
            mSyncMode = SYNC_SELECTED_POSITION;
        } else {
            // Sync based on the offset of the first view
            View v = getChildAt(0);
            T adapter = getAdapter();
            if (mFirstPosition >= 0 && mFirstPosition < adapter.getCount()) {
                mSyncRowId = adapter.getItemId(mFirstPosition);
            } else {
                mSyncRowId = NO_ID;
            }
            mSyncPosition = mFirstPosition;
            if (v != null) {
                mSpecificTop = v.getTop();
            }
            mSyncMode = SYNC_FIRST_POSITION;
        }
    }
}
} | apache-2.0 |
zongchao/redisTest | redisTest/src/main/java/com/neko/dao/mapper/UserMapper.java | 784 | package com.neko.dao.mapper;
import com.neko.dao.po.User;
import com.neko.dao.po.UserExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper exposing generator-style CRUD operations for the User table.
 * "ByExample" variants take dynamic criteria; "Selective" variants only write
 * the non-null fields of the record.
 */
public interface UserMapper {
    /** Counts rows matching the example criteria. */
    int countByExample(UserExample example);

    /** Deletes rows matching the example criteria; returns the affected row count. */
    int deleteByExample(UserExample example);

    /** Deletes the row with the given primary key. */
    int deleteByPrimaryKey(Integer id);

    /** Inserts all fields of the record. */
    int insert(User record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(User record);

    /** Returns the rows matching the example criteria. */
    List<User> selectByExample(UserExample example);

    /** Returns the row with the given primary key. */
    User selectByPrimaryKey(Integer id);

    /** Updates the non-null record fields on rows matching the example. */
    int updateByExampleSelective(@Param("record") User record, @Param("example") UserExample example);

    /** Updates all record fields on rows matching the example. */
    int updateByExample(@Param("record") User record, @Param("example") UserExample example);

    /** Updates the non-null record fields on the row with the record's key. */
    int updateByPrimaryKeySelective(User record);

    /** Updates all record fields on the row with the record's key. */
    int updateByPrimaryKey(User record);
}
liuhaoran/work | mongosRW/src/mongosRW/RWMain.java | 2091 | package mongosRW;
import java.net.UnknownHostException;
import mongosRW.MongoDb;
/**
 * Launches a concurrent MongoDB read/write benchmark: one scheduler thread
 * drives insert operations and one drives select operations, each fanned out
 * over its own worker-thread pool.
 */
public class RWMain {

    /**
     * Entry point. Expects exactly seven arguments:
     * args[0] write operation (insertOne | insertMany),
     * args[1] rows to read,  args[2] reader threads,
     * args[3] rows to write, args[4] writer threads,
     * args[5] read batch size, args[6] write batch size.
     *
     * @throws UnknownHostException if the mongos host cannot be resolved.
     */
    public static void main(String[] args) throws UnknownHostException {
        // Bug fix: the code reads args[0]..args[6] (7 arguments) and usage()
        // documents 7 parameters, but the original guard only required 4,
        // allowing an ArrayIndexOutOfBoundsException. Require all 7.
        if (args.length < 7) {
            usage(1);
        }
        String readOperation = "select";
        String writeOperation = args[0];
        long readRows = Long.parseLong(args[1]);      // rows to read in total
        int readThreads = Integer.parseInt(args[2]);
        long writeRows = Long.parseLong(args[3]);     // rows to write in total
        int writeThreads = Integer.parseInt(args[4]);
        long readBatch = Long.parseLong(args[5]);
        long writeBatch = Long.parseLong(args[6]);
        String host = "172.17.0.9"; // mongos service address
        String dbname = "rtm";      // database name
        // Schedule one writer and one reader; each MongoInvoke thread fans the
        // work out over its own pool.
        MongoInvoke writer = new MongoInvoke(writeOperation, writeRows, host, dbname, writeBatch, writeThreads);
        MongoInvoke reader = new MongoInvoke(readOperation, readRows, host, dbname, readBatch, readThreads);
        writer.start();
        reader.start();
    }

    /**
     * Prints usage and terminates the JVM with the given exit code.
     *
     * @param errorno process exit status to report.
     */
    public static void usage(int errorno) {
        System.out.print("Usage:\n");
        System.out.print("mongo Select:\n");
        System.out.print("java -jar mongoRW.jar <WriteOp:insertOne|insertMany> <Rrows> <Rthread> <Wrows> <Wthreads> <Rbatch> <Wbatch> \n");
        System.exit(errorno);
    }
}
| apache-2.0 |
ganshane/lichen | lichen-migration/src/test/java/lichen/migration/internal/ColumnDefinitionTest.java | 1490 | // Copyright 2013 the original author or authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lichen.migration.internal;
import junit.framework.Assert;
import lichen.migration.services.Options;
import org.junit.Test;
/**
*
* @author jcai
*/
public class ColumnDefinitionTest {

    /**
     * Verifies that a ColumnDefinition annotated with
     * {@link ColumnSupportsAutoIncrement} reports auto-increment after
     * initialize(), and that the AutoIncrement options added (twice, on
     * purpose) in A's constructor are consumed so the options list ends empty.
     */
    @Test
    public void testAutotoIncrement() {
        A a = new A();
        a.setColumnNameOpt(Option.some("test_column"));
        a.initialize();
        Assert.assertTrue(a.isAutoIncrement());
        // NOTE(review): asserting size 0 implies initialize() removes the
        // recognized options, including duplicates -- confirm in ColumnDefinition.
        Assert.assertEquals(a.getOptions().size(), 0);
    }

    /** Minimal ColumnDefinition that deliberately adds AutoIncrement twice. */
    @ColumnSupportsAutoIncrement
    class A extends ColumnDefinition {
        public A() {
            Options optionsService = new OptionsImpl();
            getOptions().add(optionsService.AutoIncrement());
            getOptions().add(optionsService.AutoIncrement());
        }

        // No SQL rendering is needed for this test.
        @Override
        protected String sql() {
            return null;
        }
    }
}
| apache-2.0 |
waynezhang87/MCommon | sdk/src/main/java/com/waynezhang/mcommon/xwidget/PageManager.java | 7330 | package com.waynezhang.mcommon.xwidget;
import android.widget.ListView;
import com.waynezhang.mcommon.compt.ArrayAdapterCompat;
import com.waynezhang.mcommon.util.ListViewUtil;
import java.lang.ref.WeakReference;
import java.util.List;
import thirdpart.com.handmark.pulltorefresh.library.PullToRefreshBase;
import thirdpart.com.handmark.pulltorefresh.library.PullToRefreshListView;
/**
* Created by don on 1/26/15.
*/
public class PageManager<T> {
protected final WeakReference<PullToRefreshListView> mPtrList;
protected final ArrayAdapterCompat<T> mAdapter;
protected final int mStartPage;
protected final int mNumPageItem;
protected int mCurrentPageNo;
protected boolean enableRefresh;
protected PageLoadListener mPageLoadListener;
protected boolean loading;
/**
 * Creates a pager bound to a pull-to-refresh list with a known server page
 * size.
 *
 * @param ptrList the pull-to-refresh list to drive (held via WeakReference)
 * @param adapter the adapter that receives each loaded page
 * @param startPage the first page number used by the backend
 * @param numPageItem the number of items the server returns per page
 */
public PageManager(PullToRefreshListView ptrList, ArrayAdapterCompat<T> adapter, int startPage, final int numPageItem) {
    // numPageItem is the item count of one server-returned page.
    this.mPtrList = new WeakReference<PullToRefreshListView>(ptrList);
    this.mAdapter = adapter;
    this.mStartPage = startPage;
    this.mNumPageItem = numPageItem;
    ptrList.setAdapter(mAdapter);
    ptrList.setMode(PullToRefreshBase.Mode.PULL_FROM_END);
    mCurrentPageNo = mStartPage - 1;
    ptrList.setOnLastItemVisibleListener(new PullToRefreshBase.OnLastItemVisibleListener() {
        @Override
        public void onLastItemVisible() {
            // Fix: when the last item of the first page is just visible and
            // the second page has no data, the PTR view would jump to the
            // bottom by itself -- skip loading in that case.
            if (mCurrentPageNo == mStartPage && mAdapter.getCount() < numPageItem) {
                return;
            }
            // if (mCurrentPageNo == mStartPage && mPtrList.getRefreshableView().getCount() >= 3 && mPtrList.getRefreshableView().getCount() <= 7) {
            // return;
            // }
            loadMorePage();
        }
    });
    ptrList.setOnRefreshListener(new PullToRefreshBase.OnRefreshListener2<ListView>() {
        @Override
        public void onPullDownToRefresh(PullToRefreshBase<ListView> refreshView) {
            if (enableRefresh) {
                refresh();
            }
        }

        @Override
        public void onPullUpToRefresh(PullToRefreshBase<ListView> refreshView) {
            loadMorePage();
        }
    });
    ptrList.setShowIndicator(false);
}
/**
 * Creates a pager without a known server page size (mNumPageItem stays 0);
 * uses a child-count heuristic instead of the page-size check to suppress
 * the spurious first-page load.
 *
 * @param ptrList the pull-to-refresh list to drive (held via WeakReference)
 * @param adapter the adapter that receives each loaded page
 * @param startPage the first page number used by the backend
 */
public PageManager(final PullToRefreshListView ptrList, ArrayAdapterCompat<T> adapter, int startPage) {
    this.mNumPageItem = 0;
    this.mPtrList = new WeakReference<PullToRefreshListView>(ptrList);
    this.mAdapter = adapter;
    this.mStartPage = startPage;
    ptrList.setAdapter(mAdapter);
    ptrList.setMode(PullToRefreshBase.Mode.PULL_FROM_END);
    mCurrentPageNo = mStartPage - 1;
    ptrList.setOnLastItemVisibleListener(new PullToRefreshBase.OnLastItemVisibleListener() {
        @Override
        public void onLastItemVisible() {
            // Fix: when the last item of the first page is just visible and
            // the second page has no data, the PTR view would jump to the
            // bottom by itself -- skip loading in that heuristic range.
            if (mCurrentPageNo == mStartPage && ptrList.getRefreshableView().getCount() >= 3 && ptrList.getRefreshableView().getCount() <= 7) {
                return;
            }
            loadMorePage();
        }
    });
    ptrList.setOnRefreshListener(new PullToRefreshBase.OnRefreshListener2<ListView>() {
        @Override
        public void onPullDownToRefresh(PullToRefreshBase<ListView> refreshView) {
            if (enableRefresh) {
                refresh();
            }
        }

        @Override
        public void onPullUpToRefresh(PullToRefreshBase<ListView> refreshView) {
            loadMorePage();
        }
    });
    ptrList.setShowIndicator(false);
}
/**
 * Turns pull-down refresh on or off. When enabled the list accepts both pull-down
 * (refresh) and pull-up (load more); when disabled, only pull-up remains.
 *
 * @param enable whether pull-down refresh should be allowed
 */
public void enableRefresh(boolean enable) {
    enableRefresh = enable;
    final PullToRefreshListView list = mPtrList.get();
    if (list == null) {
        return; // list view already garbage-collected
    }
    list.setMode(enableRefresh ? PullToRefreshBase.Mode.BOTH : PullToRefreshBase.Mode.PULL_FROM_END);
}
/**
 * Disables both pull-down refresh and pull-up load-more by putting the list
 * into DISABLED mode.
 */
public void disableRefresh() {
    enableRefresh = false;
    final PullToRefreshListView list = mPtrList.get();
    if (list == null) {
        return; // list view already garbage-collected
    }
    list.setMode(PullToRefreshBase.Mode.DISABLED);
}
/**
 * Registers the callback that performs the actual page request. Must be set
 * before refresh()/loadFirstPage()/loadMorePage() can be triggered.
 */
public void setPageLoadListener(PageLoadListener pageLoadListener) {
    this.mPageLoadListener = pageLoadListener;
}
/**
 * Requests the next page. Both the pull-up gesture and the last-item-visible
 * callback funnel into this method, so it is serialized with the {@code loading}
 * flag to prevent the same page from being loaded twice; the flag is cleared by
 * bind()/onFailure().
 */
public void loadMorePage() {
    if (loading) {
        return;
    }
    if (mPageLoadListener == null) {
        // Fix: without a registered listener the original code set loading=true,
        // bumped the page counter and then crashed with an NPE, leaving the pager
        // permanently stuck. Fail soft until setPageLoadListener() is called.
        return;
    }
    loading = true;
    mPageLoadListener.pageLoad(++mCurrentPageNo, false, false);
}
/**
 * Resets the pager to the first page and requests it (isFirstPage=true,
 * isRefresh=false). Use for the initial load of a screen.
 */
public void loadFirstPage() {
    mCurrentPageNo = mStartPage;
    mPageLoadListener.pageLoad(mCurrentPageNo, true, false);
}

/**
 * Resets the pager to the first page and requests it as a user-initiated
 * refresh (isFirstPage=true, isRefresh=true).
 */
public void refresh() {
    mCurrentPageNo = mStartPage;
    mPageLoadListener.pageLoad(mCurrentPageNo, true, true);
}
/**
 * Binds one page of results into the adapter and finishes the refresh cycle.
 *
 * @param list      results for {@code pageNo}; {@code null} is treated as an empty page
 * @param pageNo    page number the results belong to
 * @param needGoTop whether to scroll back to the top when the first page arrives
 */
public void bind(List<T> list, int pageNo, final boolean needGoTop) {
    final PullToRefreshListView ptrList = mPtrList.get();
    if (ptrList == null) {
        return;
    }
    // Fix: a null page (e.g. malformed server response) used to NPE on list.isEmpty()
    // below, which left 'loading' stuck at true and the refresh indicator spinning.
    final boolean emptyPage = (list == null || list.isEmpty());
    if (pageNo == mStartPage) {
        ptrList.post(new Runnable() {
            @Override
            public void run() {
                if (needGoTop) {
                    ptrList.getRefreshableView().setSelection(0);
                }
                ListViewUtil.stopScrolling(ptrList.getRefreshableView());
            }
        });
        mAdapter.clear();
    } else if (emptyPage) {
        // Roll back the page counter bumped by loadMorePage().
        mCurrentPageNo--;
        ToastUtil.showToast("没有更多结果了");
    }
    if (list != null) {
        mAdapter.addAll(list);
    }
    loading = false;
    ptrList.onRefreshComplete();
}
/**
 * Binds one page of results, always scrolling back to the top when the first
 * page arrives. Equivalent to {@code bind(list, pageNo, true)}.
 *
 * @param list   results for {@code pageNo}
 * @param pageNo page number the results belong to
 */
public void bind(List<T> list, int pageNo) {
    // Fix: this body was a line-for-line copy of the three-argument overload with
    // needGoTop hard-wired to true; delegate instead of duplicating the logic.
    bind(list, pageNo, true);
}
/**
 * Rolls the pager back after a failed page request: clears the in-flight flag,
 * decrements the page counter so the same page can be retried, and stops the
 * refresh indicator if the list is still alive.
 */
public void onFailure() {
    loading = false;
    mCurrentPageNo--;
    final PullToRefreshListView list = mPtrList.get();
    if (list == null) {
        return; // list view already garbage-collected
    }
    list.onRefreshComplete();
}
/** @return the adapter that accumulates the loaded pages. */
public ArrayAdapterCompat<T> getAdapter() {
    return mAdapter;
}

/** Callback that performs the actual (typically asynchronous) page request. */
public interface PageLoadListener {
    /**
     * Loads one page of data.
     *
     * @param pageNo      page to request; starts at the startPage given to the constructor
     * @param isFirstPage true when the first page is requested (initial load or refresh)
     * @param isRefresh   true when triggered by a user pull-down refresh
     */
    void pageLoad(int pageNo, boolean isFirstPage, boolean isRefresh);
}
}
| apache-2.0 |
koert/monitor-app | monitor-web/src/main/java/nl/zencode/monitor/security/RequiresLoggedInUser.java | 306 | package nl.zencode.monitor.security;
import javax.interceptor.InterceptorBinding;
import java.lang.annotation.*;
/**
 * Interceptor binding marking a method or type as requiring a logged-in user;
 * the check itself is presumably performed by a matching interceptor elsewhere
 * in the project (not visible here — verify against the interceptor class).
 *
 * <p>Runtime-retained, inherited by subclasses, applicable to methods and types.
 *
 * @author Koert Zeilstra
 */
@Inherited
@InterceptorBinding
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface RequiresLoggedInUser {
}
anuraaga/armeria | testing/common/src/main/java/com/linecorp/armeria/internal/testing/package-info.java | 798 | /*
* Copyright 2019 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
/**
* Internal testing utilities.
*/
@NonNullByDefault
package com.linecorp.armeria.internal.testing;
import com.linecorp.armeria.common.util.NonNullByDefault;
| apache-2.0 |
jinfengni/incubator-drill | exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java | 45906 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
import org.apache.drill.common.expression.ExpressionStringBuilder;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.ValueExpressions;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.compile.sig.ConstantExpressionIdentifier;
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
import org.apache.drill.exec.expr.stat.ParquetFilterPredicate;
import org.apache.drill.exec.ops.UdfUtilities;
import org.apache.drill.exec.physical.EndpointAffinity;
import org.apache.drill.exec.physical.PhysicalOperatorSetupException;
import org.apache.drill.exec.physical.base.AbstractFileGroupScan;
import org.apache.drill.exec.physical.base.GroupScan;
import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.physical.base.ScanStats;
import org.apache.drill.exec.physical.base.ScanStats.GroupScanProperty;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.store.ColumnExplorer;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.exec.store.dfs.FileSelection;
import org.apache.drill.exec.util.DrillFileSystemUtil;
import org.apache.drill.exec.store.dfs.MetadataContext;
import org.apache.drill.exec.store.dfs.MetadataContext.PruneStatus;
import org.apache.drill.exec.store.dfs.ReadEntryFromHDFS;
import org.apache.drill.exec.store.dfs.ReadEntryWithPath;
import org.apache.drill.exec.store.dfs.easy.FileWork;
import org.apache.drill.exec.store.parquet.Metadata.ColumnMetadata;
import org.apache.drill.exec.store.parquet.Metadata.ParquetFileMetadata;
import org.apache.drill.exec.store.parquet.Metadata.ParquetTableMetadataBase;
import org.apache.drill.exec.store.parquet.Metadata.RowGroupMetadata;
import org.apache.drill.exec.store.parquet.stat.ColumnStatistics;
import org.apache.drill.exec.store.parquet.stat.ParquetMetaStatCollector;
import org.apache.drill.exec.store.schedule.AffinityCreator;
import org.apache.drill.exec.store.schedule.AssignmentCreator;
import org.apache.drill.exec.store.schedule.CompleteWork;
import org.apache.drill.exec.store.schedule.EndpointByteMap;
import org.apache.drill.exec.store.schedule.EndpointByteMapImpl;
import org.apache.drill.exec.util.ImpersonationUtil;
import org.apache.drill.exec.vector.NullableBigIntVector;
import org.apache.drill.exec.vector.NullableDateVector;
import org.apache.drill.exec.vector.NullableDecimal18Vector;
import org.apache.drill.exec.vector.NullableFloat4Vector;
import org.apache.drill.exec.vector.NullableFloat8Vector;
import org.apache.drill.exec.vector.NullableIntVector;
import org.apache.drill.exec.vector.NullableSmallIntVector;
import org.apache.drill.exec.vector.NullableTimeStampVector;
import org.apache.drill.exec.vector.NullableTimeVector;
import org.apache.drill.exec.vector.NullableTinyIntVector;
import org.apache.drill.exec.vector.NullableUInt1Vector;
import org.apache.drill.exec.vector.NullableUInt2Vector;
import org.apache.drill.exec.vector.NullableUInt4Vector;
import org.apache.drill.exec.vector.NullableVarBinaryVector;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.joda.time.DateTimeConstants;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@JsonTypeName("parquet-scan")
public class ParquetGroupScan extends AbstractFileGroupScan {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetGroupScan.class);

// Files/directories named in the physical plan (may be just the selection root
// when pruning was not applicable — see the FileSelection constructor).
private final List<ReadEntryWithPath> entries;
private final ParquetFormatPlugin formatPlugin;
private final ParquetFormatConfig formatConfig;
// File system created for this scan's user (impersonation-aware).
private final DrillFileSystem fs;
private String selectionRoot;
// True when table metadata was served from a metadata cache file.
private boolean usedMetadataCache = false;
private List<EndpointAffinity> endpointAffinities;
private List<SchemaPath> columns;
// Minor-fragment id -> row groups assigned to it (populated at parallelization time).
private ListMultimap<Integer, RowGroupInfo> mappings;
private List<RowGroupInfo> rowGroupInfos;
// Filter expression pushed into the scan (BooleanExpression.TRUE when absent).
private LogicalExpression filter;
/**
 * The parquet table metadata may have already been read
 * from a metadata cache file earlier; we can re-use during
 * the ParquetGroupScan and avoid extra loading time.
 */
private Metadata.ParquetTableMetadataBase parquetTableMetadata = null;
private String cacheFileRoot = null;
/*
 * total number of rows (obtained from parquet footer)
 */
private long rowCount;
/*
 * total number of non-null value for each column in parquet files.
 */
private Map<SchemaPath, Long> columnValueCounts;
/**
 * Deserialization constructor used when the physical plan is rehydrated from JSON.
 * Re-resolves the format plugin from the registry and re-reads table metadata
 * via {@code init(null)}.
 */
@JsonCreator public ParquetGroupScan( //
    @JsonProperty("userName") String userName,
    @JsonProperty("entries") List<ReadEntryWithPath> entries,//
    @JsonProperty("storage") StoragePluginConfig storageConfig, //
    @JsonProperty("format") FormatPluginConfig formatConfig, //
    @JacksonInject StoragePluginRegistry engineRegistry, //
    @JsonProperty("columns") List<SchemaPath> columns, //
    @JsonProperty("selectionRoot") String selectionRoot, //
    @JsonProperty("cacheFileRoot") String cacheFileRoot, //
    @JsonProperty("filter") LogicalExpression filter
) throws IOException, ExecutionSetupException {
    super(ImpersonationUtil.resolveUserName(userName));
    this.columns = columns;
    if (formatConfig == null) {
        formatConfig = new ParquetFormatConfig();
    }
    Preconditions.checkNotNull(storageConfig);
    Preconditions.checkNotNull(formatConfig);
    this.formatPlugin = (ParquetFormatPlugin) engineRegistry.getFormatPlugin(storageConfig, formatConfig);
    Preconditions.checkNotNull(formatPlugin);
    this.fs = ImpersonationUtil.createFileSystem(getUserName(), formatPlugin.getFsConf());
    this.formatConfig = formatPlugin.getConfig();
    this.entries = entries;
    this.selectionRoot = selectionRoot;
    this.cacheFileRoot = cacheFileRoot;
    this.filter = filter;
    init(null);
}

/**
 * Planning-time constructor without a filter; delegates with a TRUE filter
 * (i.e. no row-group filtering).
 */
public ParquetGroupScan( //
    String userName,
    FileSelection selection, //
    ParquetFormatPlugin formatPlugin, //
    String selectionRoot,
    String cacheFileRoot,
    List<SchemaPath> columns) throws IOException{
    this(userName, selection, formatPlugin, selectionRoot, cacheFileRoot, columns, ValueExpressions.BooleanExpression.TRUE);
}

/**
 * Main planning-time constructor. Expands the file selection via the metadata
 * cache when one exists, then populates row-group and column statistics in
 * {@code init(...)}.
 */
public ParquetGroupScan( //
    String userName,
    FileSelection selection, //
    ParquetFormatPlugin formatPlugin, //
    String selectionRoot,
    String cacheFileRoot,
    List<SchemaPath> columns,
    LogicalExpression filter) //
    throws IOException {
    super(userName);
    this.formatPlugin = formatPlugin;
    this.columns = columns;
    this.formatConfig = formatPlugin.getConfig();
    this.fs = ImpersonationUtil.createFileSystem(userName, formatPlugin.getFsConf());
    this.selectionRoot = selectionRoot;
    this.cacheFileRoot = cacheFileRoot;
    final FileSelection fileSelection = expandIfNecessary(selection);
    this.entries = Lists.newArrayList();
    if (fileSelection.getMetaContext() != null &&
        (fileSelection.getMetaContext().getPruneStatus() == PruneStatus.NOT_STARTED ||
            fileSelection.getMetaContext().getPruneStatus() == PruneStatus.NOT_PRUNED)) {
        // if pruning was not applicable or was attempted and nothing was pruned, initialize the
        // entries with just the selection root instead of the fully expanded list to reduce overhead.
        // The fully expanded list is already stored as part of the fileSet.
        // TODO: at some point we should examine whether the list of entries is absolutely needed.
        entries.add(new ReadEntryWithPath(fileSelection.getSelectionRoot()));
    } else {
        for (String fileName : fileSelection.getFiles()) {
            entries.add(new ReadEntryWithPath(fileName));
        }
    }
    this.filter = filter;
    init(fileSelection.getMetaContext());
}

/*
 * This is used to clone another copy of the group scan.
 * Collections are shallow-copied into new containers; the metadata object,
 * plugin and file system are shared with the source scan.
 */
private ParquetGroupScan(ParquetGroupScan that) {
    super(that);
    this.columns = that.columns == null ? null : Lists.newArrayList(that.columns);
    this.endpointAffinities = that.endpointAffinities == null ? null : Lists.newArrayList(that.endpointAffinities);
    this.entries = that.entries == null ? null : Lists.newArrayList(that.entries);
    this.formatConfig = that.formatConfig;
    this.formatPlugin = that.formatPlugin;
    this.fs = that.fs;
    this.mappings = that.mappings == null ? null : ArrayListMultimap.create(that.mappings);
    this.rowCount = that.rowCount;
    this.rowGroupInfos = that.rowGroupInfos == null ? null : Lists.newArrayList(that.rowGroupInfos);
    this.selectionRoot = that.selectionRoot;
    this.columnValueCounts = that.columnValueCounts == null ? null : new HashMap<>(that.columnValueCounts);
    this.partitionColTypeMap = that.partitionColTypeMap == null ? null : new HashMap<>(that.partitionColTypeMap);
    this.partitionValueMap = that.partitionValueMap == null ? null : new HashMap<>(that.partitionValueMap);
    this.fileSet = that.fileSet == null ? null : new HashSet<>(that.fileSet);
    this.usedMetadataCache = that.usedMetadataCache;
    this.parquetTableMetadata = that.parquetTableMetadata;
    this.filter = that.filter;
    this.cacheFileRoot = that.cacheFileRoot;
}
/**
 * Expands the selection's folders if a metadata cache file exists for the
 * selection root (or, after partition pruning, for the cache file root).
 * When the selection has already been fully expanded, or no metadata cache
 * file is found, the input selection is returned unchanged.
 *
 * @param selection actual selection before expansion
 * @return new selection after expansion; the input selection when no expansion was done
 * @throws IOException on file system or metadata-cache read errors
 */
private FileSelection expandIfNecessary(FileSelection selection) throws IOException {
    if (selection.isExpandedFully()) {
        return selection;
    }
    // Prefer the cacheFileRoot when one was set (e.g. after partition pruning).
    final String root = (cacheFileRoot != null) ? cacheFileRoot : selectionRoot;
    final Path metaFilePath = new Path(root, Metadata.METADATA_FILENAME);
    return fs.exists(metaFilePath) ? initFromMetadataCache(selection, metaFilePath) : selection;
}
/** @return the read entries (files or selection root) this scan was planned with. */
public List<ReadEntryWithPath> getEntries() {
    return entries;
}

/** @return the parquet format configuration (serialized as "format"). */
@JsonProperty("format")
public ParquetFormatConfig getFormatConfig() {
    return this.formatConfig;
}

/** @return the storage plugin configuration (serialized as "storage"). */
@JsonProperty("storage")
public StoragePluginConfig getEngineConfig() {
    return this.formatPlugin.getStorageConfig();
}

/** @return the selection root path (without scheme/authority once a cache was read). */
public String getSelectionRoot() {
    return selectionRoot;
}

/** @return the fully-expanded set of parquet file paths covered by this scan. */
public Set<String> getFileSet() {
    return fileSet;
}

/** @return the filter expression pushed into this scan. */
public LogicalExpression getFilter() {
    return this.filter;
}

/** Replaces the filter expression pushed into this scan. */
public void setFilter(LogicalExpression filter) {
    this.filter = filter;
}

/** A parquet group scan is always file-based. */
@Override
public boolean hasFiles() {
    return true;
}

/** @return the fully-expanded set of parquet file paths covered by this scan. */
@Override
public Collection<String> getFiles() {
    return fileSet;
}
// Fully-expanded set of parquet file paths covered by this scan; populated in
// init()/initFromMetadataCache() and trimmed by modifyFileSelection().
private Set<String> fileSet;

@JsonIgnore
// only for partition columns : value is unique for each partition
private Map<SchemaPath, MajorType> partitionColTypeMap = Maps.newHashMap();
/**
 * When reading the very first footer, any column is a potential partition column. So for the
 * first footer, we check every column to see if it is single valued, and if so, add it to the
 * list of potential partition columns. For the remaining footers, we will not find any new
 * partition columns, but we may discover that what was previously a potential partition column
 * now no longer qualifies, so it needs to be removed from the list.
 *
 * @param columnMetadata column statistics from one row group's footer
 * @param first          true while processing the very first row group
 * @return whether column is (still) a potential partition column
 */
private boolean checkForPartitionColumn(ColumnMetadata columnMetadata, boolean first) {
    SchemaPath schemaPath = SchemaPath.getCompoundPath(columnMetadata.getName());
    final PrimitiveTypeName primitiveType;
    final OriginalType originalType;
    // Newer cache formats carry the type at the table level; older ones per-column.
    if (this.parquetTableMetadata.hasColumnMetadata()) {
        primitiveType = this.parquetTableMetadata.getPrimitiveType(columnMetadata.getName());
        originalType = this.parquetTableMetadata.getOriginalType(columnMetadata.getName());
    } else {
        primitiveType = columnMetadata.getPrimitiveType();
        originalType = columnMetadata.getOriginalType();
    }
    if (first) {
        // First footer: any single-valued column becomes a candidate.
        if (hasSingleValue(columnMetadata)) {
            partitionColTypeMap.put(schemaPath, getType(primitiveType, originalType));
            return true;
        }
        return false;
    }
    // Later footers can only disqualify candidates, never add new ones.
    // (idiom fix: containsKey() instead of keySet().contains())
    if (!partitionColTypeMap.containsKey(schemaPath)) {
        return false;
    }
    // Disqualify when no longer single-valued or when the type differs between footers.
    if (!hasSingleValue(columnMetadata)
        || !getType(primitiveType, originalType).equals(partitionColTypeMap.get(schemaPath))) {
        partitionColTypeMap.remove(schemaPath);
        return false;
    }
    return true;
}
/**
 * Maps a Parquet (physical, logical) type pair to the Drill major type used for
 * partition-column pruning vectors. The logical ("original") type takes
 * precedence when it has a mapping; otherwise the physical type decides.
 * All results are OPTIONAL (nullable).
 */
public static MajorType getType(PrimitiveTypeName type, OriginalType originalType) {
    if (originalType != null) {
        MinorType fromOriginal = minorTypeFromOriginal(originalType);
        if (fromOriginal != null) {
            return Types.optional(fromOriginal);
        }
        // Logical types without a special mapping fall back to the physical type.
    }
    return Types.optional(minorTypeFromPrimitive(type));
}

/** @return the Drill minor type for a Parquet logical type, or null when unmapped. */
private static MinorType minorTypeFromOriginal(OriginalType originalType) {
    switch (originalType) {
        case DECIMAL:          return MinorType.DECIMAL18;
        case DATE:             return MinorType.DATE;
        case TIME_MILLIS:      return MinorType.TIME;
        case TIMESTAMP_MILLIS: return MinorType.TIMESTAMP;
        case UTF8:             return MinorType.VARCHAR;
        case UINT_8:           return MinorType.UINT1;
        case UINT_16:          return MinorType.UINT2;
        case UINT_32:          return MinorType.UINT4;
        case UINT_64:          return MinorType.UINT8;
        case INT_8:            return MinorType.TINYINT;
        case INT_16:           return MinorType.SMALLINT;
        default:               return null;
    }
}

/** @return the Drill minor type for a Parquet physical type. */
private static MinorType minorTypeFromPrimitive(PrimitiveTypeName type) {
    switch (type) {
        case BOOLEAN: return MinorType.BIT;
        case INT32:   return MinorType.INT;
        case INT64:   return MinorType.BIGINT;
        case FLOAT:   return MinorType.FLOAT4;
        case DOUBLE:  return MinorType.FLOAT8;
        case BINARY:
        case FIXED_LEN_BYTE_ARRAY:
        case INT96:   return MinorType.VARBINARY;
        default:
            // Should never hit this
            throw new UnsupportedOperationException("Unsupported type:" + type);
    }
}
/**
 * Reports whether the column is single-valued within its row group.
 * ColumnMetadata records a single value exactly when the row group's min and
 * max are equal; a null entry never qualifies.
 */
private boolean hasSingleValue(ColumnMetadata columnChunkMetaData) {
    if (columnChunkMetaData == null) {
        return false;
    }
    return columnChunkMetaData.hasSingleValue();
}
/**
 * Narrows this scan to the files in {@code selection} (e.g. after partition
 * pruning): rebuilds the read entries and file set, and drops row groups that
 * belong to files no longer selected.
 */
@Override public void modifyFileSelection(FileSelection selection) {
    entries.clear();
    fileSet = Sets.newHashSet();
    for (String selectedFile : selection.getFiles()) {
        entries.add(new ReadEntryWithPath(selectedFile));
        fileSet.add(selectedFile);
    }

    // Keep only the row groups whose file survived the selection.
    final List<RowGroupInfo> retained = Lists.newArrayList();
    for (RowGroupInfo candidate : rowGroupInfos) {
        if (fileSet.contains(candidate.getPath())) {
            retained.add(candidate);
        }
    }
    this.rowGroupInfos = retained;
}
/** @return the major type recorded for a partition column, or null if the column is not one. */
public MajorType getTypeForColumn(SchemaPath schemaPath) {
    return partitionColTypeMap.get(schemaPath);
}

// Map from file names to maps of column name to partition value mappings
private Map<String, Map<SchemaPath, Object>> partitionValueMap = Maps.newHashMap();
/**
 * Writes the partition value of {@code column} for {@code file} into pruning
 * vector {@code v} at position {@code index}. The vector's concrete class must
 * match the major type previously recorded for the column (see
 * {@link #getTypeForColumn}).
 *
 * @param v      destination value vector (a Nullable* vector of the column's type)
 * @param index  row position to write
 * @param column partition column
 * @param file   parquet file whose partition value is written
 * @throws UnsupportedOperationException for types without a pruning mapping
 */
public void populatePruningVector(ValueVector v, int index, SchemaPath column, String file) {
    String f = Path.getPathWithoutSchemeAndAuthority(new Path(file)).toString();
    MinorType type = getTypeForColumn(column).getMinorType();
    // Fix: look up the partition value once instead of repeating the same
    // two-level map walk inside every switch arm.
    Object partitionValue = partitionValueMap.get(f).get(column);
    switch (type) {
        case INT:
            ((NullableIntVector) v).getMutator().setSafe(index, (Integer) partitionValue);
            return;
        case SMALLINT:
            ((NullableSmallIntVector) v).getMutator().setSafe(index, ((Integer) partitionValue).shortValue());
            return;
        case TINYINT:
            ((NullableTinyIntVector) v).getMutator().setSafe(index, ((Integer) partitionValue).byteValue());
            return;
        case UINT1:
            ((NullableUInt1Vector) v).getMutator().setSafe(index, ((Integer) partitionValue).byteValue());
            return;
        case UINT2:
            ((NullableUInt2Vector) v).getMutator().setSafe(index, (char) ((Integer) partitionValue).shortValue());
            return;
        case UINT4:
            ((NullableUInt4Vector) v).getMutator().setSafe(index, (Integer) partitionValue);
            return;
        case BIGINT:
            ((NullableBigIntVector) v).getMutator().setSafe(index, (Long) partitionValue);
            return;
        case FLOAT4:
            ((NullableFloat4Vector) v).getMutator().setSafe(index, (Float) partitionValue);
            return;
        case FLOAT8:
            ((NullableFloat8Vector) v).getMutator().setSafe(index, (Double) partitionValue);
            return;
        case VARBINARY: {
            byte[] bytes = valueBytes(type, partitionValue);
            ((NullableVarBinaryVector) v).getMutator().setSafe(index, bytes, 0, bytes.length);
            return;
        }
        case DECIMAL18:
            ((NullableDecimal18Vector) v).getMutator().setSafe(index, (Long) partitionValue);
            return;
        case DATE:
            // Footer stores days since epoch; the vector expects epoch millis.
            ((NullableDateVector) v).getMutator().setSafe(index,
                (Integer) partitionValue * (long) DateTimeConstants.MILLIS_PER_DAY);
            return;
        case TIME:
            ((NullableTimeVector) v).getMutator().setSafe(index, (Integer) partitionValue);
            return;
        case TIMESTAMP:
            ((NullableTimeStampVector) v).getMutator().setSafe(index, (Long) partitionValue);
            return;
        case VARCHAR: {
            byte[] bytes = valueBytes(type, partitionValue);
            ((NullableVarCharVector) v).getMutator().setSafe(index, bytes, 0, bytes.length);
            return;
        }
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }
}

/**
 * Extracts the raw bytes of a VARCHAR/VARBINARY partition value. Metadata read
 * from a JSON cache file may yield a String, while footers yield Binary; this
 * unifies the byte extraction that was previously duplicated in two switch arms.
 */
private static byte[] valueBytes(MinorType type, Object s) {
    if (s instanceof Binary) {
        return ((Binary) s).getBytes();
    } else if (s instanceof String) {
        return ((String) s).getBytes();
    } else if (s instanceof byte[]) {
        return (byte[]) s;
    }
    throw new UnsupportedOperationException("Unable to create column data for type: " + type);
}
/**
 * One parquet row group: a unit of parallelizable work with byte range, row
 * count and host affinity. Comparable by total byte size for assignment.
 */
public static class RowGroupInfo extends ReadEntryFromHDFS implements CompleteWork, FileWork {

    // Host -> byte affinity for this row group; set after construction.
    private EndpointByteMap byteMap;
    private int rowGroupIndex;
    // NOTE(review): 'root' is never read or written in this class — looks like dead state.
    private String root;
    private long rowCount;  // rowCount = -1 indicates to include all rows.
    // Limit pushed into the reader; initialized to the full row count.
    private long numRecordsToRead;

    @JsonCreator
    public RowGroupInfo(@JsonProperty("path") String path, @JsonProperty("start") long start,
        @JsonProperty("length") long length, @JsonProperty("rowGroupIndex") int rowGroupIndex, long rowCount) {
        super(path, start, length);
        this.rowGroupIndex = rowGroupIndex;
        this.rowCount = rowCount;
        this.numRecordsToRead = rowCount;
    }

    /** @return the execution-time read entry for this row group. */
    public RowGroupReadEntry getRowGroupReadEntry() {
        return new RowGroupReadEntry(this.getPath(), this.getStart(), this.getLength(),
            this.rowGroupIndex, this.getNumRecordsToRead());
    }

    /** @return the row group's ordinal within its file. */
    public int getRowGroupIndex() {
        return this.rowGroupIndex;
    }

    // Orders work units by byte size (used by the assignment algorithm).
    @Override
    public int compareTo(CompleteWork o) {
        return Long.compare(getTotalBytes(), o.getTotalBytes());
    }

    @Override
    public long getTotalBytes() {
        return this.getLength();
    }

    @Override
    public EndpointByteMap getByteMap() {
        return byteMap;
    }

    public long getNumRecordsToRead() {
        return numRecordsToRead;
    }

    /** Pushes a record limit into this row group's reader. */
    public void setNumRecordsToRead(long numRecords) {
        numRecordsToRead = numRecords;
    }

    public void setEndpointByteMap(EndpointByteMap byteMap) {
        this.byteMap = byteMap;
    }

    public long getRowCount() {
        return rowCount;
    }
}
/**
 * Create and return a new file selection based on reading the metadata cache file.
 *
 * This function also initializes a few of ParquetGroupScan's fields as appropriate:
 * {@code parquetTableMetadata}, {@code fileSet} and {@code selectionRoot}.
 *
 * @param selection initial file selection
 * @param metaFilePath metadata cache file path
 * @return file selection read from cache, marked as fully expanded
 *
 * @throws IOException
 * @throws UserException when the updated selection is empty, this happens if the user selects an empty folder.
 */
private FileSelection
initFromMetadataCache(FileSelection selection, Path metaFilePath) throws IOException {
    // get the metadata for the root directory by reading the metadata file
    // parquetTableMetadata contains the metadata for all files in the selection root folder, but we need to make sure
    // we only select the files that are part of selection (by setting fileSet appropriately)

    // get (and set internal field) the metadata for the directory by reading the metadata file
    this.parquetTableMetadata = Metadata.readBlockMeta(fs, metaFilePath.toString(), selection.getMetaContext(), formatConfig);
    if (formatConfig.autoCorrectCorruptDates) {
        // Fix up dates written by old writers with the corrupt-date epoch bug.
        ParquetReaderUtility.correctDatesInMetadataCache(this.parquetTableMetadata);
    }
    List<FileStatus> fileStatuses = selection.getStatuses(fs);

    if (fileSet == null) {
        fileSet = Sets.newHashSet();
    }

    final Path first = fileStatuses.get(0).getPath();
    if (fileStatuses.size() == 1 && selection.getSelectionRoot().equals(first.toString())) {
        // we are selecting all files from selection root. Expand the file list from the cache
        for (Metadata.ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
            fileSet.add(file.getPath());
        }
    } else if (selection.isExpandedPartial() && !selection.hadWildcard() &&
        cacheFileRoot != null) {
        if (selection.wasAllPartitionsPruned()) {
            // if all partitions were previously pruned, we only need to read 1 file (for the schema)
            fileSet.add(this.parquetTableMetadata.getFiles().get(0).getPath());
        } else {
            // we are here if the selection is in the expanded_partial state (i.e it has directories). We get the
            // list of files from the metadata cache file that is present in the cacheFileRoot directory and populate
            // the fileSet. However, this is *not* the final list of files that will be scanned in execution since the
            // second phase of partition pruning will apply on the files and modify the file selection appropriately.
            for (Metadata.ParquetFileMetadata file : this.parquetTableMetadata.getFiles()) {
                fileSet.add(file.getPath());
            }
        }
    } else {
        // we need to expand the files from fileStatuses
        for (FileStatus status : fileStatuses) {
            Path cacheFileRoot = status.getPath();
            if (status.isDirectory()) {
                //TODO [DRILL-4496] read the metadata cache files in parallel
                final Path metaPath = new Path(cacheFileRoot, Metadata.METADATA_FILENAME);
                final Metadata.ParquetTableMetadataBase metadata = Metadata.readBlockMeta(fs, metaPath.toString(), selection.getMetaContext(), formatConfig);
                for (Metadata.ParquetFileMetadata file : metadata.getFiles()) {
                    fileSet.add(file.getPath());
                }
            } else {
                final Path path = Path.getPathWithoutSchemeAndAuthority(cacheFileRoot);
                fileSet.add(path.toString());
            }
        }
    }

    if (fileSet.isEmpty()) {
        // no files were found, most likely we tried to query some empty sub folders
        throw UserException.validationError().message("The table you tried to query is empty").build(logger);
    }

    List<String> fileNames = Lists.newArrayList(fileSet);

    // when creating the file selection, set the selection root without the URI prefix
    // The reason is that the file names above have been created in the form
    // /a/b/c.parquet and the format of the selection root must match that of the file names
    // otherwise downstream operations such as partition pruning can break.
    final Path metaRootPath = Path.getPathWithoutSchemeAndAuthority(new Path(selection.getSelectionRoot()));
    this.selectionRoot = metaRootPath.toString();

    // Use the FileSelection constructor directly here instead of the FileSelection.create() method
    // because create() changes the root to include the scheme and authority; In future, if create()
    // is the preferred way to instantiate a file selection, we may need to do something different...
    // WARNING: file statuses and file names are inconsistent
    FileSelection newSelection = new FileSelection(selection.getStatuses(fs), fileNames, metaRootPath.toString(),
        cacheFileRoot, selection.wasAllPartitionsPruned());

    newSelection.setExpandedFully();
    newSelection.setMetaContext(selection.getMetaContext());
    return newSelection;
}
/**
 * Initializes the scan's metadata-derived state: loads (or computes) the parquet table
 * metadata — preferring the metadata cache file when present — then builds the per-row-group
 * work units with endpoint byte affinity, the per-column value counts, the total row count,
 * and the candidate partition columns.
 *
 * @param metaContext metadata context used when reading the metadata cache file
 * @throws IOException if the file system or the metadata files cannot be read
 */
private void init(MetadataContext metaContext) throws IOException {
  Path metaPath = null;
  if (entries.size() == 1 && parquetTableMetadata == null) {
    // Single read entry: only consult the cache file if the entry is a directory.
    Path p = Path.getPathWithoutSchemeAndAuthority(new Path(entries.get(0).getPath()));
    if (fs.isDirectory(p)) {
      // Using the metadata file makes sense when querying a directory; otherwise
      // if querying a single file we can look up the metadata directly from the file
      metaPath = new Path(p, Metadata.METADATA_FILENAME);
    }
    if (metaPath != null && fs.exists(metaPath)) {
      usedMetadataCache = true;
      parquetTableMetadata = Metadata.readBlockMeta(fs, metaPath.toString(), metaContext, formatConfig);
    } else {
      // No usable cache file: read the footer(s) directly.
      parquetTableMetadata = Metadata.getParquetTableMetadata(fs, p.toString(), formatConfig);
    }
  } else {
    // Multiple entries (or metadata already partially known): look for the cache
    // file under the selection root.
    Path p = Path.getPathWithoutSchemeAndAuthority(new Path(selectionRoot));
    metaPath = new Path(p, Metadata.METADATA_FILENAME);
    if (fs.isDirectory(new Path(selectionRoot)) && fs.exists(metaPath)) {
      usedMetadataCache = true;
      if (parquetTableMetadata == null) {
        parquetTableMetadata = Metadata.readBlockMeta(fs, metaPath.toString(), metaContext, formatConfig);
      }
      if (fileSet != null) {
        // Cache file covers the whole directory; drop files outside the current selection.
        parquetTableMetadata = removeUnneededRowGroups(parquetTableMetadata);
      }
    } else {
      // No cache file: enumerate every file under each entry and read footers.
      final List<FileStatus> fileStatuses = Lists.newArrayList();
      for (ReadEntryWithPath entry : entries) {
        fileStatuses.addAll(DrillFileSystemUtil.listFiles(fs, Path.getPathWithoutSchemeAndAuthority(new Path(entry.getPath())), true));
      }
      parquetTableMetadata = Metadata.getParquetTableMetadata(fs, fileStatuses, formatConfig);
    }
  }
  // Derive the file set from the metadata when the caller did not supply one.
  if (fileSet == null) {
    fileSet = Sets.newHashSet();
    for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
      fileSet.add(file.getPath());
    }
  }
  // Map host addresses to drillbit endpoints so host affinity can be translated
  // into endpoint affinity below.
  Map<String, DrillbitEndpoint> hostEndpointMap = Maps.newHashMap();
  for (DrillbitEndpoint endpoint : formatPlugin.getContext().getBits()) {
    hostEndpointMap.put(endpoint.getAddress(), endpoint);
  }
  // Build one RowGroupInfo (the unit of parallel work) per row group, carrying
  // how many bytes of it live on each known endpoint.
  rowGroupInfos = Lists.newArrayList();
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    int rgIndex = 0;
    for (RowGroupMetadata rg : file.getRowGroups()) {
      RowGroupInfo rowGroupInfo =
          new RowGroupInfo(file.getPath(), rg.getStart(), rg.getLength(), rgIndex, rg.getRowCount());
      EndpointByteMap endpointByteMap = new EndpointByteMapImpl();
      for (String host : rg.getHostAffinity().keySet()) {
        if (hostEndpointMap.containsKey(host)) {
          // Affinity fraction * row-group length = approximate bytes local to that endpoint.
          endpointByteMap
              .add(hostEndpointMap.get(host), (long) (rg.getHostAffinity().get(host) * rg.getLength()));
        }
      }
      rowGroupInfo.setEndpointByteMap(endpointByteMap);
      rgIndex++;
      rowGroupInfos.add(rowGroupInfo);
    }
  }
  this.endpointAffinities = AffinityCreator.getAffinityMap(rowGroupInfos);
  // Accumulate per-column non-null value counts, the total row count, and the
  // partition-column candidates in a single pass over all row groups.
  columnValueCounts = Maps.newHashMap();
  this.rowCount = 0;
  boolean first = true;
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    for (RowGroupMetadata rowGroup : file.getRowGroups()) {
      long rowCount = rowGroup.getRowCount();
      for (ColumnMetadata column : rowGroup.getColumns()) {
        SchemaPath schemaPath = SchemaPath.getCompoundPath(column.getName());
        Long previousCount = columnValueCounts.get(schemaPath);
        if (previousCount != null) {
          // Once a column is marked NO_COLUMN_STATS it stays that way;
          // otherwise keep adding (rowCount - nulls) when null stats exist.
          if (previousCount != GroupScan.NO_COLUMN_STATS) {
            if (column.getNulls() != null) {
              Long newCount = rowCount - column.getNulls();
              columnValueCounts.put(schemaPath, columnValueCounts.get(schemaPath) + newCount);
            }
          }
        } else {
          if (column.getNulls() != null) {
            Long newCount = rowCount - column.getNulls();
            columnValueCounts.put(schemaPath, newCount);
          } else {
            columnValueCounts.put(schemaPath, GroupScan.NO_COLUMN_STATS);
          }
        }
        // A partition column must hold a single value per file; track that value
        // per file and disqualify the column when two values disagree.
        boolean partitionColumn = checkForPartitionColumn(column, first);
        if (partitionColumn) {
          Map<SchemaPath, Object> map = partitionValueMap.get(file.getPath());
          if (map == null) {
            map = Maps.newHashMap();
            partitionValueMap.put(file.getPath(), map);
          }
          Object value = map.get(schemaPath);
          Object currentValue = column.getMaxValue();
          if (value != null) {
            // NOTE(review): this is a reference (!=) comparison, not equals();
            // presumably value inequality was intended — confirm before changing.
            if (value != currentValue) {
              partitionColTypeMap.remove(schemaPath);
            }
          } else {
            map.put(schemaPath, currentValue);
          }
        } else {
          partitionColTypeMap.remove(schemaPath);
        }
      }
      this.rowCount += rowGroup.getRowCount();
      first = false;
    }
  }
}
/**
 * Prunes the given table metadata down to the files present in {@code fileSet}.
 *
 * @param parquetTableMetadata metadata possibly covering more files than this scan needs
 * @return a clone of the metadata containing only the selected files
 */
private ParquetTableMetadataBase removeUnneededRowGroups(ParquetTableMetadataBase parquetTableMetadata) {
  List<ParquetFileMetadata> retained = Lists.newArrayList();
  for (ParquetFileMetadata fileMetadata : parquetTableMetadata.getFiles()) {
    if (fileSet.contains(fileMetadata.getPath())) {
      retained.add(fileMetadata);
    }
  }
  // Clone so the (possibly cached) original metadata is left untouched.
  ParquetTableMetadataBase pruned = parquetTableMetadata.clone();
  pruned.assignFiles(retained);
  return pruned;
}
/**
 * Calculates the affinity each endpoint has for this scan, by adding up the affinity each endpoint has for each
 * rowGroup. The affinities themselves are precomputed in {@code init()} via AffinityCreator.
 *
 * @return a list of EndpointAffinity objects
 */
@Override
public List<EndpointAffinity> getOperatorAffinity() {
  return this.endpointAffinities;
}
/**
 * Distributes this scan's row groups across the given minor-fragment endpoints,
 * storing the resulting assignment for later retrieval by {@code getSpecificScan}.
 */
@Override
public void applyAssignments(List<DrillbitEndpoint> incomingEndpoints) throws PhysicalOperatorSetupException {
  this.mappings = AssignmentCreator.getMappings(incomingEndpoints, rowGroupInfos);
}
/**
 * Builds the sub-scan holding the row groups previously assigned (via
 * {@code applyAssignments}) to one minor fragment.
 *
 * @param minorFragmentId id of the minor fragment requesting its share of the work
 * @return a row-group scan covering exactly the row groups assigned to that fragment
 */
@Override
public ParquetRowGroupScan getSpecificScan(int minorFragmentId) {
  assert minorFragmentId < mappings.size() : String.format(
      "Mappings length [%d] should be longer than minor fragment id [%d] but it isn't.",
      mappings.size(), minorFragmentId);
  final List<RowGroupInfo> assigned = mappings.get(minorFragmentId);
  Preconditions.checkArgument(!assigned.isEmpty(),
      String.format("MinorFragmentId %d has no read entries assigned", minorFragmentId));
  return new ParquetRowGroupScan(getUserName(), formatPlugin, convertToReadEntries(assigned),
      columns, selectionRoot, filter);
}
/**
 * Converts row-group work units into the read entries consumed by the row-group scan.
 *
 * @param rowGroups row groups assigned to one minor fragment
 * @return one read entry per row group
 */
private List<RowGroupReadEntry> convertToReadEntries(List<RowGroupInfo> rowGroups) {
  // Renamed from "entries" to stop shadowing the class field of the same name.
  List<RowGroupReadEntry> readEntries = Lists.newArrayList();
  for (RowGroupInfo rgi : rowGroups) {
    RowGroupReadEntry entry = new RowGroupReadEntry(rgi.getPath(), rgi.getStart(), rgi.getLength(), rgi.getRowGroupIndex(), rgi.getNumRecordsToRead());
    readEntries.add(entry);
  }
  return readEntries;
}
/** Maximum parallelization width: one minor fragment per row group (the unit of work). */
@Override
public int getMaxParallelizationWidth() {
  return rowGroupInfos.size();
}
/**
 * @return the projected columns; may be null — presumably meaning "all columns"
 *         (see how {@code getScanStats()} treats a null projection) — TODO confirm
 */
public List<SchemaPath> getColumns() {
  return columns;
}
/**
 * Scan cost estimate: exact row count, with CPU cost proportional to rows * columns.
 */
@Override
public ScanStats getScanStats() {
  // 20 is a heuristic column count used when there is no explicit projection — TODO confirm.
  int columnCount = columns == null ? 20 : columns.size();
  return new ScanStats(GroupScanProperty.EXACT_ROW_COUNT, rowCount, 1, rowCount * columnCount);
}
/**
 * Returns a copy of this operator. A group scan is a leaf of the physical plan,
 * so {@code children} must be empty.
 */
@Override
@JsonIgnore
public PhysicalOperator getNewWithChildren(List<PhysicalOperator> children) {
  Preconditions.checkArgument(children.isEmpty());
  return new ParquetGroupScan(this);
}
/** Digest string identifying this scan; delegates to {@link #toString()}. */
@Override
public String getDigest() {
  return toString();
}
/** Sets the directory from which the metadata cache file was (or would be) read. */
public void setCacheFileRoot(String cacheFileRoot) {
  this.cacheFileRoot = cacheFileRoot;
}
/**
 * Human-readable description of this scan; also serves as the plan digest,
 * so its exact content matters for plan comparison.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("ParquetGroupScan [entries=").append(entries)
      .append(", selectionRoot=").append(selectionRoot)
      .append(", numFiles=").append(getEntries().size())
      .append(", usedMetadataFile=").append(usedMetadataCache);
  // Omit the filter when it is absent or trivially TRUE.
  if (!(filter == null || filter.equals(ValueExpressions.BooleanExpression.TRUE))) {
    sb.append(", filter=").append(ExpressionStringBuilder.toString(this.filter));
  }
  if (usedMetadataCache) {
    // For EXPLAIN, remove the URI prefix from cacheFileRoot. If cacheFileRoot is null, we
    // would have read the cache file from selectionRoot
    String str = Path.getPathWithoutSchemeAndAuthority(
        new Path(cacheFileRoot == null ? selectionRoot : cacheFileRoot)).toString();
    sb.append(", cacheFileRoot=").append(str);
  }
  sb.append(", columns=").append(columns).append("]");
  return sb.toString();
}
/** Returns a copy of this scan with a different column projection. */
@Override
public GroupScan clone(List<SchemaPath> columns) {
  ParquetGroupScan newScan = new ParquetGroupScan(this);
  newScan.columns = columns;
  return newScan;
}
/**
 * Based on maxRecords to read for the scan, figures out how many row groups to read
 * and updates the number of records to read for each of them (the last selected row
 * group may be read only partially).
 *
 * @param maxRecords upper bound on the number of records the scan should produce
 * @return total number of row groups to read
 */
private int updateRowGroupInfo(long maxRecords) {
  long cumulative = 0;
  int numRowGroups = 0;
  for (RowGroupInfo rgInfo : rowGroupInfos) {
    long rgRows = rgInfo.getRowCount();
    if (cumulative + rgRows <= maxRecords) {
      // This row group fits entirely under the limit: read it in full.
      rgInfo.setNumRecordsToRead(rgRows);
      cumulative += rgRows;
      numRowGroups++;
    } else {
      if (cumulative < maxRecords) {
        // Partial read of the final row group to hit the limit exactly.
        rgInfo.setNumRecordsToRead(maxRecords - cumulative);
        numRowGroups++;
      }
      break;
    }
  }
  return numRowGroups;
}
/**
 * Returns a copy of this scan restricted to the given file selection, with its
 * metadata-derived state re-initialized for that selection.
 *
 * @param selection the new file selection
 * @throws IOException if metadata for the new selection cannot be read
 */
@Override
public ParquetGroupScan clone(FileSelection selection) throws IOException {
  ParquetGroupScan newScan = new ParquetGroupScan(this);
  newScan.modifyFileSelection(selection);
  newScan.setCacheFileRoot(selection.cacheFileRoot);
  newScan.init(selection.getMetaContext());
  return newScan;
}
/**
 * Like {@code clone(FileSelection)}, but additionally caps the number of records
 * to read across the cloned scan's row groups.
 */
public ParquetGroupScan clone(FileSelection selection, long maxRecords) throws IOException {
  ParquetGroupScan newScan = clone(selection);
  newScan.updateRowGroupInfo(maxRecords);
  return newScan;
}
/** Parquet group scans support LIMIT pushdown (see {@code applyLimit}). */
@Override
public boolean supportsLimitPushdown() {
  return true;
}
/**
 * Applies a LIMIT by reducing the set of files/row groups to read.
 *
 * @param maxRecords maximum number of records the scan should produce (clamped to >= 1)
 * @return a reduced scan, or null when no reduction is possible (caller keeps the original)
 */
@Override
public GroupScan applyLimit(long maxRecords) {
  // NOTE(review): size() >= 0 is vacuously true; "> 0" was presumably intended,
  // but it is kept as-is to avoid changing behavior for empty scans.
  Preconditions.checkArgument(rowGroupInfos.size() >= 0);
  maxRecords = Math.max(maxRecords, 1); // Make sure it request at least 1 row -> 1 rowGroup.
  // further optimization : minimize # of files chosen, or the affinity of files chosen.
  // Calculate number of rowGroups to read based on maxRecords and update
  // number of records to read for each of those rowGroups.
  int index = updateRowGroupInfo(maxRecords);
  Set<String> fileNames = Sets.newHashSet(); // HashSet keeps a fileName unique.
  for (RowGroupInfo rowGroupInfo : rowGroupInfos.subList(0, index)) {
    fileNames.add(rowGroupInfo.getPath());
  }
  // If there is no change in fileSet, no need to create new groupScan.
  if (fileNames.size() == fileSet.size() ) {
    // There is no reduction of rowGroups. Return the original groupScan.
    logger.debug("applyLimit() does not apply!");
    return null;
  }
  try {
    FileSelection newSelection = new FileSelection(null, Lists.newArrayList(fileNames), getSelectionRoot(), cacheFileRoot, false);
    logger.debug("applyLimit() reduce parquet file # from {} to {}", fileSet.size(), fileNames.size());
    return this.clone(newSelection, maxRecords);
  } catch (IOException e) {
    // BUG FIX: the previous "... : {}" form made SLF4J consume the exception as the
    // format parameter and drop its stack trace; pass it as the throwable argument.
    logger.warn("Could not apply rowcount based prune due to Exception", e);
    return null;
  }
}
/** Column projection can always be pushed into this scan. */
@Override
@JsonIgnore
public boolean canPushdownProjects(List<SchemaPath> columns) {
  return true;
}
/**
 * Return column value count for the specified column. If does not contain such column, return 0.
 */
@Override
public long getColumnValueCount(SchemaPath column) {
  // Single map lookup instead of the previous containsKey() + get() double lookup.
  return columnValueCounts.getOrDefault(column, 0L);
}
/** @return the columns identified (during init) as single-valued per file, i.e. partition columns */
@Override
public List<SchemaPath> getPartitionColumns() {
  return new ArrayList<>(partitionColTypeMap.keySet());
}
/**
 * Applies filter-based row-group pruning: evaluates the materialized filter against each
 * row group's column statistics and drops row groups that provably match no rows.
 *
 * @return a reduced scan over the surviving files, or null when pruning is not
 *         applicable or achieves no reduction (caller keeps the original scan)
 */
public GroupScan applyFilter(LogicalExpression filterExpr, UdfUtilities udfUtilities,
    FunctionImplementationRegistry functionImplementationRegistry, OptionManager optionManager) {
  if (fileSet.size() == 1 ||
      ! (parquetTableMetadata.isRowGroupPrunable()) ||
      rowGroupInfos.size() > optionManager.getOption(PlannerSettings.PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD)
      ) {
    // Stop pruning for 3 cases:
    // - 1 single parquet file,
    // - metadata does not have proper format to support row group level filter pruning,
    // - # of row groups is beyond PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD.
    return null;
  }
  // Columns referenced by the filter; only their statistics need collecting.
  final Set<SchemaPath> schemaPathsInExpr = filterExpr.accept(new ParquetRGFilterEvaluator.FieldReferenceFinder(), null);
  final List<RowGroupMetadata> qualifiedRGs = new ArrayList<>(parquetTableMetadata.getFiles().size());
  Set<String> qualifiedFileNames = Sets.newHashSet(); // HashSet keeps a fileName unique.
  ParquetFilterPredicate filterPredicate = null;
  for (ParquetFileMetadata file : parquetTableMetadata.getFiles()) {
    final ColumnExplorer columnExplorer = new ColumnExplorer(optionManager, this.columns);
    Map<String, String> implicitColValues = columnExplorer.populateImplicitColumns(file.getPath(), selectionRoot);
    for (RowGroupMetadata rowGroup : file.getRowGroups()) {
      ParquetMetaStatCollector statCollector = new ParquetMetaStatCollector(
          parquetTableMetadata,
          rowGroup.getColumns(),
          implicitColValues);
      Map<SchemaPath, ColumnStatistics> columnStatisticsMap = statCollector.collectColStat(schemaPathsInExpr);
      // The filter predicate only needs materializing once; reuse it for every row group.
      if (filterPredicate == null) {
        ErrorCollector errorCollector = new ErrorCollectorImpl();
        LogicalExpression materializedFilter = ExpressionTreeMaterializer.materializeFilterExpr(
            filterExpr, columnStatisticsMap, errorCollector, functionImplementationRegistry);
        if (errorCollector.hasErrors()) {
          logger.error("{} error(s) encountered when materialize filter expression : {}",
              errorCollector.getErrorCount(), errorCollector.toErrorString());
          return null;
        }
        // logger.debug("materializedFilter : {}", ExpressionStringBuilder.toString(materializedFilter));
        Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter);
        filterPredicate = (ParquetFilterPredicate) ParquetFilterBuilder.buildParquetFilterPredicate(
            materializedFilter, constantBoundaries, udfUtilities);
        if (filterPredicate == null) {
          return null;
        }
      }
      if (ParquetRGFilterEvaluator.canDrop(filterPredicate, columnStatisticsMap, rowGroup.getRowCount())) {
        continue;
      }
      qualifiedRGs.add(rowGroup);
      qualifiedFileNames.add(file.getPath()); // TODO : optimize when 1 file contains m row groups.
    }
  }
  if (qualifiedFileNames.size() == fileSet.size() ) {
    // There is no reduction of rowGroups. Return the original groupScan.
    logger.debug("applyFilter does not have any pruning!");
    return null;
  } else if (qualifiedFileNames.size() == 0) {
    // Keep at least one file so downstream operators can still learn the schema.
    logger.warn("All rowgroups have been filtered out. Add back one to get schema from scannner");
    qualifiedFileNames.add(fileSet.iterator().next());
  }
  try {
    FileSelection newSelection = new FileSelection(null, Lists.newArrayList(qualifiedFileNames), getSelectionRoot(), cacheFileRoot, false);
    logger.info("applyFilter {} reduce parquet file # from {} to {}", ExpressionStringBuilder.toString(filterExpr), fileSet.size(), qualifiedFileNames.size());
    return this.clone(newSelection);
  } catch (IOException e) {
    // BUG FIX: the previous "... : {}" form made SLF4J consume the exception as the
    // format parameter and drop its stack trace; pass it as the throwable argument.
    logger.warn("Could not apply filter prune due to Exception", e);
    return null;
  }
}
}
| apache-2.0 |
virtualdataset/metagen-java | virtdata-lib-basics/src/main/java/io/virtdata/libbasics/shared/from_long/to_time_types/joda/ToMillisAtStartOfYear.java | 1320 | package io.virtdata.libbasics.shared.from_long.to_time_types.joda;
import io.virtdata.annotations.Categories;
import io.virtdata.annotations.Category;
import io.virtdata.annotations.Example;
import io.virtdata.annotations.ThreadSafeMapper;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.util.function.LongUnaryOperator;
/**
 * Return the epoch milliseconds at the start of the year for the given
 * epoch milliseconds.
 */
@Categories({Category.datetime})
@ThreadSafeMapper
public class ToMillisAtStartOfYear implements LongUnaryOperator {

    // Time zone in which the incoming epoch milliseconds are interpreted.
    private final DateTimeZone tz;

    @Example({"ToMillisAtStartOfYear()","return millisecond epoch time of the start of the year of the provided millisecond epoch time, assuming UTC"})
    public ToMillisAtStartOfYear() {
        this(DateTimeZone.UTC.getID());
    }

    @Example({"ToMillisAtStartOfYear('America/Chicago')","return millisecond epoch time of the start of the year of the provided millisecond epoch time, using timezone America/Chicago"})
    public ToMillisAtStartOfYear(String timezoneId) {
        this.tz = Timezones.forId(timezoneId);
    }

    /**
     * Truncates the instant to the start of its day, then moves the date to
     * January 1st of the same year, all in the configured zone.
     * NOTE(review): the order (time-of-day first, then day/month) can matter
     * around DST transitions in Joda-Time, so it is deliberately preserved.
     */
    @Override
    public long applyAsLong(long operand) {
        return new DateTime(operand,tz).withTimeAtStartOfDay().withDayOfMonth(1).withMonthOfYear(1).getMillis();
    }
}
| apache-2.0 |
raulrr88/Compiler | src/FileHandler.java | 2184 | /**
 * Class implementing file-manipulation functionality.
* @author Giulliano P. Carnielli
*/
import java.io.*;
public class FileHandler extends BufferedReader {
private long line;
private long column;
private long lastLineSize;
/**
* Construtor que recebe nome do arquivo.
* @param fileName nome do arquivo que deve ser aberto e mantido pela classe
* @throws FileNotFoundException o arquivo nao foi encontrado
*/
public FileHandler(String fileName)
throws FileNotFoundException {
this(new File(fileName));
}
/**
* Construtor que recebe um arquivo.
* @param file arquivo que deve ser aberto e mantido pela classe
* @throws FileNotFoundException o arquivo nao foi encontrado
*/
public FileHandler(File file)
throws FileNotFoundException {
super((new FileReader(file)));
line = 1;
column = 0;
lastLineSize = 0;
}
/**
* Metodo que retorna um caractere da entrada, marcando sua posicao no buffer e testando o final
* do arquivo
* @return proximo caractere do buffer de entrada
* @throws IOException caso um erro de leitura ocorra
* @throws EOFException excessao retornada quando o final do arquivo é atingido.
*/
public char getNextChar()
throws EOFException, IOException {
this.mark(1);
int charValue = this.read();
if (charValue == -1) throw new EOFException();
column++;
if (charValue == Character.LINE_SEPARATOR) {
line++;
lastLineSize = column;
column = 0;
}
return (char) charValue;
}
/**
* Metodo que retorna o ultimo caractere lido.
* @throws IOException
*/
public void resetLastChar() throws IOException {
this.reset();
column--;
if (column < 0) {
column = lastLineSize;
line--;
}
}
/**
* @return the line
*/
public long getLine() {
return line;
}
/**
* @return the column
*/
public long getColumn() {
return column;
}
} | apache-2.0 |
masaki-yamakawa/geode | geode-apis-compatible-with-redis/src/main/java/org/apache/geode/redis/internal/delta/RemsDeltaInfo.java | 1596 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package org.apache.geode.redis.internal.delta;
import static org.apache.geode.redis.internal.delta.DeltaType.REMS;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.geode.DataSerializer;
public class RemsDeltaInfo implements DeltaInfo {

  /** Accumulated removals, each encoded as a serialized byte array. */
  private final ArrayList<byte[]> deltas;

  public RemsDeltaInfo() {
    this(new ArrayList<byte[]>());
  }

  public RemsDeltaInfo(List<byte[]> deltas) {
    // Defensive copy so later mutation of the caller's list has no effect here.
    this.deltas = new ArrayList<>(deltas);
  }

  /** Records one more removal in this delta. */
  public void add(byte[] delta) {
    deltas.add(delta);
  }

  /** @return the removals accumulated so far */
  public List<byte[]> getRemoves() {
    return deltas;
  }

  /** Writes the REMS type tag followed by the list of removals. */
  public void serializeTo(DataOutput out) throws IOException {
    DataSerializer.writeEnum(REMS, out);
    DataSerializer.writeArrayList(deltas, out);
  }
}
| apache-2.0 |
inbloom/APP-dashboard | src/main/java/org/slc/sli/dashboard/web/controller/GenericLayoutController.java | 5745 | /*
* Copyright 2012 Shared Learning Collaborative, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.dashboard.web.controller;
import java.util.Random;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.servlet.ModelAndView;
import org.slc.sli.dashboard.entity.ModelAndViewConfig;
import org.slc.sli.dashboard.manager.PortalWSManager;
import org.slc.sli.dashboard.manager.component.CustomizationAssemblyFactory;
import org.slc.sli.dashboard.util.Constants;
import org.slc.sli.dashboard.util.JsonConverter;
import org.slc.sli.dashboard.util.SecurityUtil;
/**
 * Controller for all types of requests. Subclasses build concrete pages by
 * populating a ModelMap from the layout configuration and rendering it through
 * the shared FreeMarker container template.
 *
 * @author dwu
 */
@Controller
public abstract class GenericLayoutController {

    protected Logger logger = LoggerFactory.getLogger(getClass());

    // Location and file extension of the FreeMarker layout templates.
    private static final String LAYOUT_DIR = "layout/";
    private static final String FTL_EXTENSION = ".ftl";

    private CustomizationAssemblyFactory customizationAssemblyFactory;

    private static final String GOOGLE_ANALYTICS_TRACKER_CONSTANT = "googleAnalyticsTrackerId";
    private static final String MINIFY_JS_CONSTANT = "minifyJs";

    @Autowired
    @Qualifier("googleAnalyticsTrackerId")
    private String googleAnalyticsTrackerId;

    @Autowired
    @Qualifier(MINIFY_JS_CONSTANT)
    private Boolean minifyJs;

    protected PortalWSManager portalWSManager;

    /** Convenience overload of the four-argument getPopulatedModel with lazy override disabled. */
    protected ModelMap getPopulatedModel(String layoutId, Object entityKey, HttpServletRequest request) {
        return getPopulatedModel(layoutId, entityKey, request, false);
    }

    /**
     * Populate layout model according to layout defined config for a user/context domain
     *
     * @param layoutId
     *            - unique id of the layout
     * @param entityKey
     *            - entity key to pass to the child panels
     * @param request
     *            - current HTTP request, used for common attributes (context path etc.)
     * @param lazyOverride
     *            - whether to override lazy loading of panel data
     * @return the populated model map
     */
    protected ModelMap getPopulatedModel(String layoutId, Object entityKey, HttpServletRequest request, boolean lazyOverride) {
        // set up model map
        ModelMap model = new ModelMap();
        ModelAndViewConfig modelAndConfig =
                customizationAssemblyFactory.getModelAndViewConfig(layoutId, entityKey, lazyOverride);
        model.addAttribute(Constants.MM_COMPONENT_ID, layoutId);
        model.addAttribute(Constants.MM_ENTITY_ID, entityKey);
        model.addAttribute(Constants.MM_KEY_VIEW_CONFIGS, modelAndConfig.getConfig());
        model.addAttribute(Constants.MM_KEY_LAYOUT, modelAndConfig.getLayoutItems());
        model.addAttribute(Constants.MM_KEY_DATA, modelAndConfig.getData());
        model.addAttribute(Constants.MM_VIEW_DATA_CONFIG_JSON, JsonConverter.toJson(modelAndConfig));
        model.addAttribute(Constants.MM_KEY_LOGGER, logger);
        addCommonData(model, request);
        populateModelLegacyItems(model);
        return model;
    }

    /**
     * Adds the portal header and footer markup to the model, substituting the
     * current user's name into the header placeholder token.
     */
    protected void addHeaderFooter(ModelMap model) {
        boolean isAdmin = isAdmin();
        String header = portalWSManager.getHeader(isAdmin);
        if (header != null) {
            header = header.replace("[$USER_NAME$]", SecurityUtil.getUsername());
            model.addAttribute(Constants.ATTR_HEADER_STRING, header);
            model.addAttribute(Constants.ATTR_FOOTER_STRING, portalWSManager.getFooter(isAdmin));
        }
    }

    /** Adds attributes required by every page: header/footer, analytics id, paths, JS minification flag. */
    protected void addCommonData(ModelMap model, HttpServletRequest request) {
        addHeaderFooter(model);
        model.addAttribute(GOOGLE_ANALYTICS_TRACKER_CONSTANT, googleAnalyticsTrackerId);
        model.addAttribute(Constants.CONTEXT_ROOT_PATH, request.getContextPath());
        model.addAttribute(Constants.CONTEXT_PREVIOUS_PATH, "javascript:history.go(-1)");
        model.addAttribute(MINIFY_JS_CONSTANT, minifyJs);
    }

    /** Adds legacy attributes still referenced by some templates. */
    public void populateModelLegacyItems(ModelMap model) {
        model.addAttribute("random", new Random());
    }

    /** Maps a layout name to its template path, e.g. "foo" -> "layout/foo.ftl". */
    protected String getLayoutView(String layoutName) {
        return LAYOUT_DIR + layoutName + FTL_EXTENSION;
    }

    /** Wraps the requested layout in the overall container page. */
    protected ModelAndView getModelView(String layoutName, ModelMap model) {
        // Includes the page we want to display in the overall_container page
        model.addAttribute(Constants.PAGE_TO_INCLUDE, getLayoutView(layoutName));
        return new ModelAndView(Constants.OVERALL_CONTAINER_PAGE, model);
    }

    @Autowired
    public void setCustomizedDataFactory(CustomizationAssemblyFactory customizedDataFactory) {
        this.customizationAssemblyFactory = customizedDataFactory;
    }

    @Autowired
    public void setPortalWSManager(PortalWSManager portalWSManager) {
        this.portalWSManager = portalWSManager;
    }

    /** @return the current security token for the logged-in user */
    public String getToken() {
        return SecurityUtil.getToken();
    }

    /**
     * @return whether the current user is an administrator; any failure while
     *         checking is deliberately treated as "not an admin"
     */
    public boolean isAdmin() {
        try {
            return SecurityUtil.isAdmin();
        } catch (Exception ex) {
            return false;
        }
    }
}
| apache-2.0 |
spodkowinski/cassandra | tools/fqltool/src/org/apache/cassandra/fqltool/commands/Dump.java | 12529 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.fqltool.commands;
import java.io.File;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import io.airlift.airline.Arguments;
import io.airlift.airline.Command;
import io.airlift.airline.Option;
import io.netty.buffer.Unpooled;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ChronicleQueueBuilder;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.RollCycles;
import net.openhft.chronicle.threads.Pauser;
import net.openhft.chronicle.wire.ReadMarshallable;
import net.openhft.chronicle.wire.ValueIn;
import net.openhft.chronicle.wire.WireIn;
import org.apache.cassandra.audit.FullQueryLogger;
import org.apache.cassandra.cql3.QueryOptions;
import org.apache.cassandra.transport.ProtocolVersion;
/**
* Dump the contents of a list of paths containing full query logs
*/
@Command(name = "dump", description = "Dump the contents of a full query log")
public class Dump implements Runnable
{
    static final char[] HEXI_DECIMAL = "0123456789ABCDEF".toCharArray();

    @Arguments(usage = "<path1> [<path2>...<pathN>]", description = "Path containing the full query logs to dump.", required = true)
    private List<String> arguments = new ArrayList<>();

    @Option(title = "roll_cycle", name = {"--roll-cycle"}, description = "How often to roll the log file was rolled. May be necessary for Chronicle to correctly parse file names. (MINUTELY, HOURLY, DAILY). Default HOURLY.")
    private String rollCycle = "HOURLY";

    @Option(title = "follow", name = {"--follow"}, description = "Upon reacahing the end of the log continue indefinitely waiting for more records")
    private boolean follow = false;

    @Override
    public void run()
    {
        dump(arguments, rollCycle, follow);
    }

    /**
     * Reads every record from the given Chronicle queues and prints a textual
     * rendering of each log entry to stdout. With {@code follow}, keeps polling
     * for new records indefinitely.
     */
    public static void dump(List<String> arguments, String rollCycle, boolean follow)
    {
        // Reused across records; reset at the start of each one.
        StringBuilder sb = new StringBuilder();
        ReadMarshallable reader = wireIn ->
        {
            sb.setLength(0);
            int version = wireIn.read(FullQueryLogger.VERSION).int16();
            if (version != FullQueryLogger.CURRENT_VERSION)
                throw new UnsupportedOperationException("Full query log of unexpected version " + version + " encountered");
            String type = wireIn.read(FullQueryLogger.TYPE).text();
            sb.append("Type: ")
              .append(type)
              .append(System.lineSeparator());
            long queryStartTime = wireIn.read(FullQueryLogger.QUERY_START_TIME).int64();
            sb.append("Query start time: ")
              .append(queryStartTime)
              .append(System.lineSeparator());
            int protocolVersion = wireIn.read(FullQueryLogger.PROTOCOL_VERSION).int32();
            sb.append("Protocol version: ")
              .append(protocolVersion)
              .append(System.lineSeparator());
            QueryOptions options =
                QueryOptions.codec.decode(Unpooled.wrappedBuffer(wireIn.read(FullQueryLogger.QUERY_OPTIONS).bytes()),
                                          ProtocolVersion.decode(protocolVersion));
            long generatedTimestamp = wireIn.read(FullQueryLogger.GENERATED_TIMESTAMP).int64();
            sb.append("Generated timestamp:")
              .append(generatedTimestamp)
              .append(System.lineSeparator());
            int generatedNowInSeconds = wireIn.read(FullQueryLogger.GENERATED_NOW_IN_SECONDS).int32();
            sb.append("Generated nowInSeconds:")
              .append(generatedNowInSeconds)
              .append(System.lineSeparator());
            // The remaining fields depend on the entry type.
            switch (type)
            {
                case (FullQueryLogger.SINGLE_QUERY):
                    dumpQuery(options, wireIn, sb);
                    break;
                case (FullQueryLogger.BATCH):
                    dumpBatch(options, wireIn, sb);
                    break;
                default:
                    throw new UnsupportedOperationException("Log entry of unsupported type " + type);
            }
            System.out.print(sb.toString());
            System.out.flush();
        };

        //Backoff strategy for spinning on the queue, not aggressive at all as this doesn't need to be low latency
        Pauser pauser = Pauser.millis(100);
        List<ChronicleQueue> queues = arguments.stream().distinct().map(path -> ChronicleQueueBuilder.single(new File(path)).readOnly(true).rollCycle(RollCycles.valueOf(rollCycle)).build()).collect(Collectors.toList());
        List<ExcerptTailer> tailers = queues.stream().map(ChronicleQueue::createTailer).collect(Collectors.toList());
        boolean hadWork = true;
        // Keep draining every tailer until a full pass over all queues finds nothing.
        while (hadWork)
        {
            hadWork = false;
            for (ExcerptTailer tailer : tailers)
            {
                while (tailer.readDocument(reader))
                {
                    hadWork = true;
                }
            }
            if (follow)
            {
                if (!hadWork)
                {
                    //Chronicle queue doesn't support blocking so use this backoff strategy
                    pauser.pause();
                }
                //Don't terminate the loop even if there wasn't work
                hadWork = true;
            }
        }
    }

    /** Renders the query string and bound values of a single-query entry. */
    private static void dumpQuery(QueryOptions options, WireIn wireIn, StringBuilder sb)
    {
        sb.append("Query: ")
          .append(wireIn.read(FullQueryLogger.QUERY).text())
          .append(System.lineSeparator());
        List<ByteBuffer> values = options.getValues() != null
                                ? options.getValues()
                                : Collections.emptyList();
        sb.append("Values: ")
          .append(System.lineSeparator());
        appendValuesToStringBuilder(values, sb);
        sb.append(System.lineSeparator());
    }

    /** Renders the batch type plus each statement with its bound values. */
    private static void dumpBatch(QueryOptions options, WireIn wireIn, StringBuilder sb)
    {
        sb.append("Batch type: ")
          .append(wireIn.read(FullQueryLogger.BATCH_TYPE).text())
          .append(System.lineSeparator());
        ValueIn in = wireIn.read(FullQueryLogger.QUERIES);
        int numQueries = in.int32();
        List<String> queries = new ArrayList<>(numQueries);
        for (int i = 0; i < numQueries; i++)
            queries.add(in.text());
        in = wireIn.read(FullQueryLogger.VALUES);
        int numValues = in.int32();
        // Values are stored as a list-of-lists, one inner list per statement.
        for (int i = 0; i < numValues; i++)
        {
            int numSubValues = in.int32();
            List<ByteBuffer> subValues = new ArrayList<>(numSubValues);
            for (int j = 0; j < numSubValues; j++)
                subValues.add(ByteBuffer.wrap(in.bytes()));
            sb.append("Query: ")
              .append(queries.get(i))
              .append(System.lineSeparator());
            sb.append("Values: ")
              .append(System.lineSeparator());
            appendValuesToStringBuilder(subValues, sb);
        }
        sb.append(System.lineSeparator());
    }

    /**
     * Appends a hex dump of each value (capped at 1024 bytes) to the builder.
     * NOTE(review): the "-----" separator is appended *after* each value starting
     * from the second one, rather than between values — presumably a between-values
     * separator was intended; confirm before changing.
     */
    private static void appendValuesToStringBuilder(List<ByteBuffer> values, StringBuilder sb)
    {
        boolean first = true;
        for (ByteBuffer value : values)
        {
            Bytes bytes = Bytes.wrapForRead(value);
            long maxLength2 = Math.min(1024, bytes.readLimit() - bytes.readPosition());
            toHexString(bytes, bytes.readPosition(), maxLength2, sb);
            if (maxLength2 < bytes.readLimit() - bytes.readPosition())
            {
                sb.append("... truncated").append(System.lineSeparator());
            }
            if (first)
            {
                first = false;
            }
            else
            {
                sb.append("-----").append(System.lineSeparator());
            }
        }
    }

    //This is from net.openhft.chronicle.bytes, need to pass in the StringBuilder so had to copy
    /*
     * Copyright 2016 higherfrequencytrading.com
     *
     * Licensed under the Apache License, Version 2.0 (the "License");
     * you may not use this file except in compliance with the License.
     * You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    /**
     * display the hex data of {@link Bytes} from the position() to the limit()
     *
     * @param bytes the buffer you wish to toString()
     * @return hex representation of the buffer, from example [0D ,OA, FF]
     */
    public static String toHexString(final Bytes bytes, long offset, long len, StringBuilder builder)
        throws BufferUnderflowException
    {
        if (len == 0)
            return "";
        int width = 16;
        int[] lastLine = new int[width];
        String sep = "";
        long position = bytes.readPosition();
        long limit = bytes.readLimit();
        try {
            bytes.readPositionRemaining(offset, len);
            long start = offset / width * width;
            long end = (offset + len + width - 1) / width * width;
            for (long i = start; i < end; i += width) {
                // check for duplicate rows
                if (i + width < end) {
                    boolean same = true;
                    for (int j = 0; j < width && i + j < offset + len; j++) {
                        int ch = bytes.readUnsignedByte(i + j);
                        same &= (ch == lastLine[j]);
                        lastLine[j] = ch;
                    }
                    if (i > start && same) {
                        sep = "........\n";
                        continue;
                    }
                }
                builder.append(sep);
                sep = "";
                String str = Long.toHexString(i);
                for (int j = str.length(); j < 8; j++)
                    builder.append('0');
                builder.append(str);
                for (int j = 0; j < width; j++) {
                    if (j == width / 2)
                        builder.append(' ');
                    if (i + j < offset || i + j >= offset + len) {
                        builder.append("   ");
                    } else {
                        builder.append(' ');
                        int ch = bytes.readUnsignedByte(i + j);
                        builder.append(HEXI_DECIMAL[ch >> 4]);
                        builder.append(HEXI_DECIMAL[ch & 15]);
                    }
                }
                builder.append(' ');
                for (int j = 0; j < width; j++) {
                    if (j == width / 2)
                        builder.append(' ');
                    if (i + j < offset || i + j >= offset + len) {
                        builder.append(' ');
                    } else {
                        int ch = bytes.readUnsignedByte(i + j);
                        if (ch < ' ' || ch > 126)
                            ch = '\u00B7';
                        builder.append((char) ch);
                    }
                }
                builder.append("\n");
            }
            return builder.toString();
        } finally {
            // Restore the caller's read window regardless of how rendering went.
            bytes.readLimit(limit);
            bytes.readPosition(position);
        }
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-elasticbeanstalk/src/main/java/com/amazonaws/services/elasticbeanstalk/model/transform/TooManyApplicationsExceptionUnmarshaller.java | 1628 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticbeanstalk.model.transform;
import org.w3c.dom.Node;
import javax.annotation.Generated;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.elasticbeanstalk.model.TooManyApplicationsException;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TooManyApplicationsExceptionUnmarshaller extends StandardErrorUnmarshaller {

    /** The service error code that this unmarshaller is able to handle. */
    private static final String ERROR_CODE = "TooManyApplicationsException";

    public TooManyApplicationsExceptionUnmarshaller() {
        super(TooManyApplicationsException.class);
    }

    /**
     * Converts the error response node into a {@link TooManyApplicationsException},
     * or returns {@code null} when the response carries a different (or missing)
     * error code so another unmarshaller can take over.
     */
    @Override
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Constant-first equals() is null-safe: a missing error code simply fails the match.
        if (!ERROR_CODE.equals(parseErrorCode(node))) {
            return null;
        }
        return (TooManyApplicationsException) super.unmarshall(node);
    }
}
| apache-2.0 |
cuba-platform/cuba | modules/core/src/com/haulmont/cuba/security/app/role/annotation/EntityAccessContainer.java | 960 | /*
* Copyright (c) 2008-2019 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.security.app.role.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Containing annotation type that aggregates repeated {@link EntityAccess}
 * declarations on a single role method.
 *
 * <p>Retained at runtime so the security subsystem can read the contained
 * {@code EntityAccess} entries reflectively. Applicable to methods only.
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface EntityAccessContainer {

    /** The repeated {@link EntityAccess} declarations gathered on the annotated method. */
    EntityAccess[] value();
}
| apache-2.0 |
LegNeato/buck | test/com/facebook/buck/android/AndroidBinaryFilesInfoTest.java | 5778 | /*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.android.apkmodule.APKModuleGraph;
import com.facebook.buck.android.dalvik.ZipSplitter.DexSplitStrategy;
import com.facebook.buck.android.exopackage.ExopackageInfo;
import com.facebook.buck.android.exopackage.ExopackageInfo.DexInfo;
import com.facebook.buck.android.exopackage.ExopackageMode;
import com.facebook.buck.android.packageable.AndroidPackageableCollection;
import com.facebook.buck.android.packageable.AndroidPackageableCollector;
import com.facebook.buck.core.description.BuildRuleParams;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.targetgraph.TargetGraph;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.util.types.Either;
import com.facebook.buck.util.types.Pair;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.util.concurrent.MoreExecutors;
import java.nio.file.Paths;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link AndroidBinaryFilesInfo}, focusing on how exopackage
 * module metadata from the pre-dex merge step is surfaced through
 * {@code getExopackageInfo()}.
 */
public class AndroidBinaryFilesInfoTest {
  private AndroidBinaryFilesInfo androidBinaryFilesInfo;
  private FakePreDexMerge preDexMerge;

  @Before
  public void setUp() throws Exception {
    // Only MODULES exopackage mode is enabled so module info is populated.
    EnumSet<ExopackageMode> exopackageModes = EnumSet.of(ExopackageMode.MODULES);
    BuildTarget apkTarget = BuildTargetFactory.newInstance("//app:app");
    APKModuleGraph apkModuleGraph =
        new APKModuleGraph(TargetGraph.EMPTY, apkTarget, Optional.empty());
    // Empty packageable collection: these tests do not depend on real packageables.
    AndroidPackageableCollection collection =
        new AndroidPackageableCollector(
                apkTarget, ImmutableSet.of(), ImmutableSet.of(), apkModuleGraph)
            .build();
    preDexMerge = new FakePreDexMerge(apkTarget, apkModuleGraph);
    // Canned dex outputs returned by the fake merge rule.
    preDexMerge.dexInfo =
        new DexFilesInfo(
            FakeSourcePath.of("primary.dex"),
            ImmutableSortedSet.of(FakeSourcePath.of("secondary_dexes")),
            Optional.empty());
    AndroidGraphEnhancementResult enhancementResult =
        AndroidGraphEnhancementResult.builder()
            .setDexMergeRule(Either.ofLeft(preDexMerge))
            .setPackageableCollection(collection)
            .setPrimaryResourcesApkPath(FakeSourcePath.of("primary_resources.apk"))
            .setAndroidManifestPath(FakeSourcePath.of("AndroidManifest.xml"))
            .setAPKModuleGraph(apkModuleGraph)
            .build();
    androidBinaryFilesInfo = new AndroidBinaryFilesInfo(enhancementResult, exopackageModes, false);
  }

  /**
   * Verifies that each (metadata, directory) pair reported by the pre-dex merge
   * rule is translated into a corresponding {@link DexInfo} in the exopackage info.
   */
  @Test
  public void getExopackageInfo() {
    Pair<SourcePath, SourcePath> metadataAndSourcePath =
        new Pair<>(
            FakeSourcePath.of(Paths.get("module_name", "metadata.txt")),
            FakeSourcePath.of(Paths.get("module_name")));
    preDexMerge.moduleMetadataAndDexSources = ImmutableList.of(metadataAndSourcePath);
    ExopackageInfo info = androidBinaryFilesInfo.getExopackageInfo().get();
    ImmutableList<DexInfo> moduleInfo = info.getModuleInfo().get();
    Assert.assertThat(moduleInfo, Matchers.hasSize(1));
    DexInfo dexInfo = moduleInfo.get(0);
    Assert.assertEquals(metadataAndSourcePath.getFirst(), dexInfo.getMetadata());
    Assert.assertEquals(metadataAndSourcePath.getSecond(), dexInfo.getDirectory());
  }

  /**
   * Minimal {@link PreDexMerge} stand-in whose dex outputs and module
   * metadata streams are supplied directly by the test instead of being
   * computed from a real build.
   */
  private class FakePreDexMerge extends PreDexMerge {
    // Canned value returned from getDexFilesInfo(); set by the test.
    DexFilesInfo dexInfo;
    // Canned (metadata, dex dir) pairs streamed from getModuleMetadataAndDexSourcePaths().
    List<Pair<SourcePath, SourcePath>> moduleMetadataAndDexSources;

    FakePreDexMerge(BuildTarget buildTarget, APKModuleGraph apkModuleGraph) {
      // Superclass requires a fully-specified DexSplitMode even though the
      // fake never performs an actual merge; values mirror common defaults.
      super(
          buildTarget,
          new FakeProjectFilesystem(),
          null,
          new BuildRuleParams(
              ImmutableSortedSet::of, ImmutableSortedSet::of, ImmutableSortedSet.of()),
          new DexSplitMode(
              /* shouldSplitDex */ true,
              DexSplitStrategy.MINIMIZE_PRIMARY_DEX_SIZE,
              DexStore.JAR,
              /* linearAllocHardLimit */ 4 * 1024 * 1024,
              /* primaryDexPatterns */ ImmutableSet.of("List"),
              Optional.of(FakeSourcePath.of("the/manifest.txt")),
              /* primaryDexScenarioFile */ Optional.empty(),
              /* isPrimaryDexScenarioOverflowAllowed */ false,
              /* secondaryDexHeadClassesFile */ Optional.empty(),
              /* secondaryDexTailClassesFile */ Optional.empty()),
          apkModuleGraph,
          ImmutableMultimap.of(),
          null,
          MoreExecutors.newDirectExecutorService(),
          Optional.empty(),
          Optional.empty(),
          "dx");
    }

    @Override
    public DexFilesInfo getDexFilesInfo() {
      return dexInfo;
    }

    @Override
    public Stream<Pair<SourcePath, SourcePath>> getModuleMetadataAndDexSourcePaths() {
      return moduleMetadataAndDexSources.stream();
    }
  }
}
| apache-2.0 |
GiggleCorp2017/JoPro | JoProNetBeansSource/tools/src/org/joeffice/tools/CreatePresentationDirectories.java | 1459 | /*
* Copyright 2013 Japplis.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joeffice.tools;
import java.io.*;
/**
* This class will create the directories for the 30 days development. In each directory, it will create a
* presentation.txt file C:\Java\projects\Joeffice\tools\src>javac org\joeffice\tools\CreatePresentationDirectories.java
* java -cp . org.joeffice.tools.CreatePresentationDirectories
*
* @author Anthony Goubard
*/
public class CreatePresentationDirectories {

    /** Default number of "day-N" directories to create (the 30-day development plan). */
    private static final int DEFAULT_DAYS = 30;

    /** Default presentations root, used when no command-line argument is given. */
    private static final String DEFAULT_DIR =
            "C:\\Java\\projects\\Joeffice\\admin\\marketing\\presentations";

    /**
     * Creates the presentation day directories.
     *
     * <p>Backward compatible with the original no-argument invocation; optionally
     * accepts overrides: {@code args[0]} is the base directory and {@code args[1]}
     * is the number of day directories to create.
     *
     * @param args optional {baseDirectory, dayCount}
     * @throws IOException if a directory or file cannot be created
     */
    public static void main(String[] args) throws IOException {
        File presentationsDir = new File(args.length > 0 ? args[0] : DEFAULT_DIR);
        int days = args.length > 1 ? Integer.parseInt(args[1]) : DEFAULT_DAYS;
        createDayDirectories(presentationsDir, days);
    }

    /**
     * Creates {@code days} directories named "day-1" .. "day-N" under {@code baseDir},
     * each containing an empty presentation.txt.
     *
     * @param baseDir root directory for the day directories (created if missing)
     * @param days    number of day directories to create
     * @throws IOException if a directory cannot be created
     */
    static void createDayDirectories(File baseDir, int days) throws IOException {
        for (int i = 1; i <= days; i++) {
            File dayDir = new File(baseDir, "day-" + i);
            // mkdirs() also creates missing parents; the original mkdir() silently
            // failed when the base directory did not exist. A false return is fine
            // only when the directory already exists.
            if (!dayDir.mkdirs() && !dayDir.isDirectory()) {
                throw new IOException("Unable to create directory " + dayDir);
            }
            File presentationFile = new File(dayDir, "presentation.txt");
            // createNewFile() returns false when the file already exists, which is acceptable.
            presentationFile.createNewFile();
        }
    }
}
| apache-2.0 |
ahome-it/ahome-tooling-nativetools | src/main/java/com/ait/tooling/nativetools/client/NObjectJSO.java | 4206 | /*
Copyright (c) 2017 Ahome' Innovation Technologies. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.ait.tooling.nativetools.client;
import com.google.gwt.core.client.JavaScriptObject;
/**
 * GWT overlay type wrapping a native JavaScript object as a string-keyed map.
 *
 * <p>Typed put/get accessors delegate to JSNI property reads/writes; every key
 * is first passed through {@code NUtils.doKeyRepair(name)} before being used as
 * a JavaScript property name.
 */
public class NObjectJSO extends NObjectBaseJSO<NObjectJSO>
{
    /** Creates a new, empty native object. */
    public static final NObjectJSO make()
    {
        return createNObjectBaseJSO();
    }

    // Overlay types must have protected, empty constructors.
    protected NObjectJSO()
    {
    }

    /** Reinterprets an arbitrary {@link JavaScriptObject} as an NObjectJSO; null-safe. */
    public static final NObjectJSO cast(final JavaScriptObject jso)
    {
        if (null != jso)
        {
            return jso.cast();
        }
        return null;
    }

    /** Stores an int property under the (repaired) key. */
    public final void put(final String name, final int value)
    {
        put_0(NUtils.doKeyRepair(name), value);
    }

    /** Stores a double property under the (repaired) key. */
    public final void put(final String name, final double value)
    {
        put_0(NUtils.doKeyRepair(name), value);
    }

    /** Stores a boolean property under the (repaired) key. */
    public final void put(final String name, final boolean value)
    {
        put_0(NUtils.doKeyRepair(name), value);
    }

    /** Stores a String property under the (repaired) key. */
    public final void put(final String name, final String value)
    {
        put_0(NUtils.doKeyRepair(name), value);
    }

    /**
     * Stores the underlying JSO of the given wrapper; a null wrapper stores a
     * null JavaScript value rather than being skipped.
     */
    public final void put(final String name, final NHasJSO<? extends JavaScriptObject> value)
    {
        if (null != value)
        {
            put_0(NUtils.doKeyRepair(name), value.getJSO());
        }
        else
        {
            put_0(NUtils.doKeyRepair(name), (JavaScriptObject) null);
        }
    }

    /** Stores a raw {@link JavaScriptObject} property under the (repaired) key. */
    public final void put(final String name, final JavaScriptObject value)
    {
        put_0(NUtils.doKeyRepair(name), value);
    }

    // --- JSNI property writers: direct this[name] = value assignments ---

    private final native void put_0(String name, int value)
    /*-{
    	this[name] = value;
    }-*/;

    private final native void put_0(String name, double value)
    /*-{
    	this[name] = value;
    }-*/;

    private final native void put_0(String name, boolean value)
    /*-{
    	this[name] = value;
    }-*/;

    private final native void put_0(String name, String value)
    /*-{
    	this[name] = value;
    }-*/;

    private final native void put_0(String name, JavaScriptObject value)
    /*-{
    	this[name] = value;
    }-*/;

    /** Reads the property as a wrapped {@code NValue}. */
    public final NValue<?> getAsNValue(final String name)
    {
        return getAsNValue_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as a raw {@link JavaScriptObject}. */
    public final JavaScriptObject getAsJSO(final String name)
    {
        return getAsJSO_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as an int (JSNI truncates with {@code | 0}). */
    public final int getAsInteger(final String name)
    {
        return getAsInteger_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as a double. */
    public final double getAsDouble(final String name)
    {
        return getAsDouble_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as a boolean. */
    public final boolean getAsBoolean(final String name)
    {
        return getAsBoolean_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as a String (may be null). */
    public final String getAsString(final String name)
    {
        return getAsString_0(NUtils.doKeyRepair(name));
    }

    /** Reads the property as a String, falling back to {@code otherwise} when null. */
    public final String getAsString(final String name, final String otherwise)
    {
        final String value = getAsString_0(NUtils.doKeyRepair(name));

        return ((null != value) ? value : otherwise);
    }

    // Non-native: wraps the raw value via the NUtils helper.
    private final NValue<?> getAsNValue_0(final String name)
    {
        return NUtils.Native.getAsNValue(this, name);
    }

    // --- JSNI property readers: direct this[name] lookups ---

    private final native JavaScriptObject getAsJSO_0(String name)
    /*-{
    	return this[name];
    }-*/;

    private final native int getAsInteger_0(String name)
    /*-{
    	return (this[name] | 0);
    }-*/;

    private final native double getAsDouble_0(String name)
    /*-{
    	return this[name];
    }-*/;

    private final native String getAsString_0(String name)
    /*-{
    	return this[name];
    }-*/;

    private final native boolean getAsBoolean_0(String name)
    /*-{
    	return this[name];
    }-*/;
}
Droid3r/MVPListFetch | app/src/main/java/akshay/example/com/mvplistfetch/ListFetch.java | 1258 | package akshay.example.com.mvplistfetch;
import android.app.Application;
import com.squareup.picasso.OkHttpDownloader;
import com.squareup.picasso.Picasso;
import akshay.example.com.mvplistfetch.di.components.AppComponent;
import akshay.example.com.mvplistfetch.di.components.DaggerAppComponent;
import akshay.example.com.mvplistfetch.di.modules.AppModule;
/**
* Created by akshay on 16/09/16.
*/
/**
 * Application subclass that owns the Dagger {@link AppComponent} and configures
 * the process-wide Picasso singleton on startup.
 */
public class ListFetch extends android.app.Application {

    // NOTE(review): this field initializer runs during construction and invokes the
    // overridable createAppComponent(); a subclass override would execute against a
    // not-fully-constructed instance. Consider initializing lazily or in onCreate().
    private AppComponent mAppComponent = createAppComponent();

    @Override
    public void onCreate() {
        super.onCreate();
        //flags to check image caching in debug mode
        //TODO comment this in production
        // Builds a Picasso instance with an effectively unbounded OkHttp disk cache
        // and installs it as the global singleton before any Picasso.with() call.
        Picasso.Builder builder = new Picasso.Builder(this);
        builder.downloader(new OkHttpDownloader(this, Integer.MAX_VALUE));
        Picasso built = builder.build();
        //built.setIndicatorsEnabled(true);
        //built.setLoggingEnabled(true);
        Picasso.setSingletonInstance(built);
    }

    /** Builds the application-scoped Dagger component; overridable for tests. */
    protected AppComponent createAppComponent() {
        return DaggerAppComponent.builder()
                .appModule(new AppModule())
                .build();
    }

    /** Returns the application-scoped Dagger component. */
    public AppComponent getAppComponent() {
        return mAppComponent;
    }
}
| apache-2.0 |
news-sentiment/news-sentiment-stratosphere | src/test/java/de/tuberlin/dima/impro3/sentiment/SentimentParserTest.java | 1320 | package de.tuberlin.dima.impro3.sentiment;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import de.tuberlin.dima.impro3.sentiment.SentimentParser;
import de.tuberlin.dima.impro3.tagger.TaggedSentence;
/**
 * Tests for {@link SentimentParser}: sentiment scoring of a raw German sentence
 * and of a pre-tagged sentence.
 */
public class SentimentParserTest {

	/** Scores a raw sentence and pins the expected sentiment value. */
	@Test
	public void testSentimentParser(){
		SentimentParser sentimentParser = SentimentParser.newSentimentParser();
		float sentiment = sentimentParser.computeSentimentValue("Die SPD und die CDU haben die Wahl verloren, weil sie schlechte Verlierer sind.");
		// NOTE(review): exact float equality (==) is brittle; any change to the
		// underlying model or arithmetic order will break this — consider an
		// epsilon comparison (e.g. Math.abs(a - b) < 1e-6).
		assertTrue("should be the right sentiment value", sentiment == -0.5573500394821167f);
	}

	/** Scores a pre-tokenized/tagged sentence (tokens + POS tags + NE labels). */
	@Test
	public void testSentimentParserTagged(){
		SentimentParser sentimentParser = SentimentParser.newSentimentParser();
		String [] tokens = {"Die", "SPD", "und", "die", "CDU", "und", "die", "CDU", "CSU", "sind", "verlogene", "Lustmolche", "."};
		String [] ne = {"O", "I-ORG", "O", "O", "I-ORG", "O", "O", "I-ORG", "I-ORG", "O", "O", "O", "O"};
		String [] tags = {"ART", "NE", "KON", "ART", "NE", "KON", "ART", "NE", "NE", "VAFIN", "ADJA", "NN", "$."};
		TaggedSentence taggedSentence = new TaggedSentence(tokens, tags, ne);
		float sentiment = sentimentParser.computeSentimentValue(taggedSentence);
		// NOTE(review): same exact-equality concern as above.
		assertTrue("should be the right sentiment value", sentiment == -0.3355f);
	}
}
| apache-2.0 |
vromero/gist-maven-plugin | src/main/java/org/vromero/gist/snippet/SnippetManager.java | 1566 | package org.vromero.gist.snippet;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import static org.codehaus.plexus.util.FileUtils.*;
/**
 * Manages extracted code snippets on disk: each gist gets a working directory
 * named {@code gist-<tempGistId>} under the configured output directory, and
 * each snippet is stored there as a file named after its snippet id.
 */
public class SnippetManager {

    /** Prefix for the per-gist working directory name. */
    private static final String GIST_DIR_PREFIX = "gist-";

    private final File outputDirectory;

    private final String encoding;

    public SnippetManager(String encoding, File outputDirectory) {
        this.encoding = encoding;
        this.outputDirectory = outputDirectory;
    }

    /**
     * Extracts the snippet identified by {@code snippetId} from {@code inputFile}
     * and writes its text into the working directory of the given gist.
     *
     * @throws IOException if the directory cannot be created or the file cannot be written
     */
    public void createSnippet(File inputFile, String tempGistId, String snippetId) throws IOException {
        File gistDir = openOrCreateOutputDirectory(tempGistId);

        URL sourceUrl = toURLs(new File[]{inputFile})[0];
        SnippetExtractor extractor = new SnippetExtractor(sourceUrl, encoding);

        File target = new File(gistDir, snippetId);
        fileWrite(target, encoding, extractor.readSnippet(snippetId).toString());
    }

    /**
     * Reads back a previously extracted snippet from the gist working directory.
     *
     * @return the snippet text, decoded with the configured encoding
     * @throws IOException if the snippet file cannot be read
     */
    public String readSnippet(String tempGistId, String snippetId) throws IOException {
        File gistDir = openOrCreateOutputDirectory(tempGistId);
        File snippetFile = new File(gistDir, snippetId);
        return FileUtils.fileRead(snippetFile, encoding);
    }

    /** Resolves the per-gist working directory, creating it (and parents) on first use. */
    private File openOrCreateOutputDirectory(String tempGistId) throws IOException {
        File gistDir = new File(outputDirectory, GIST_DIR_PREFIX + tempGistId);
        forceMkdir(gistDir);
        return gistDir;
    }
}
| apache-2.0 |
xiaomozhang/druid | druid-1.0.9/src/main/java/com/alibaba/druid/support/http/stat/WebURIStat.java | 23249 | /*
* Copyright 1999-2011 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.support.http.stat;
import com.alibaba.druid.support.profile.ProfileStat;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import static com.alibaba.druid.util.JdbcSqlStatUtils.get;
/**
 * Per-URI web request statistics: request counts and timings, a response-time
 * histogram, and JDBC activity (executes, fetches, updates, transactions,
 * pool/result-set open-close counts) aggregated from the per-request
 * {@link WebRequestStat}.
 *
 * <p>All counters are lock-free: plain volatile fields mutated through
 * {@code Atomic*FieldUpdater}s, with CAS loops to maintain high-water marks.
 * The instance currently being measured on a thread is tracked in a
 * {@code ThreadLocal} between {@link #beforeInvoke()} and
 * {@link #afterInvoke(Throwable, long)}.
 */
public class WebURIStat {

    private final String                               uri;

    // Requests currently in flight, and the historical concurrency peak.
    private volatile int                               runningCount;
    private volatile int                               concurrentMax;
    // Total requests and cumulative elapsed time in nanoseconds.
    private volatile long                              requestCount;
    private volatile long                              requestTimeNano;
    final static AtomicIntegerFieldUpdater<WebURIStat> runningCountUpdater             = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                             "runningCount");
    final static AtomicIntegerFieldUpdater<WebURIStat> concurrentMaxUpdater            = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                              "concurrentMax");
    final static AtomicLongFieldUpdater<WebURIStat>    requestCountUpdater             = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "requestCount");
    final static AtomicLongFieldUpdater<WebURIStat>    requestTimeNanoUpdater          = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "requestTimeNano");

    private volatile long                              jdbcFetchRowCount;
    private volatile long                              jdbcFetchRowPeak;               // peak rows fetched in a single request
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcFetchRowCountUpdater        = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcFetchRowCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcFetchRowPeakUpdater         = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcFetchRowPeak");

    private volatile long                              jdbcUpdateCount;
    private volatile long                              jdbcUpdatePeak;                 // peak rows updated in a single request
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcUpdateCountUpdater          = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcUpdateCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcUpdatePeakUpdater           = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcUpdatePeak");

    private volatile long                              jdbcExecuteCount;
    private volatile long                              jdbcExecuteErrorCount;
    private volatile long                              jdbcExecutePeak;                // peak SQL executions in a single request
    private volatile long                              jdbcExecuteTimeNano;
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcExecuteCountUpdater         = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcExecuteCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcExecuteErrorCountUpdater    = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcExecuteErrorCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcExecutePeakUpdater          = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcExecutePeak");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcExecuteTimeNanoUpdater      = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcExecuteTimeNano");

    // Transaction outcomes observed while serving this URI.
    private volatile long                              jdbcCommitCount;
    private volatile long                              jdbcRollbackCount;
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcCommitCountUpdater          = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcCommitCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcRollbackCountUpdater        = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcRollbackCount");

    // Connection-pool checkout/checkin counts.
    private volatile long                              jdbcPoolConnectionOpenCount;
    private volatile long                              jdbcPoolConnectionCloseCount;
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcPoolConnectionOpenCountUpdater  = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                               "jdbcPoolConnectionOpenCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcPoolConnectionCloseCountUpdater = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                               "jdbcPoolConnectionCloseCount");

    private volatile long                              jdbcResultSetOpenCount;
    private volatile long                              jdbcResultSetCloseCount;

    private volatile long                              errorCount;

    // Millis timestamp of the most recent request; -1 means "never accessed".
    private volatile long                              lastAccessTimeMillis            = -1L;

    // Profiling data collected for this URI. (Field name "profiletat" is a
    // historical typo kept for compatibility with the public getter.)
    private volatile ProfileStat                       profiletat                      = new ProfileStat();

    final static AtomicLongFieldUpdater<WebURIStat>    jdbcResultSetOpenCountUpdater   = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcResultSetOpenCount");
    final static AtomicLongFieldUpdater<WebURIStat>    jdbcResultSetCloseCountUpdater  = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "jdbcResultSetCloseCount");
    final static AtomicLongFieldUpdater<WebURIStat>    errorCountUpdater               = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "errorCount");
    final static AtomicLongFieldUpdater<WebURIStat>    lastAccessTimeMillisUpdater     = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "lastAccessTimeMillis");

    // Stat instance being measured on the current thread, set in beforeInvoke().
    private final static ThreadLocal<WebURIStat>       currentLocal                    = new ThreadLocal<WebURIStat>();

    // Response-time histogram buckets by decade of milliseconds:
    // [0,1) [1,10) [10,100) [100,1s) [1s,10s) [10s,100s) [100s,1000s) [1000s,∞)
    private volatile long                              histogram_0_1;
    private volatile long                              histogram_1_10;
    private volatile long                              histogram_10_100;
    private volatile long                              histogram_100_1000;
    private volatile int                               histogram_1000_10000;
    private volatile int                               histogram_10000_100000;
    private volatile int                               histogram_100000_1000000;
    private volatile int                               histogram_1000000_more;
    final static AtomicLongFieldUpdater<WebURIStat>    histogram_0_1_Updater           = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "histogram_0_1");
    final static AtomicLongFieldUpdater<WebURIStat>    histogram_1_10_Updater          = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "histogram_1_10");
    final static AtomicLongFieldUpdater<WebURIStat>    histogram_10_100_Updater        = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "histogram_10_100");
    final static AtomicLongFieldUpdater<WebURIStat>    histogram_100_1000_Updater      = AtomicLongFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                           "histogram_100_1000");
    final static AtomicIntegerFieldUpdater<WebURIStat> histogram_1000_10000_Updater    = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                              "histogram_1000_10000");
    final static AtomicIntegerFieldUpdater<WebURIStat> histogram_10000_100000_Updater  = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                              "histogram_10000_100000");
    final static AtomicIntegerFieldUpdater<WebURIStat> histogram_100000_1000000_Updater = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                               "histogram_100000_1000000");
    final static AtomicIntegerFieldUpdater<WebURIStat> histogram_1000000_more_Updater  = AtomicIntegerFieldUpdater.newUpdater(WebURIStat.class,
                                                                                                                              "histogram_1000000_more");

    public WebURIStat(String uri){
        super();
        this.uri = uri;
    }

    /** Returns the stat being measured on the calling thread, or null outside a request. */
    public static WebURIStat current() {
        return currentLocal.get();
    }

    public String getUri() {
        return uri;
    }

    /**
     * Marks the start of a request for this URI: binds this stat to the thread,
     * bumps running/request counters, raises the concurrency high-water mark via
     * a CAS loop, and records the request start time.
     */
    public void beforeInvoke() {
        currentLocal.set(this);

        int running = runningCountUpdater.incrementAndGet(this);

        // CAS loop: only raise concurrentMax, never lower it.
        for (;;) {
            int max = concurrentMaxUpdater.get(this);
            if (running > max) {
                if (concurrentMaxUpdater.compareAndSet(this, max, running)) {
                    break;
                }
            } else {
                break;
            }
        }

        requestCountUpdater.incrementAndGet(this);

        WebRequestStat requestStat = WebRequestStat.current();
        if (requestStat != null) {
            this.setLastAccessTimeMillis(requestStat.getStartMillis());
        }
    }

    /**
     * Marks the end of a request: records elapsed time and errors, folds the
     * per-request JDBC statistics from the thread's {@link WebRequestStat} into
     * this URI's totals (maintaining per-request peaks via CAS loops), and
     * unbinds this stat from the thread.
     *
     * @param error non-null when the request ended with an exception
     * @param nanos elapsed request time in nanoseconds
     */
    public void afterInvoke(Throwable error, long nanos) {
        runningCountUpdater.decrementAndGet(this);
        requestTimeNanoUpdater.addAndGet(this, nanos);

        histogramRecord(nanos);

        if (error != null) {
            errorCountUpdater.incrementAndGet(this);
        }

        {
            WebRequestStat localStat = WebRequestStat.current();
            if (localStat != null) {
                {
                    long fetchRowCount = localStat.getJdbcFetchRowCount();
                    this.addJdbcFetchRowCount(fetchRowCount);

                    // CAS loop: track the largest fetch-row count seen in one request.
                    for (;;) {
                        long peak = jdbcFetchRowPeakUpdater.get(this);
                        if (fetchRowCount <= peak) {
                            break;
                        }

                        if (jdbcFetchRowPeakUpdater.compareAndSet(this, peak, fetchRowCount)) {
                            break;
                        }
                    }
                }
                {
                    long executeCount = localStat.getJdbcExecuteCount();
                    this.addJdbcExecuteCount(executeCount);

                    // CAS loop: track the largest execute count seen in one request.
                    for (;;) {
                        long peak = jdbcExecutePeakUpdater.get(this);
                        if (executeCount <= peak) {
                            break;
                        }

                        if (jdbcExecutePeakUpdater.compareAndSet(this, peak, executeCount)) {
                            break;
                        }
                    }
                }
                {
                    long updateCount = localStat.getJdbcUpdateCount();
                    this.addJdbcUpdateCount(updateCount);

                    // CAS loop: track the largest update count seen in one request.
                    for (;;) {
                        long peak = jdbcUpdatePeakUpdater.get(this);
                        if (updateCount <= peak) {
                            break;
                        }

                        if (jdbcUpdatePeakUpdater.compareAndSet(this, peak, updateCount)) {
                            break;
                        }
                    }
                }

                jdbcExecuteErrorCountUpdater.addAndGet(this, localStat.getJdbcExecuteErrorCount());
                jdbcExecuteTimeNanoUpdater.addAndGet(this, localStat.getJdbcExecuteTimeNano());

                this.addJdbcPoolConnectionOpenCount(localStat.getJdbcPoolConnectionOpenCount());
                this.addJdbcPoolConnectionCloseCount(localStat.getJdbcPoolConnectionCloseCount());

                this.addJdbcResultSetOpenCount(localStat.getJdbcResultSetOpenCount());
                this.addJdbcResultSetCloseCount(localStat.getJdbcResultSetCloseCount());
            }
        }

        currentLocal.set(null);
    }

    // Increments the histogram bucket matching the elapsed time (in millis, by decade).
    private void histogramRecord(long nanos) {
        final long millis = nanos / 1000 / 1000;

        if (millis < 1) {
            histogram_0_1_Updater.incrementAndGet(this);
        } else if (millis < 10) {
            histogram_1_10_Updater.incrementAndGet(this);
        } else if (millis < 100) {
            histogram_10_100_Updater.incrementAndGet(this);
        } else if (millis < 1000) {
            histogram_100_1000_Updater.incrementAndGet(this);
        } else if (millis < 10000) {
            histogram_1000_10000_Updater.incrementAndGet(this);
        } else if (millis < 100000) {
            histogram_10000_100000_Updater.incrementAndGet(this);
        } else if (millis < 1000000) {
            histogram_100000_1000000_Updater.incrementAndGet(this);
        } else {
            histogram_1000000_more_Updater.incrementAndGet(this);
        }
    }

    public int getRunningCount() {
        return this.runningCount;
    }

    public long getConcurrentMax() {
        return concurrentMax;
    }

    public long getRequestCount() {
        return requestCount;
    }

    public long getRequestTimeNano() {
        return requestTimeNano;
    }

    public long getRequestTimeMillis() {
        return getRequestTimeNano() / (1000 * 1000);
    }

    public void addJdbcFetchRowCount(long delta) {
        jdbcFetchRowCountUpdater.addAndGet(this, delta);
    }

    public long getJdbcFetchRowCount() {
        return jdbcFetchRowCount;
    }

    public long getJdbcFetchRowPeak() {
        return jdbcFetchRowPeak;
    }

    public void addJdbcUpdateCount(long updateCount) {
        jdbcUpdateCountUpdater.addAndGet(this, updateCount);
    }

    public long getJdbcUpdateCount() {
        return jdbcUpdateCount;
    }

    public long getJdbcUpdatePeak() {
        return jdbcUpdatePeak;
    }

    public void incrementJdbcExecuteCount() {
        jdbcExecuteCountUpdater.incrementAndGet(this);
    }

    public void addJdbcExecuteCount(long executeCount) {
        jdbcExecuteCountUpdater.addAndGet(this, executeCount);
    }

    public long getJdbcExecuteCount() {
        return jdbcExecuteCount;
    }

    public long getJdbcExecuteErrorCount() {
        return jdbcExecuteErrorCount;
    }

    public long getJdbcExecutePeak() {
        return jdbcExecutePeak;
    }

    public long getJdbcExecuteTimeMillis() {
        return getJdbcExecuteTimeNano() / (1000 * 1000);
    }

    public long getJdbcExecuteTimeNano() {
        return jdbcExecuteTimeNano;
    }

    public void incrementJdbcCommitCount() {
        jdbcCommitCountUpdater.incrementAndGet(this);
    }

    public long getJdbcCommitCount() {
        return jdbcCommitCount;
    }

    public void incrementJdbcRollbackCount() {
        jdbcRollbackCountUpdater.incrementAndGet(this);
    }

    public long getJdbcRollbackCount() {
        return jdbcRollbackCount;
    }

    public void setLastAccessTimeMillis(long lastAccessTimeMillis) {
        this.lastAccessTimeMillis = lastAccessTimeMillis;
    }

    /** Returns the last access time as a Date, or null if never accessed. */
    public Date getLastAccessTime() {
        if (lastAccessTimeMillis < 0L) {
            return null;
        }

        return new Date(lastAccessTimeMillis);
    }

    public long getLastAccessTimeMillis() {
        return lastAccessTimeMillis;
    }

    public long getErrorCount() {
        return errorCount;
    }

    public long getJdbcPoolConnectionOpenCount() {
        return jdbcPoolConnectionOpenCount;
    }

    public void addJdbcPoolConnectionOpenCount(long delta) {
        jdbcPoolConnectionOpenCountUpdater.addAndGet(this, delta);
    }

    public void incrementJdbcPoolConnectionOpenCount() {
        jdbcPoolConnectionOpenCountUpdater.incrementAndGet(this);
    }

    public long getJdbcPoolConnectionCloseCount() {
        return jdbcPoolConnectionCloseCount;
    }

    public void addJdbcPoolConnectionCloseCount(long delta) {
        jdbcPoolConnectionCloseCountUpdater.addAndGet(this, delta);
    }

    public void incrementJdbcPoolConnectionCloseCount() {
        jdbcPoolConnectionCloseCountUpdater.incrementAndGet(this);
    }

    public long getJdbcResultSetOpenCount() {
        return jdbcResultSetOpenCount;
    }

    public void addJdbcResultSetOpenCount(long delta) {
        jdbcResultSetOpenCountUpdater.addAndGet(this, delta);
    }

    public long getJdbcResultSetCloseCount() {
        return jdbcResultSetCloseCount;
    }

    public void addJdbcResultSetCloseCount(long delta) {
        jdbcResultSetCloseCountUpdater.addAndGet(this, delta);
    }

    public ProfileStat getProfiletat() {
        return profiletat;
    }

    /** Returns all histogram bucket counts, lowest bucket first. */
    public long[] getHistogramValues() {
        return new long[] {
                //
                histogram_0_1, //
                histogram_1_10, //
                histogram_10_100, //
                histogram_100_1000, //
                histogram_1000_10000, //
                histogram_10000_100000, //
                histogram_100000_1000000, //
                histogram_1000000_more //
        };
    }

    /**
     * Snapshots all counters into a {@link WebURIStatValue}; when {@code reset}
     * is true each counter is atomically read-and-zeroed (via the shared
     * {@code get} helper), so the snapshot starts a new accounting interval.
     */
    public WebURIStatValue getValue(boolean reset) {
        WebURIStatValue val = new WebURIStatValue();

        val.setUri(uri);

        val.setRunningCount(runningCount);
        val.setConcurrentMax(get(this, concurrentMaxUpdater, reset));
        val.setRequestCount(get(this, requestCountUpdater, reset));
        val.setRequestTimeNano(get(this, requestTimeNanoUpdater, reset));

        val.setJdbcFetchRowCount(get(this, jdbcFetchRowCountUpdater, reset));
        val.setJdbcFetchRowPeak(get(this, jdbcFetchRowPeakUpdater, reset));

        val.setJdbcUpdateCount(get(this, jdbcUpdateCountUpdater, reset));
        val.setJdbcUpdatePeak(get(this, jdbcUpdatePeakUpdater, reset));

        val.setJdbcExecuteCount(get(this, jdbcExecuteCountUpdater, reset));
        val.setJdbcExecuteErrorCount(get(this, jdbcExecuteErrorCountUpdater, reset));
        val.setJdbcExecutePeak(get(this, jdbcExecutePeakUpdater, reset));
        val.setJdbcExecuteTimeNano(get(this, jdbcExecuteTimeNanoUpdater, reset));

        val.setJdbcCommitCount(get(this, jdbcCommitCountUpdater, reset));
        val.setJdbcRollbackCount(get(this, jdbcRollbackCountUpdater, reset));

        val.setJdbcPoolConnectionOpenCount(get(this, jdbcPoolConnectionOpenCountUpdater, reset));
        val.setJdbcPoolConnectionCloseCount(get(this, jdbcPoolConnectionCloseCountUpdater, reset));

        val.setJdbcResultSetOpenCount(get(this, jdbcResultSetOpenCountUpdater, reset));
        val.setJdbcResultSetCloseCount(get(this, jdbcResultSetCloseCountUpdater, reset));

        val.setErrorCount(get(this, errorCountUpdater, reset));

        val.setLastAccessTimeMillis(get(this, lastAccessTimeMillisUpdater, reset));

        val.setProfileEntryStatValueList(this.getProfiletat().getStatValue(reset));

        val.histogram_0_1 = get(this, histogram_0_1_Updater, reset);
        val.histogram_1_10 = get(this, histogram_1_10_Updater, reset);
        val.histogram_10_100 = get(this, histogram_10_100_Updater, reset);
        val.histogram_100_1000 = get(this, histogram_100_1000_Updater, reset);
        val.histogram_1000_10000 = get(this, histogram_1000_10000_Updater, reset);
        val.histogram_10000_100000 = get(this, histogram_10000_100000_Updater, reset);
        val.histogram_100000_1000000 = get(this, histogram_100000_1000000_Updater, reset);
        val.histogram_1000000_more = get(this, histogram_1000000_more_Updater, reset);

        return val;
    }

    /** Convenience: snapshot (without reset) rendered as a name-value map. */
    public Map<String, Object> getStatData() {
        return getValue(false).getStatData();
    }
}
| apache-2.0 |
mcekovic/tennis-crystal-ball | tennis-stats/src/itest/java/org/strangeforest/tcb/stats/boot/ServiceTest.java | 726 | package org.strangeforest.tcb.stats.boot;
import java.lang.annotation.*;
import org.springframework.boot.test.autoconfigure.jdbc.*;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase.*;
import org.springframework.context.annotation.*;
import org.springframework.stereotype.*;
import org.strangeforest.tcb.stats.visitors.*;
/**
 * Composed meta-annotation for service-layer integration tests.
 *
 * <p>Boots a {@code @JdbcTest} slice that additionally scans
 * {@code @Service}-annotated beans while excluding {@code @VisitorSupport}
 * components, and (via {@code Replace.NONE}) runs against the configured
 * database instead of swapping in an embedded one.
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@JdbcTest(
includeFilters = @ComponentScan.Filter(type = FilterType.ANNOTATION, classes = Service.class),
excludeFilters = @ComponentScan.Filter(type = FilterType.ANNOTATION, classes = VisitorSupport.class)
)
@AutoConfigureTestDatabase(replace = Replace.NONE)
public @interface ServiceTest {}
| apache-2.0 |
nkasvosve/beyondj | beyondj-data/beyondj-data-mongodb/src/main/java/com/lenox/custom/CustomMongodbSerializer.java | 4592 | package com.lenox.custom;
import com.mongodb.DBObject;
import com.mongodb.DBRef;
import com.mysema.query.mongodb.MongodbSerializer;
import com.mysema.query.types.*;
import org.springframework.data.mapping.context.MappingContext;
import org.springframework.data.mongodb.core.convert.MongoConverter;
import org.springframework.data.mongodb.core.convert.QueryMapper;
import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
import org.springframework.util.Assert;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
/**
 * Querydsl {@link MongodbSerializer} that consults Spring Data MongoDB mapping
 * metadata: property paths are rendered with their mapped field names, values
 * are converted via the {@link MongoConverter}, and association properties are
 * serialized as {@link DBRef}s.
 *
 * @author nickk
 */
public class CustomMongodbSerializer extends MongodbSerializer {
// Mongo document key of the identifier field.
private static final String ID_KEY = "_id";
// Path types that can carry persistent-property metadata (see getPropertyFor).
private static final Set<PathType> PATH_TYPES;
static {
Set<PathType> pathTypes = new HashSet<PathType>();
pathTypes.add(PathType.VARIABLE);
pathTypes.add(PathType.PROPERTY);
PATH_TYPES = Collections.unmodifiableSet(pathTypes);
}
private final MongoConverter converter;
private final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
private final QueryMapper mapper;
/**
 * Creates a new {@link CustomMongodbSerializer} for the given {@link MappingContext}.
 *
 * @param converter must not be {@literal null}.
 */
public CustomMongodbSerializer(MongoConverter converter) {
Assert.notNull(converter, "MongoConverter must not be null!");
this.mappingContext = converter.getMappingContext();
this.converter = converter;
this.mapper = new QueryMapper(converter);
}
/*
 * (non-Javadoc)
 * @see com.mysema.query.mongodb.MongodbSerializer#getKeyForPath(com.mysema.query.types.Path, com.mysema.query.types.PathMetadata)
 */
@Override
protected String getKeyForPath(Path<?> expr, PathMetadata<?> metadata) {
// Only PROPERTY paths have mapping metadata; anything else uses the default key.
if (!metadata.getPathType().equals(PathType.PROPERTY)) {
return super.getKeyForPath(expr, metadata);
}
Path<?> parent = metadata.getParent();
// NOTE(review): assumes the parent type is a mapped entity — if
// getPersistentEntity(...) returned null this would NPE; confirm upstream guarantees.
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
MongoPersistentProperty property = entity.getPersistentProperty(metadata.getName());
// Fall back to the default key when the property is not part of the mapping.
return property == null ? super.getKeyForPath(expr, metadata) : property.getFieldName();
}
/*
 * (non-Javadoc)
 * @see com.mysema.query.mongodb.MongodbSerializer#asDBObject(java.lang.String, java.lang.Object)
 */
@Override
protected DBObject asDBObject(String key, Object value) {
// Route _id values through the QueryMapper so they are converted to the
// mapped identifier representation.
if (ID_KEY.equals(key)) {
return mapper.getMappedObject(super.asDBObject(key, value), null);
}
// Patterns (regex predicates) are passed through untouched; other values are
// converted to their Mongo-native representation first.
return super.asDBObject(key, value instanceof Pattern ? value : converter.convertToMongoType(value));
}
/*
 * (non-Javadoc)
 * @see com.mysema.query.mongodb.MongodbSerializer#isReference(com.mysema.query.types.Path)
 */
@Override
protected boolean isReference(Path<?> path) {
// A path is a reference iff it resolves to an association property.
MongoPersistentProperty property = getPropertyFor(path);
return property == null ? false : property.isAssociation();
}
/*
 * (non-Javadoc)
 * @see com.mysema.query.mongodb.MongodbSerializer#asReference(java.lang.Object)
 */
@Override
protected DBRef asReference(Object constant) {
return converter.toDBRef(constant, null);
}
/*
 * (non-Javadoc)
 * @see com.mysema.query.mongodb.MongodbSerializer#asReference(com.mysema.query.types.Operation, int)
 */
@Override
protected DBRef asReference(Operation<?> expr, int constIndex) {
// Use the first Path argument's mapping metadata to build the DBRef for the
// constant operand at constIndex; fall back to the default behavior if the
// operation has no Path argument.
for (Object arg : expr.getArgs()) {
if (arg instanceof Path) {
MongoPersistentProperty property = getPropertyFor((Path<?>) arg);
Object constant = ((Constant<?>) expr.getArg(constIndex)).getConstant();
return converter.toDBRef(constant, property);
}
}
return super.asReference(expr, constIndex);
}
// Resolves the persistent property behind a path, or null when the path has no
// parent, is not a VARIABLE/PROPERTY path, or its parent type is not mapped.
private MongoPersistentProperty getPropertyFor(Path<?> path) {
Path<?> parent = path.getMetadata().getParent();
if (parent == null || !PATH_TYPES.contains(path.getMetadata().getPathType())) {
return null;
}
MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(parent.getType());
return entity != null ? entity.getPersistentProperty(path.getMetadata().getName()) : null;
}
}
| apache-2.0 |
gdefias/JavaDemo | InitJava/jcip/src/main/java/net/jcip/examples/part2_structapplication/chapter7_cancelclose/PrimeProducer.java | 779 | package net.jcip.examples.part2_structapplication.chapter7_cancelclose;
import java.math.BigInteger;
import java.util.concurrent.BlockingQueue;
/**
 * PrimeProducer
 *
 * Cancellation via interruption: the producer polls its interrupted status
 * between primes and also exits when put() throws InterruptedException, so
 * cancel() works whether the thread is computing or blocked on the queue.
 */
public class PrimeProducer extends Thread {

    /** Destination for the primes this thread generates. */
    private final BlockingQueue<BigInteger> queue;

    PrimeProducer(BlockingQueue<BigInteger> queue) {
        this.queue = queue;
    }

    @Override
    public void run() {
        try {
            BigInteger candidate = BigInteger.ONE;
            // Check the interrupt flag explicitly so cancellation is noticed
            // even when put() never blocks (e.g. an unbounded queue).
            while (!Thread.currentThread().isInterrupted()) {
                candidate = candidate.nextProbablePrime();
                queue.put(candidate);
            }
        } catch (InterruptedException consumed) {
            // Interrupted while blocked in put(): allow the thread to exit.
        }
    }

    /** Requests cancellation by interrupting this producer thread. */
    public void cancel() {
        interrupt();
    }
}
| apache-2.0 |
gravitydev/closure-stylesheets | src/com/google/common/css/compiler/ast/CssTreeVisitor.java | 6465 | /*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.css.compiler.ast;
/**
 * Visitor interface for CSS abstract syntax trees.
 *
 * <p>Implementations receive paired {@code enter*}/{@code leave*} callbacks as
 * the tree is traversed. By convention (documented explicitly on
 * {@link #enterRuleset}) an {@code enter*} method's boolean result controls
 * whether the node's children are visited; the matching {@code leave*} method
 * is called after the sub-tree has been handled.
 */
public interface CssTreeVisitor extends AtRuleHandler {
/** Called before visiting a {@code CssRootNode}'s sub trees */
boolean enterTree(CssRootNode root);
/** Called after visiting a {@code CssRootNode}'s sub trees */
void leaveTree(CssRootNode root);
/** Called before visiting a {@code CssImportBlockNode}'s sub trees */
boolean enterImportBlock(CssImportBlockNode block);
/** Called after visiting a {@code CssImportBlockNode}'s sub trees */
void leaveImportBlock(CssImportBlockNode block);
/** Called before visiting a {@code CssBlockNode}'s sub trees */
boolean enterBlock(CssBlockNode block);
/** Called after visiting a {@code CssBlockNode}'s sub trees */
void leaveBlock(CssBlockNode block);
/** Called before visiting a {@code CssConditionalBlockNode}'s sub trees */
boolean enterConditionalBlock(CssConditionalBlockNode block);
/** Called after visiting a {@code CssConditionalBlockNode}'s sub trees */
void leaveConditionalBlock(CssConditionalBlockNode block);
/** Called before visiting a {@code CssDeclarationBlockNode}'s sub trees */
boolean enterDeclarationBlock(CssDeclarationBlockNode block);
/** Called after visiting a {@code CssDeclarationBlockNode}'s sub trees */
void leaveDeclarationBlock(CssDeclarationBlockNode block);
/**
 * Called before visiting a {@code CssRulesetNode}'s sub trees.
 *
 * @return whether ruleset children should be visited
 */
boolean enterRuleset(CssRulesetNode ruleset);
/** Called after visiting a {@code CssRulesetNode}'s sub trees */
void leaveRuleset(CssRulesetNode ruleset);
/** Called before visiting a {@code CssSelectorListNode}'s sub trees */
boolean enterSelectorBlock(CssSelectorListNode block);
/** Called after visiting a {@code CssSelectorListNode}'s sub trees */
void leaveSelectorBlock(CssSelectorListNode block);
/** Called before visiting a {@code CssDeclarationNode}'s sub trees */
boolean enterDeclaration(CssDeclarationNode declaration);
/** Called after visiting a {@code CssDeclarationNode}'s sub trees */
void leaveDeclaration(CssDeclarationNode declaration);
/** Called before visiting a {@code CssSelectorNode}'s sub trees */
boolean enterSelector(CssSelectorNode selector);
/** Called after visiting a {@code CssSelectorNode}'s sub trees */
void leaveSelector(CssSelectorNode selector);
/** Called before visiting a {@code CssClassSelectorNode}'s sub trees */
boolean enterClassSelector(CssClassSelectorNode classSelector);
/** Called after visiting a {@code CssClassSelectorNode}'s sub trees */
void leaveClassSelector(CssClassSelectorNode classSelector);
/** Called before visiting a {@code CssIdSelectorNode}'s sub trees */
boolean enterIdSelector(CssIdSelectorNode idSelector);
/** Called after visiting a {@code CssIdSelectorNode}'s sub trees */
void leaveIdSelector(CssIdSelectorNode idSelector);
/** Called before visiting a {@code CssPseudoClassNode}'s sub trees */
boolean enterPseudoClass(CssPseudoClassNode pseudoClass);
/** Called after visiting a {@code CssPseudoClassNode}'s sub trees */
void leavePseudoClass(CssPseudoClassNode pseudoClass);
/** Called before visiting a {@code CssPseudoElementNode}'s sub trees */
boolean enterPseudoElement(CssPseudoElementNode pseudoElement);
/** Called after visiting a {@code CssPseudoElementNode}'s sub trees */
void leavePseudoElement(CssPseudoElementNode pseudoElement);
/** Called before visiting a {@code CssAttributeSelectorNode}'s sub trees */
boolean enterAttributeSelector(CssAttributeSelectorNode attributeSelector);
/** Called after visiting a {@code CssAttributeSelectorNode}'s sub trees */
void leaveAttributeSelector(CssAttributeSelectorNode attributeSelector);
/** Called before visiting a {@code CssPropertyValueNode}'s sub trees */
boolean enterPropertyValue(CssPropertyValueNode propertyValue);
/** Called after visiting a {@code CssPropertyValueNode}'s sub trees */
void leavePropertyValue(CssPropertyValueNode propertyValue);
/** Called before visiting a {@code CssValueNode} */
boolean enterValueNode(CssValueNode value);
/** Called after visiting a {@code CssValueNode} */
void leaveValueNode(CssValueNode value);
/** Called before visiting a {@code CssFunctionNode}'s sub trees */
boolean enterFunctionNode(CssFunctionNode value);
/** Called after visiting a {@code CssFunctionNode}'s sub trees */
void leaveFunctionNode(CssFunctionNode value);
/** Called before visiting a {@code CssFunctionNode}'s sub trees */
boolean enterArgumentNode(CssValueNode value);
/** Called after visiting a {@code CssFunctionNode}'s sub trees */
void leaveArgumentNode(CssValueNode value);
/** Called before visiting a {@code CssCombinatorNode}'s sub trees */
boolean enterCombinator(CssCombinatorNode combinator);
/** Called after visiting a {@code CssCombinatorNode}'s sub trees */
void leaveCombinator(CssCombinatorNode combinator);
/** Called before visiting a {@code CssKeyNode}'s sub trees */
boolean enterKey(CssKeyNode key);
/** Called after visiting a {@code CssKeyNode}'s sub trees */
void leaveKey(CssKeyNode key);
/** Called before visiting a {@code CssKeyListNode}'s sub trees */
boolean enterKeyBlock(CssKeyListNode block);
/** Called after visiting a {@code CssKeyListNode}'s sub trees */
void leaveKeyBlock(CssKeyListNode block);
/** Called before visiting a {@code CssKeyframeRulesetNode}'s sub trees */
boolean enterKeyframeRuleset(CssKeyframeRulesetNode key);
/** Called after visiting a {@code CssKeyframeRulesetNode}'s sub trees */
void leaveKeyframeRuleset(CssKeyframeRulesetNode key);
/** Traverse the (sub) tree starting at {@code node} */
void visit(CssNode node);
}
| apache-2.0 |
JNDX25219/XiaoShangXing | app/src/main/java/com/xiaoshangxing/utils/imageUtils/MyGlide.java | 4387 | package com.xiaoshangxing.utils.imageUtils;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.support.v4.app.Fragment;
import android.widget.ImageView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.animation.GlideAnimation;
import com.bumptech.glide.request.target.SimpleTarget;
import com.xiaoshangxing.R;
/**
* Created by FengChaoQun
* on 2016/7/14
*/
public class MyGlide {
public static void with(Activity activity, String url, ImageView view) {
Glide.with(activity)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.nim_image_download_failed)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void with(Fragment fragment, String url, ImageView view) {
Glide.with(fragment)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.nim_image_download_failed)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void with(Context context, String url, ImageView view) {
Glide.with(context)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.nim_image_download_failed)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void with_defaul_image(Context context, String url, ImageView view) {
Glide.with(context)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.nim_image_download_failed)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void with_app_log(Context context, String url, ImageView view) {
Glide.with(context)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.app_logo)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void with_icon_head(Context context, String url, ImageView view) {
try {
Glide.with(context)
.load(url)
.placeholder(R.color.g0)
.error(R.mipmap.icon_headimg)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
}
public static void with_default_head(Context context, String url, ImageView view) {
try {
Glide.with(context)
.load(url)
.placeholder(R.mipmap.default_head)
.error(R.mipmap.default_head)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
}
public static void with_default_groupIcon(Context context, ImageView view) {
Glide.with(context)
.load(R.mipmap.group_icon)
.into(view);
}
public static void with_default_college(Context context, String url, ImageView view) {
Glide.with(context)
.load(url)
.placeholder(R.mipmap.icon_xueyuan_img)
.error(R.mipmap.icon_xueyuan_img)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.into(view);
}
public static void withBitmap(Context context, String url, final ImageView view) {
Glide.with(context)
.load(url)
.asBitmap()
.placeholder(R.color.g0)
.error(R.mipmap.nim_image_download_failed)
.into(new SimpleTarget<Bitmap>() {
@Override
public void onResourceReady(Bitmap resource, GlideAnimation<? super Bitmap> glideAnimation) {
view.setImageBitmap(resource);
}
});
}
public static void withSrc(Context context, int src, final ImageView imageView) {
Glide.with(context)
.load(src)
// .placeholder(src)
.into(imageView);
}
}
| apache-2.0 |
aebruno/fusim | src/main/java/edu/buffalo/fusim/IntraChromGenerator.java | 2767 | /*
* Copyright 2012 Andrew E. Bruno <aebruno2@buffalo.edu>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package edu.buffalo.fusim;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import cern.colt.list.IntArrayList;
import cern.jet.random.sampling.RandomSamplingAssistant;
/**
 * {@link FusionGenerator} that produces intra-chromosomal fusions: for each
 * fusion event a single chromosome is chosen at random and both partner genes
 * are drawn from it.
 */
public class IntraChromGenerator implements FusionGenerator {

    private Log logger = LogFactory.getLog(IntraChromGenerator.class);

    private GeneSelector selector;
    private GeneSelectionMethod method;
    private List<String[]> filters;

    /** Human chromosome names eligible for intra-chromosomal fusion events. */
    private static String[] chroms = new String[]{
        "chr1","chr2","chr3","chr4","chr5","chr6","chr7","chr8",
        "chr9","chr10","chr11","chr12","chr13","chr14","chr15","chr16",
        "chr17","chr18","chr19","chr20","chr21","chr22","chrX","chrY"
    };

    /**
     * Generates {@code nFusions} fusion genes, each combining
     * {@code genesPerFusion} genes from one randomly picked chromosome, by
     * delegating to {@link RandomGenerator} with a per-iteration chromosome
     * filter applied to both fusion partners.
     */
    public List<FusionGene> generate(int nFusions, int genesPerFusion) {
        Random random = new Random();

        RandomGenerator delegate = new RandomGenerator();
        delegate.setGeneSelector(selector);
        delegate.setGeneSelectionMethod(method);

        List<FusionGene> result = new ArrayList<FusionGene>();
        for (int i = 0; i < nFusions; i++) {
            // Pick one chromosome and constrain both partner genes to it.
            String chrom = chroms[random.nextInt(chroms.length)];
            List<String[]> chromFilter = new ArrayList<String[]>();
            chromFilter.add(new String[]{chrom});
            chromFilter.add(new String[]{chrom});
            delegate.setFilters(chromFilter);
            result.addAll(delegate.generate(1, genesPerFusion));
        }
        return result;
    }

    public void setGeneSelector(GeneSelector selector) {
        this.selector = selector;
    }

    public GeneSelector getGeneSelector() {
        return selector;
    }

    public void setGeneSelectionMethod(GeneSelectionMethod method) {
        this.method = method;
    }

    public GeneSelectionMethod getGeneSelectionMethod() {
        return method;
    }

    public void setFilters(List<String[]> filters) {
        this.filters = filters;
    }

    public List<String[]> getFilters() {
        return filters;
    }
}
| apache-2.0 |
gentics/mesh | mdm/api/src/main/java/com/gentics/mesh/core/data/node/field/HibDisplayField.java | 337 | package com.gentics.mesh.core.data.node.field;
import com.gentics.mesh.core.data.HibField;
/**
 * Represents a field that can be used as a display field.
 *
 * <p>Implemented by field types whose value can be rendered as a plain string;
 * implementations decide how that string is derived.
 */
public interface HibDisplayField extends HibField {
/**
 * Gets the string representation of the field.
 *
 * @return Display field value
 */
String getDisplayName();
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-forecast/src/main/java/com/amazonaws/services/forecast/model/transform/UntagResourceResultJsonUnmarshaller.java | 1603 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.forecast.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.forecast.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * UntagResourceResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceResultJsonUnmarshaller implements Unmarshaller<UntagResourceResult, JsonUnmarshallerContext> {
/**
 * Unmarshalls an UntagResource response. The result carries no payload
 * fields, so the JSON context is not consumed at all.
 */
public UntagResourceResult unmarshall(JsonUnmarshallerContext context) throws Exception {
UntagResourceResult untagResourceResult = new UntagResourceResult();
return untagResourceResult;
}
private static UntagResourceResultJsonUnmarshaller instance;
/**
 * Returns the shared unmarshaller instance, creating it on first use.
 * NOTE(review): the lazy init is not synchronized; at worst concurrent callers
 * create duplicate (stateless) instances, which appears benign — confirm this
 * matches the SDK-wide convention.
 */
public static UntagResourceResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new UntagResourceResultJsonUnmarshaller();
return instance;
}
}
| apache-2.0 |
visionarts/power-jambda | power-jambda-core/src/main/java/com/visionarts/powerjambda/annotations/Route.java | 1232 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.visionarts.powerjambda.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import com.visionarts.powerjambda.cors.CorsConfiguration;
import com.visionarts.powerjambda.cors.DefaultCorsConfiguration;
import com.visionarts.powerjambda.http.HttpMethod;
/**
 * Maps the annotated action class to a resource path and the HTTP methods it
 * accepts, with an optional CORS configuration.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface Route {
// Resource path handled by the annotated action.
String resourcePath();
// HTTP methods accepted on the resource path.
HttpMethod[] methods();
// CORS policy applied to the route; defaults to DefaultCorsConfiguration.
Class<? extends CorsConfiguration> cors() default DefaultCorsConfiguration.class;
}
| apache-2.0 |
horiam/ResourceManager | Soapful/src/main/java/org/horiam/ResourceManager/webapp/soapful/TaskWS.java | 2913 | /*
* Copyright (C) 2014 Horia Musat
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.horiam.ResourceManager.webapp.soapful;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.jws.WebService;
import org.horiam.ResourceManager.exceptions.AuthorisationException;
import org.horiam.ResourceManager.exceptions.RecordNotFoundException;
import org.horiam.ResourceManager.model.Task;
import org.horiam.ResourceManager.services.TaskService;
import org.horiam.ResourceManager.soap.MessageHolderBean;
import org.horiam.ResourceManager.soap.ResourceManagerFault;
import org.horiam.ResourceManager.soap.TaskSEI;
@Stateless
@WebService(serviceName = "TaskWS",
targetNamespace = "http://ResourceManagerNS/Tasks",
endpointInterface = "org.horiam.ResourceManager.soap.TaskSEI")
public class TaskWS implements TaskSEI {
// Class name used for java.util.logging entering/exiting trace records below.
private static final String CLASS_NAME = TaskWS.class.getName();
private static final Logger log = Logger.getLogger(CLASS_NAME);
// Business delegate; every web-service operation below is a logged wrapper around it.
@EJB
private TaskService taskService;
/** Lists all tasks. */
@Override
public List<Task> list() {
log.entering(CLASS_NAME, "list");
List<Task> ret = taskService.list();
log.exiting(CLASS_NAME, "list", ret);
return ret;
}
/** Returns whether a task with the given id exists. */
@Override
public boolean exists(String id) {
log.entering(CLASS_NAME, "exists", new Object[] { id });
boolean ret = taskService.exists(id);
log.exiting(CLASS_NAME, "exists", ret);
return ret;
}
/**
 * Fetches a task by id, translating service-layer authorization and
 * not-found exceptions into the declared SOAP fault so WS clients receive a
 * {@link ResourceManagerFault} instead of an undeclared exception.
 */
@Override
public Task get(String id) throws ResourceManagerFault {
log.entering(CLASS_NAME, "get", new Object[] { id });
try {
Task ret = taskService.get(id);
log.exiting(CLASS_NAME, "get", ret);
return ret;
} catch (AuthorisationException | RecordNotFoundException e) {
log.log(Level.FINEST, e.getMessage(), e);
ResourceManagerFault rmf = new ResourceManagerFault(e.getMessage(),
new MessageHolderBean());
log.throwing(CLASS_NAME, "get", rmf);
throw rmf;
}
}
/**
 * Deletes a task by id.
 * NOTE(review): unlike get(), no fault translation happens here — confirm
 * TaskService.delete cannot throw the exceptions translated above.
 */
@Override
public void delete(String id) {
log.entering(CLASS_NAME, "delete", new Object[] { id });
taskService.delete(id);
log.exiting(CLASS_NAME, "delete");
}
}
| apache-2.0 |
donglua/JZAndroidChart | chart/src/main/java/cn/jingzhuan/lib/chart/event/OnLoadMoreKlineListener.java | 146 | package cn.jingzhuan.lib.chart.event;
/**
 * Callback for requesting additional K-line (candlestick) data for the chart.
 */
@FunctionalInterface
public interface OnLoadMoreKlineListener {
/**
 * Invoked when more K-line data is needed.
 *
 * @param scrolledX the horizontal scroll position at the time of the request
 *                  (exact trigger condition is determined by the caller)
 */
void onLoadMoreKline(int scrolledX);
}
| apache-2.0 |
lettuce-io/lettuce-core | src/test/java/io/lettuce/test/resource/DefaultRedisClusterClient.java | 1739 | /*
* Copyright 2018-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.test.resource;
import io.lettuce.core.RedisURI;
import io.lettuce.core.cluster.ClusterClientOptions;
import io.lettuce.core.cluster.RedisClusterClient;
import io.lettuce.test.settings.TestSettings;
/**
 * Process-wide shared {@link RedisClusterClient} for integration tests,
 * created once and shut down via a JVM shutdown hook.
 *
 * @author Mark Paluch
 */
public class DefaultRedisClusterClient {
// Eagerly created singleton holder; the client itself is built in the private ctor.
private static final DefaultRedisClusterClient instance = new DefaultRedisClusterClient();
private RedisClusterClient redisClient;
private DefaultRedisClusterClient() {
redisClient = RedisClusterClient.create(RedisURI.Builder.redis(TestSettings.host(), TestSettings.port(900))
.withClientName("my-client").build());
// Tests never close the shared client themselves, so shut it down when the JVM exits.
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
FastShutdown.shutdown(redisClient);
}
});
}
/**
 * Do not close the client.
 *
 * <p>Note: every call resets the client to default {@link ClusterClientOptions},
 * discarding any options a previous caller applied.
 *
 * @return the default redis client for the tests.
 */
public static RedisClusterClient get() {
instance.redisClient.setOptions(ClusterClientOptions.create());
return instance.redisClient;
}
}
| apache-2.0 |
softindex/datakernel | examples/tutorials/uikernel-integration/src/main/java/WebappLauncher.java | 2681 | import com.google.gson.Gson;
import io.datakernel.config.Config;
import io.datakernel.config.ConfigModule;
import io.datakernel.di.annotation.Inject;
import io.datakernel.di.annotation.Provides;
import io.datakernel.di.module.Module;
import io.datakernel.eventloop.Eventloop;
import io.datakernel.http.AsyncHttpServer;
import io.datakernel.http.AsyncServlet;
import io.datakernel.http.RoutingServlet;
import io.datakernel.http.StaticServlet;
import io.datakernel.http.loader.StaticLoader;
import io.datakernel.launcher.Launcher;
import io.datakernel.service.ServiceGraphModule;
import io.datakernel.uikernel.UiKernelServlets;
import java.util.concurrent.Executor;
import static io.datakernel.config.ConfigConverters.ofInteger;
import static io.datakernel.config.ConfigConverters.ofString;
import static io.datakernel.di.module.Modules.combine;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
/**
 * DataKernel launcher wiring an HTTP server that serves static resources plus
 * a UiKernel REST CRUD endpoint for the person grid.
 */
public class WebappLauncher extends Launcher {
// Fallback port when the "port" config key is absent.
private static final int DEFAULT_PORT = 8080;
// Fallback classpath location of static assets when "resources" is absent.
private static final String DEFAULT_PATH_TO_RESOURCES = "/static";
@Inject
AsyncHttpServer server;
// Shared JSON (de)serializer for the UiKernel API servlet.
@Provides
Gson gson() {
return new Gson();
}
// Configuration loaded from configs.properties on the classpath.
@Provides
Config config() {
return Config.ofClassPathProperties("configs.properties");
}
@Provides
Eventloop eventloop() {
return Eventloop.create();
}
// Single-threaded executor used by the static-content loader.
@Provides
Executor executor() {
return newSingleThreadExecutor();
}
@Provides
StaticLoader staticLoader(Executor executor, Config config) {
return StaticLoader.ofClassPath(executor, config.get(ofString(), "resources", DEFAULT_PATH_TO_RESOURCES));
}
// Routes: static assets as the catch-all, the UiKernel CRUD API under /api/users.
@Provides
AsyncServlet servlet(StaticLoader staticLoader, Gson gson, PersonGridModel model, Config config) {
StaticServlet staticServlet = StaticServlet.create(staticLoader)
.withIndexHtml();
AsyncServlet usersApiServlet = UiKernelServlets.apiServlet(model, gson);
return RoutingServlet.create()
.map("/*", staticServlet) // serves request if no other servlet matches
.map("/api/users/*", usersApiServlet); // our rest crud servlet that would serve the grid
}
@Provides
AsyncHttpServer server(Eventloop eventloop, Config config, AsyncServlet servlet) {
return AsyncHttpServer.create(eventloop, servlet)
.withListenPort(config.get(ofInteger(), "port", DEFAULT_PORT));
}
@Override
protected Module getModule() {
return combine(
ServiceGraphModule.create(),
ConfigModule.create()
.withEffectiveConfigLogger());
}
// The launcher only blocks until shutdown; all work happens in the event loop.
@Override
protected void run() throws Exception {
awaitShutdown();
}
public static void main(String[] args) throws Exception {
WebappLauncher launcher = new WebappLauncher();
launcher.launch(args);
}
}
| apache-2.0 |
pmk2429/investickation | app/src/main/java/com/sfsu/image/BaseAlbumDirFactory.java | 542 | package com.sfsu.image;
import android.os.Environment;
import java.io.File;
/**
 * Holds the path to the base album directory where the images are stored in the phone's storage.
 */
public final class BaseAlbumDirFactory extends AlbumStorageDirFactory {
// Standard storage location for digital camera files
private static final String CAMERA_DIR = "/dcim/";
/**
 * Returns the album directory under external storage's DCIM folder, i.e.
 * {@code <external-storage>/dcim/<albumName>}. The directory is not created
 * here; callers must create it if needed.
 */
public File getAlbumStorageDir(String albumName) {
return new File(Environment.getExternalStorageDirectory() + CAMERA_DIR + albumName);
}
}
| apache-2.0 |
NotFound403/WePay | src/main/java/cn/felord/wepay/ali/sdk/api/request/KoubeiMarketingDataDishdiagnoseBatchqueryRequest.java | 4881 | package cn.felord.wepay.ali.sdk.api.request;
import java.util.Map;
import cn.felord.wepay.ali.sdk.api.AlipayRequest;
import cn.felord.wepay.ali.sdk.api.internal.util.AlipayHashMap;
import cn.felord.wepay.ali.sdk.api.response.KoubeiMarketingDataDishdiagnoseBatchqueryResponse;
import cn.felord.wepay.ali.sdk.api.AlipayObject;
/**
* ALIPAY API: koubei.marketing.data.dishdiagnose.batchquery request
*
* @author auto create
* @version $Id: $Id
*/
public class KoubeiMarketingDataDishdiagnoseBatchqueryRequest implements AlipayRequest<KoubeiMarketingDataDishdiagnoseBatchqueryResponse> {
private AlipayHashMap udfParams; // add user-defined text parameters
private String apiVersion="1.0";
/**
 * Business content: queries recommended-dish diagnoses by condition
 * (JSON payload of the koubei.marketing.data.dishdiagnose.batchquery call).
 */
private String bizContent;
/**
 * <p>Setter for the field <code>bizContent</code>.</p>
 *
 * @param bizContent a {@link java.lang.String} object.
 */
public void setBizContent(String bizContent) {
this.bizContent = bizContent;
}
/**
 * <p>Getter for the field <code>bizContent</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getBizContent() {
return this.bizContent;
}
private String terminalType;
private String terminalInfo;
private String prodCode;
private String notifyUrl;
private String returnUrl;
private boolean needEncrypt=false;
private AlipayObject bizModel=null;
/**
 * <p>Getter for the field <code>notifyUrl</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getNotifyUrl() {
return this.notifyUrl;
}
/** {@inheritDoc} */
public void setNotifyUrl(String notifyUrl) {
this.notifyUrl = notifyUrl;
}
/**
 * <p>Getter for the field <code>returnUrl</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getReturnUrl() {
return this.returnUrl;
}
/** {@inheritDoc} */
public void setReturnUrl(String returnUrl) {
this.returnUrl = returnUrl;
}
/**
 * <p>Getter for the field <code>apiVersion</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getApiVersion() {
return this.apiVersion;
}
/** {@inheritDoc} */
public void setApiVersion(String apiVersion) {
this.apiVersion = apiVersion;
}
/** {@inheritDoc} */
public void setTerminalType(String terminalType){
this.terminalType=terminalType;
}
/**
 * <p>Getter for the field <code>terminalType</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getTerminalType(){
return this.terminalType;
}
/** {@inheritDoc} */
public void setTerminalInfo(String terminalInfo){
this.terminalInfo=terminalInfo;
}
/**
 * <p>Getter for the field <code>terminalInfo</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getTerminalInfo(){
return this.terminalInfo;
}
/** {@inheritDoc} */
public void setProdCode(String prodCode) {
this.prodCode=prodCode;
}
/**
 * <p>Getter for the field <code>prodCode</code>.</p>
 *
 * @return a {@link java.lang.String} object.
 */
public String getProdCode() {
return this.prodCode;
}
/**
 * <p>getApiMethodName.</p>
 *
 * @return the fixed Alipay gateway method name for this request.
 */
public String getApiMethodName() {
return "koubei.marketing.data.dishdiagnose.batchquery";
}
/**
 * <p>getTextParams.</p>
 *
 * <p>Builds the text parameter map sent to the gateway: the biz_content
 * payload plus any user-defined parameters added via putOtherTextParam.</p>
 *
 * @return a {@link java.util.Map} object.
 */
public Map<String, String> getTextParams() {
AlipayHashMap txtParams = new AlipayHashMap();
txtParams.put("biz_content", this.bizContent);
if(udfParams != null) {
txtParams.putAll(this.udfParams);
}
return txtParams;
}
/**
 * <p>putOtherTextParam.</p>
 *
 * <p>Adds a user-defined text parameter; the backing map is created lazily.</p>
 *
 * @param key a {@link java.lang.String} object.
 * @param value a {@link java.lang.String} object.
 */
public void putOtherTextParam(String key, String value) {
if(this.udfParams == null) {
this.udfParams = new AlipayHashMap();
}
this.udfParams.put(key, value);
}
/**
 * <p>getResponseClass.</p>
 *
 * @return a {@link java.lang.Class} object.
 */
public Class<KoubeiMarketingDataDishdiagnoseBatchqueryResponse> getResponseClass() {
return KoubeiMarketingDataDishdiagnoseBatchqueryResponse.class;
}
/**
 * <p>isNeedEncrypt.</p>
 *
 * @return a boolean.
 */
public boolean isNeedEncrypt() {
return this.needEncrypt;
}
/** {@inheritDoc} */
public void setNeedEncrypt(boolean needEncrypt) {
this.needEncrypt=needEncrypt;
}
/**
 * <p>Getter for the field <code>bizModel</code>.</p>
 *
 * @return a {@link cn.felord.wepay.ali.sdk.api.AlipayObject} object.
 */
public AlipayObject getBizModel() {
return this.bizModel;
}
/** {@inheritDoc} */
public void setBizModel(AlipayObject bizModel) {
this.bizModel=bizModel;
}
}
| apache-2.0 |
nucleusbox/nucleus-project | nucleus-data-rest/src/main/java/org/nucleusbox/data/rest/App.java | 187 | package org.nucleusbox.data.rest;
/**
* Hello world!
*
*/
/**
 * Minimal sanity-check entry point for the module: prints a greeting.
 */
public class App {

    /**
     * Application entry point.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        final String greeting = "Hello World!";
        System.out.println(greeting);
    }
}
| apache-2.0 |
androidthings/robocar | app/src/main/java/com/example/androidthings/robocar/TricolorLed.java | 3980 | /*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.androidthings.robocar;
import android.support.annotation.IntDef;
import android.util.Log;
import com.example.androidthings.robocar.shared.model.AdvertisingInfo.LedColor;
import com.google.android.things.pio.Gpio;
import com.google.android.things.pio.PeripheralManager;
import java.io.IOException;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.HashMap;
import java.util.Map;
/**
 * Driver for a three-pin RGB LED wired to three GPIO pins.
 * Colors are encoded as a 3-bit mask (bit0 = red, bit1 = green, bit2 = blue),
 * see the {@code RED}..{@code WHITE} constants below.
 * <p>
 * NOTE(review): the pin logic assumes a common-anode LED (LOW activates a
 * channel, see {@link #setColor}); confirm against the actual wiring.
 */
public class TricolorLed implements AutoCloseable {

    private static final String TAG = "TricolorLed";

    // 3-bit color mask: blue/green/red from most- to least-significant bit.
    public static final int OFF = 0;
    public static final int RED = 1; // __R
    public static final int GREEN = 2; // _G_
    public static final int YELLOW = 3; // _GR
    public static final int BLUE = 4; // B__
    public static final int MAGENTA = 5; // B_R
    public static final int CYAN = 6; // BG_
    public static final int WHITE = 7; // BGR

    @IntDef({OFF, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE})
    @Retention(RetentionPolicy.SOURCE)
    public @interface Tricolor{}

    // Translation table from the shared AdvertisingInfo.LedColor enum to the
    // local 3-bit encoding. OFF has no LedColor counterpart on purpose.
    private static final Map<LedColor, Integer> MAP;
    static {
        MAP = new HashMap<>();
        MAP.put(LedColor.RED, RED);
        MAP.put(LedColor.GREEN, GREEN);
        MAP.put(LedColor.YELLOW, YELLOW);
        MAP.put(LedColor.BLUE, BLUE);
        MAP.put(LedColor.MAGENTA, MAGENTA);
        MAP.put(LedColor.CYAN, CYAN);
        MAP.put(LedColor.WHITE, WHITE);
    }

    /**
     * Maps a shared {@link LedColor} to the local 3-bit color constant.
     * Unknown (or unmapped) colors fall back to {@link #OFF}.
     */
    public static @Tricolor int ledColorToTricolor(LedColor color) {
        Integer v = MAP.get(color);
        return v == null ? OFF : v;
    }

    private Gpio mGpioRed;
    private Gpio mGpioGreen;
    private Gpio mGpioBlue;

    private @Tricolor int mColor = OFF;

    /**
     * Opens the three GPIO pins driving the LED channels.
     * Pins that fail to open are left {@code null}; the setters below
     * tolerate that so the remaining channels keep working.
     */
    public TricolorLed(String redPin, String greenPin, String bluePin) {
        mGpioRed = createGpio(redPin);
        mGpioGreen = createGpio(greenPin);
        mGpioBlue = createGpio(bluePin);
    }

    // Opens a single pin as output, initially HIGH (= channel off for
    // common-anode wiring). Returns null on failure instead of throwing.
    private Gpio createGpio(String pin) {
        try {
            Gpio gpio = PeripheralManager.getInstance().openGpio(pin);
            gpio.setDirection(Gpio.DIRECTION_OUT_INITIALLY_HIGH);
            return gpio;
        } catch (IOException e) {
            Log.e(TAG, "Error creating GPIO for pin " + pin, e);
        }
        return null;
    }

    /** Turns the LED off and releases all three GPIO pins. */
    @Override
    public void close() throws Exception {
        setColor(OFF);
        closeGpio(mGpioRed);
        closeGpio(mGpioGreen);
        closeGpio(mGpioBlue);
        mGpioRed = mGpioGreen = mGpioBlue = null;
    }

    // Best-effort close; errors are logged, not propagated.
    private void closeGpio(Gpio gpio) {
        if (gpio != null) {
            try {
                gpio.close();
            } catch (IOException e) {
                Log.e(TAG, "Error closing gpio", e);
            }
        }
    }

    /** @return the last color set via {@link #setColor} (masked to 3 bits). */
    public @Tricolor int getColor() {
        return mColor;
    }

    /**
     * Drives the three channels to display the given color mask.
     * Values outside 0..7 are masked down to their 3 low bits.
     */
    public void setColor(@Tricolor int color) {
        // only care about the 3 LSBs
        mColor = color & WHITE;
        // Common-Anode uses LOW to activate the color, so unset bits are set HIGH
        setGpioValue(mGpioRed, (color & 1) == 0);
        setGpioValue(mGpioGreen, (color & 2) == 0);
        setGpioValue(mGpioBlue, (color & 4) == 0);
    }

    // Writes one pin, silently skipping pins that failed to open or to write.
    private void setGpioValue(Gpio gpio, boolean value) {
        if (gpio != null) {
            try {
                gpio.setValue(value);
            } catch (IOException ignored) {
            }
        }
    }
}
| apache-2.0 |
Zoctan/MVP-TestsInfoApp | app/src/main/java/com/zoctan/solar/user/UserJsonUtils.java | 923 | package com.zoctan.solar.user;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.zoctan.solar.beans.UserBean;
import com.zoctan.solar.utils.JsonUtils;
import com.zoctan.solar.utils.LogUtils;
/**
* 将JSON转换为User对象
*/
/**
 * Utility for converting raw JSON text into a {@link UserBean}.
 */
public class UserJsonUtils {

    private final static String TAG = "UserJsonUtils";

    /**
     * Parses the given JSON string and deserializes it into a user bean.
     *
     * @param res raw JSON text describing a user
     * @return the parsed {@link UserBean}, or {@code null} if parsing fails
     */
    public static UserBean readJsonUserBeans(String res) {
        try {
            // Parse the raw text into a JSON tree, then map it onto the bean.
            JsonParser parser = new JsonParser();
            JsonObject root = parser.parse(res).getAsJsonObject();
            return JsonUtils.deserialize(root, UserBean.class);
        } catch (Exception e) {
            // Malformed input is logged and reported to the caller as null.
            LogUtils.e(TAG, "将JSON转换为User对象发生错误" , e);
            return null;
        }
    }
}
| apache-2.0 |
sproshev/tcity | app/src/main/java/com/tcity/android/ui/info/BuildArtifactsAdapter.java | 8275 | /*
* Copyright 2014 Semyon Proshev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tcity.android.ui.info;
import android.content.Context;
import android.text.format.Formatter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.tcity.android.R;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
/**
 * ListView adapter for a build's artifacts. Each artifact renders as one of
 * three row types — directory, archive, or plain file — chosen from which of
 * its hrefs are present (see {@link #getItemViewType}). Clicks are forwarded
 * to a {@link BuildArtifactListener}.
 */
class BuildArtifactsAdapter extends BaseAdapter {

    @NotNull
    private final Context myContext;

    @NotNull
    private final LayoutInflater myInflater;

    @NotNull
    private final BuildArtifactListener myListener;

    @NotNull
    private List<BuildArtifact> myData = Collections.emptyList();

    BuildArtifactsAdapter(@NotNull Context context, @NotNull BuildArtifactListener listener) {
        super();

        myContext = context;
        myInflater = LayoutInflater.from(context);
        myListener = listener;
    }

    /**
     * Replaces the backing artifact list.
     * NOTE(review): does not call notifyDataSetChanged(); callers appear to be
     * responsible for refreshing the view — confirm at the call sites.
     */
    void setData(@NotNull List<BuildArtifact> data) {
        myData = data;
    }

    @Override
    public int getCount() {
        return myData.size();
    }

    @Override
    public BuildArtifact getItem(int position) {
        return myData.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    /**
     * Classifies an artifact by its hrefs:
     * children only = directory (0), children + content = archive (1),
     * content only = file (2). Neither href set is invalid.
     */
    @Override
    public int getItemViewType(int position) {
        BuildArtifact artifact = getItem(position);

        if (artifact.childrenHref != null) {
            if (artifact.contentHref == null) {
                return 0; // dir
            } else {
                return 1; // archive
            }
        } else {
            if (artifact.contentHref == null) {
                throw new IllegalStateException(
                        "Invalid build artifact: " +
                                "content href and children href can't be null at the same time"
                );
            } else {
                return 2; // file
            }
        }
    }

    @Override
    public int getViewTypeCount() {
        return 3;
    }

    @Override
    public View getView(int position, @Nullable View convertView, @NotNull ViewGroup parent) {
        switch (getItemViewType(position)) {
            case 0:
                return getDirView(position, convertView, parent);
            case 1:
                return getArchiveView(position, convertView, parent);
            case 2:
                return getFileView(position, convertView, parent);
            default:
                throw new IllegalStateException(
                        "Unexpected item view type: " + getItemViewType(position)
                );
        }
    }

    // Renders a directory row: name only, the whole row opens the directory.
    // Uses the standard view-holder pattern to avoid findViewById on recycle.
    @NotNull
    private View getDirView(int position, @Nullable View convertView, @NotNull ViewGroup parent) {
        if (convertView == null) {
            convertView = myInflater.inflate(R.layout.build_artifact_dir_item, parent, false);

            convertView.setTag(
                    new DirViewHolder(
                            (TextView) convertView.findViewById(R.id.build_artifact_name)
                    )
            );
        }

        final BuildArtifact artifact = getItem(position);
        DirViewHolder holder = (DirViewHolder) convertView.getTag();

        holder.name.setText(artifact.name);
        holder.name.setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(@NotNull View v) {
                        myListener.onDescriptionClick(artifact);
                    }
                }
        );

        return convertView;
    }

    // Renders an archive row: name + size, the description opens the archive
    // contents and the download button fetches the archive itself.
    @NotNull
    private View getArchiveView(int position,
                                @Nullable View convertView,
                                @NotNull ViewGroup parent) {
        if (convertView == null) {
            convertView = myInflater.inflate(R.layout.build_artifact_archive_item, parent, false);

            convertView.setTag(
                    new ArchiveOrFileViewHolder(
                            (LinearLayout) convertView.findViewById(R.id.build_artifact_description),
                            (TextView) convertView.findViewById(R.id.build_artifact_name),
                            (TextView) convertView.findViewById(R.id.build_artifact_size),
                            (ImageButton) convertView.findViewById(R.id.build_artifact_dl)
                    )
            );
        }

        final BuildArtifact artifact = getItem(position);
        ArchiveOrFileViewHolder holder = (ArchiveOrFileViewHolder) convertView.getTag();

        holder.name.setText(artifact.name);
        holder.size.setText(Formatter.formatShortFileSize(myContext, artifact.size));
        holder.description.setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(@NotNull View v) {
                        myListener.onDescriptionClick(artifact);
                    }
                }
        );
        holder.dl.setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(@NotNull View v) {
                        myListener.onDownloadClick(artifact);
                    }
                }
        );

        return convertView;
    }

    // Renders a plain file row: name + size, only the download button is wired.
    @NotNull
    private View getFileView(int position, @Nullable View convertView, @NotNull ViewGroup parent) {
        if (convertView == null) {
            convertView = myInflater.inflate(R.layout.build_artifact_file_item, parent, false);

            convertView.setTag(
                    new ArchiveOrFileViewHolder(
                            (LinearLayout) convertView.findViewById(R.id.build_artifact_description),
                            (TextView) convertView.findViewById(R.id.build_artifact_name),
                            (TextView) convertView.findViewById(R.id.build_artifact_size),
                            (ImageButton) convertView.findViewById(R.id.build_artifact_dl)
                    )
            );
        }

        final BuildArtifact artifact = getItem(position);
        ArchiveOrFileViewHolder holder = (ArchiveOrFileViewHolder) convertView.getTag();

        holder.name.setText(artifact.name);
        holder.size.setText(Formatter.formatShortFileSize(myContext, artifact.size));
        holder.dl.setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(@NotNull View v) {
                        myListener.onDownloadClick(artifact);
                    }
                }
        );

        return convertView;
    }

    // View holder for directory rows (name only).
    private static class DirViewHolder {

        @NotNull
        public final TextView name;

        private DirViewHolder(@NotNull TextView name) {
            this.name = name;
        }
    }

    // View holder shared by archive and file rows (same child views).
    private static class ArchiveOrFileViewHolder {

        @NotNull
        public final LinearLayout description;

        @NotNull
        public final TextView name;

        @NotNull
        public final TextView size;

        @NotNull
        public final ImageButton dl;

        private ArchiveOrFileViewHolder(@NotNull LinearLayout description,
                                        @NotNull TextView name,
                                        @NotNull TextView size,
                                        @NotNull ImageButton dl) {
            this.description = description;
            this.name = name;
            this.size = size;
            this.dl = dl;
        }
    }
}
| apache-2.0 |
aaronwalker/camel | camel-core/src/main/java/org/apache/camel/processor/SendProcessor.java | 7418 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.AsyncProducerCallback;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Producer;
import org.apache.camel.ProducerCallback;
import org.apache.camel.Traceable;
import org.apache.camel.impl.InterceptSendToEndpoint;
import org.apache.camel.impl.ProducerCache;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.AsyncProcessorHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Processor for forwarding exchanges to an endpoint destination.
*
* @version
*/
public class SendProcessor extends ServiceSupport implements AsyncProcessor, Traceable {
    protected final transient Logger log = LoggerFactory.getLogger(getClass());
    protected final CamelContext camelContext;
    // Single-slot producer cache; created lazily in doStart().
    protected ProducerCache producerCache;
    protected Endpoint destination;
    // Optional MEP override applied per exchange; null means "keep as-is".
    protected ExchangePattern pattern;

    public SendProcessor(Endpoint destination) {
        ObjectHelper.notNull(destination, "destination");
        this.destination = destination;
        this.camelContext = destination.getCamelContext();
        ObjectHelper.notNull(this.camelContext, "camelContext");
    }

    public SendProcessor(Endpoint destination, ExchangePattern pattern) {
        this(destination);
        this.pattern = pattern;
    }

    @Override
    public String toString() {
        return "sendTo(" + destination + (pattern != null ? " " + pattern : "") + ")";
    }

    /**
     * Points this processor at a new destination endpoint. The cached
     * producer for the old destination is purged so the next send creates
     * a producer for the new one.
     */
    public void setDestination(Endpoint destination) {
        this.destination = destination;
        // destination changed so purge the cache
        if (producerCache != null) {
            producerCache.purge();
        }
    }

    public String getTraceLabel() {
        return URISupport.sanitizeUri(destination.getEndpointUri());
    }

    /**
     * Synchronously sends the exchange to the destination, temporarily
     * applying the configured MEP (if any) and restoring the exchange's
     * original MEP afterwards — even when the producer throws.
     */
    public void process(final Exchange exchange) throws Exception {
        if (!isStarted()) {
            throw new IllegalStateException("SendProcessor has not been started: " + this);
        }

        // we should preserve existing MEP so remember old MEP
        // if you want to permanently to change the MEP then use .setExchangePattern in the DSL
        final ExchangePattern existingPattern = exchange.getPattern();

        // send the exchange to the destination using a producer
        producerCache.doInProducer(destination, exchange, pattern, new ProducerCallback<Exchange>() {
            public Exchange doInProducer(Producer producer, Exchange exchange, ExchangePattern pattern) throws Exception {
                exchange = configureExchange(exchange, pattern);
                log.debug(">>>> {} {}", destination, exchange);
                try {
                    producer.process(exchange);
                } finally {
                    // restore previous MEP
                    exchange.setPattern(existingPattern);
                }
                return exchange;
            }
        });
    }

    /**
     * Asynchronous variant of {@link #process(Exchange)}; the original MEP
     * is restored in the done-callback before the caller's callback fires.
     *
     * @return true if processing completed synchronously
     */
    public boolean process(Exchange exchange, final AsyncCallback callback) {
        if (!isStarted()) {
            throw new IllegalStateException("SendProcessor has not been started: " + this);
        }

        // we should preserve existing MEP so remember old MEP
        // if you want to permanently to change the MEP then use .setExchangePattern in the DSL
        final ExchangePattern existingPattern = exchange.getPattern();

        // send the exchange to the destination using a producer
        return producerCache.doInAsyncProducer(destination, exchange, pattern, callback, new AsyncProducerCallback() {
            public boolean doInAsyncProducer(Producer producer, AsyncProcessor asyncProducer, final Exchange exchange,
                                             ExchangePattern pattern, final AsyncCallback callback) {
                final Exchange target = configureExchange(exchange, pattern);
                log.debug(">>>> {} {}", destination, exchange);
                return AsyncProcessorHelper.process(asyncProducer, target, new AsyncCallback() {
                    public void done(boolean doneSync) {
                        // restore previous MEP
                        target.setPattern(existingPattern);
                        // signal we are done
                        callback.done(doneSync);
                    }
                });
            }
        });
    }

    public Endpoint getDestination() {
        return destination;
    }

    public ExchangePattern getPattern() {
        return pattern;
    }

    // Applies the MEP override (if configured) and records the target
    // endpoint URI on the exchange for traceability.
    protected Exchange configureExchange(Exchange exchange, ExchangePattern pattern) {
        if (pattern != null) {
            exchange.setPattern(pattern);
        }
        // set property which endpoint we send to
        exchange.setProperty(Exchange.TO_ENDPOINT, destination.getEndpointUri());
        return exchange;
    }

    protected void doStart() throws Exception {
        if (producerCache == null) {
            // use a single producer cache as we need to only hold reference for one destination
            producerCache = new ProducerCache(this, camelContext, 1);
            // do not add as service as we do not want to manage the producer cache
        }
        ServiceHelper.startService(producerCache);

        // the destination could since have been intercepted by a interceptSendToEndpoint so we got to
        // lookup this before we can use the destination
        Endpoint lookup = camelContext.hasEndpoint(destination.getEndpointKey());
        if (lookup instanceof InterceptSendToEndpoint) {
            if (log.isDebugEnabled()) {
                log.debug("Intercepted sending to {} -> {}",
                        URISupport.sanitizeUri(destination.getEndpointUri()), URISupport.sanitizeUri(lookup.getEndpointUri()));
            }
            destination = lookup;
        }
        // warm up the producer by starting it so we can fail fast if there was a problem
        // however must start endpoint first
        ServiceHelper.startService(destination);
        producerCache.startProducer(destination);
    }

    protected void doStop() throws Exception {
        ServiceHelper.stopService(producerCache);
    }

    protected void doShutdown() throws Exception {
        ServiceHelper.stopAndShutdownService(producerCache);
    }
}
| apache-2.0 |
georgfedermann/compilers | straightline/src/main/java/org/poormanscastle/studies/compilers/utils/grammartools/ast/CodePosition.java | 2789 | package org.poormanscastle.studies.compilers.utils.grammartools.ast;
import org.apache.commons.lang3.StringUtils;
/**
* tracks the position of the current token represented by the current AST item within the source code.
* Using the CodePosition, the parser and other compiler components can give enhanced error messages
* when detecting problems within an AstItem.
* <p/>
* Created by georg on 14.01.16.
*/
/**
 * Immutable source-code position of a token / AST item: begin and end
 * line/column pairs. Compiler components use it to produce precise error
 * messages for a given AST node.
 */
public final class CodePosition {

    private final int beginLine;
    private final int beginColumn;
    private final int endLine;
    private final int endColumn;

    /**
     * Builds a position for a symbol found in source code.
     */
    private CodePosition(int beginLine, int beginColumn, int endLine, int endColumn) {
        this.beginLine = beginLine;
        this.beginColumn = beginColumn;
        this.endLine = endLine;
        this.endColumn = endColumn;
    }

    /**
     * Builds a position representing the very start of a source file
     * (all coordinates zero).
     */
    private CodePosition() {
        this(0, 0, 0, 0);
    }

    @Override
    public String toString() {
        return "begin line/column " + beginLine + "/" + beginColumn
                + "; end line/column " + endLine + "/" + endColumn;
    }

    /**
     * Extracts a position from a JavaCC-generated Token object via
     * reflection (the Token type is generated, so it cannot be referenced
     * statically here).
     *
     * @param token a JavaCC Token carrying beginLine/beginColumn/endLine/endColumn fields
     */
    public static CodePosition createFromToken(Object token) {
        return new CodePosition(
                readIntField(token, "beginLine"),
                readIntField(token, "beginColumn"),
                readIntField(token, "endLine"),
                readIntField(token, "endColumn"));
    }

    // Reflectively reads one public int field from the token; any
    // reflection failure is rethrown unchecked, matching the original
    // contract of createFromToken.
    private static int readIntField(Object token, String fieldName) {
        try {
            return (Integer) token.getClass().getDeclaredField(fieldName).get(token);
        } catch (IllegalAccessException | NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns a position representing the very start of a source file.
     */
    public static CodePosition createZeroPosition() {
        return new CodePosition();
    }

    public int getBeginLine() {
        return beginLine;
    }

    public int getBeginColumn() {
        return beginColumn;
    }

    public int getEndLine() {
        return endLine;
    }

    public int getEndColumn() {
        return endColumn;
    }
}
| apache-2.0 |
mufaddalq/cloudstack-datera-driver | engine/storage/integration-test/test/org/apache/cloudstack/storage/test/DirectAgentManagerSimpleImpl.java | 10528 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cloudstack.storage.test;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import com.cloud.host.Host;
import com.cloud.host.Status;
import com.cloud.utils.fsm.NoTransitionException;
import com.cloud.utils.fsm.StateMachine2;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager;
import com.cloud.agent.Listener;
import com.cloud.agent.StartupCommandProcessor;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.Command;
import com.cloud.agent.api.SetupCommand;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.manager.AgentAttache;
import com.cloud.agent.manager.Commands;
import com.cloud.dc.ClusterDetailsDao;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.exception.ConnectionException;
import com.cloud.exception.DiscoveryException;
import com.cloud.exception.OperationTimedoutException;
import com.cloud.host.HostEnvironment;
import com.cloud.host.HostVO;
import com.cloud.host.Status.Event;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.kvm.resource.LibvirtComputingResource;
import com.cloud.hypervisor.vmware.VmwareServerDiscoverer;
import com.cloud.hypervisor.xen.resource.XcpOssResource;
import com.cloud.resource.ServerResource;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.exception.CloudRuntimeException;
/**
 * Simplified AgentManager used by the storage integration tests: instead of
 * talking to remote agents it instantiates hypervisor {@link ServerResource}
 * objects in-process (per host, cached in {@link #hostResourcesMap}) and
 * dispatches commands to them directly. Most of the AgentManager contract is
 * left as no-op stubs.
 */
public class DirectAgentManagerSimpleImpl extends ManagerBase implements AgentManager {
    private static final Logger logger = Logger.getLogger(DirectAgentManagerSimpleImpl.class);
    // Lazily populated cache of per-host in-process resources.
    private Map<Long, ServerResource> hostResourcesMap = new HashMap<Long, ServerResource>();
    @Inject
    HostDao hostDao;
    @Inject
    ClusterDao clusterDao;
    @Inject
    ClusterDetailsDao clusterDetailsDao;
    @Inject
    HostDao _hostDao;
    protected StateMachine2<Status, Event, Host> _statusStateMachine = Status.getStateMachine();

    @Override
    public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public boolean start() {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public boolean stop() {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public String getName() {
        // TODO Auto-generated method stub
        return null;
    }

    /** Convenience wrapper around {@link #send} that swallows exceptions and returns null. */
    @Override
    public Answer easySend(Long hostId, Command cmd) {
        try {
            return this.send(hostId, cmd);
        } catch (AgentUnavailableException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (OperationTimedoutException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Creates and caches an in-process ServerResource for the host, based on
     * its hypervisor type (XenServer, KVM or VMware), then issues a
     * SetupCommand against it.
     * NOTE(review): if every branch fails, {@code resource} stays null; the
     * map then caches null and {@code resource.executeRequest(cmd)} below
     * throws a NullPointerException. Credentials are hard-coded test values.
     */
    protected void loadResource(Long hostId) {
        HostVO host = hostDao.findById(hostId);
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("guid", host.getGuid());
        params.put("ipaddress", host.getPrivateIpAddress());
        params.put("username", "root");
        params.put("password", "password");
        params.put("zone", String.valueOf(host.getDataCenterId()));
        params.put("pod", String.valueOf(host.getPodId()));

        ServerResource resource = null;
        if (host.getHypervisorType() == HypervisorType.XenServer) {
            resource = new XcpOssResource();
            try {
                resource.configure(host.getName(), params);

            } catch (ConfigurationException e) {
                logger.debug("Failed to load resource:" + e.toString());
            }
        } else if (host.getHypervisorType() == HypervisorType.KVM) {
            resource = new LibvirtComputingResource();
            try {
                params.put("public.network.device", "cloudbr0");
                params.put("private.network.device", "cloudbr0");
                resource.configure(host.getName(), params);
            } catch (ConfigurationException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        } else if (host.getHypervisorType() == HypervisorType.VMware) {
            // VMware resources are produced by the discoverer using the
            // cluster's stored vCenter URL and credentials.
            ClusterVO cluster = clusterDao.findById(host.getClusterId());
            String url = clusterDetailsDao.findDetail(cluster.getId(), "url").getValue();
            URI uri;
            try {
                uri = new URI(url);
                String userName = clusterDetailsDao.findDetail(cluster.getId(), "username").getValue();
                String password = clusterDetailsDao.findDetail(cluster.getId(), "password").getValue();
                VmwareServerDiscoverer discover = new VmwareServerDiscoverer();

                Map<? extends ServerResource, Map<String, String>> resources = discover.find(host.getDataCenterId(),
                        host.getPodId(), host.getClusterId(), uri, userName, password, null);
                for (Map.Entry<? extends ServerResource, Map<String, String>> entry : resources.entrySet()) {
                    resource = entry.getKey();
                }
                if (resource == null) {
                    throw new CloudRuntimeException("can't find resource");
                }
            } catch (DiscoveryException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (URISyntaxException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }

        hostResourcesMap.put(hostId, resource);
        HostEnvironment env = new HostEnvironment();
        SetupCommand cmd = new SetupCommand(env);
        cmd.setNeedSetup(true);

        resource.executeRequest(cmd);
    }

    /**
     * Dispatches a command to the host's in-process resource, lazily loading
     * the resource on first use. Returns null when no resource is available.
     * Synchronized to keep lazy loading single-threaded.
     */
    @Override
    public synchronized Answer send(Long hostId, Command cmd) throws AgentUnavailableException,
            OperationTimedoutException {
        ServerResource resource = hostResourcesMap.get(hostId);
        if (resource == null) {
            loadResource(hostId);
            resource = hostResourcesMap.get(hostId);
        }

        if (resource == null) {
            return null;
        }

        Answer answer = resource.executeRequest(cmd);
        return answer;
    }

    @Override
    public Answer[] send(Long hostId, Commands cmds) throws AgentUnavailableException, OperationTimedoutException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public Answer[] send(Long hostId, Commands cmds, int timeout) throws AgentUnavailableException,
            OperationTimedoutException {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public long send(Long hostId, Commands cmds, Listener listener) throws AgentUnavailableException {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int registerForHostEvents(Listener listener, boolean connections, boolean commands, boolean priority) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public int registerForInitialConnects(StartupCommandProcessor creator, boolean priority) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public void unregisterForHostEvents(int id) {
        // TODO Auto-generated method stub

    }

    @Override
    public boolean executeUserRequest(long hostId, Event event) throws AgentUnavailableException {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public Answer sendTo(Long dcId, HypervisorType type, Command cmd) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public boolean tapLoadingAgents(Long hostId, TapAgentsAction action) {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public AgentAttache handleDirectConnectAgent(HostVO host, StartupCommand[] cmds, ServerResource resource,
            boolean forRebalance) throws ConnectionException {
        // TODO Auto-generated method stub
        return null;
    }

    /** Drives the host status state machine; returns true on failed transition as well (exception is swallowed). */
    @Override
    public boolean agentStatusTransitTo(HostVO host, Event e, long msId) {
        try {
            return _statusStateMachine.transitTo(host, e, host.getId(), _hostDao);
        } catch (NoTransitionException e1) {
            e1.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
        }
        return true;
    }

    @Override
    public AgentAttache findAttache(long hostId) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void disconnectWithoutInvestigation(long hostId, Event event) {
        // TODO Auto-generated method stub

    }

    @Override
    public void pullAgentToMaintenance(long hostId) {
        // TODO Auto-generated method stub

    }

    @Override
    public void pullAgentOutMaintenance(long hostId) {
        // TODO Auto-generated method stub

    }

    @Override
    public boolean reconnect(long hostId) {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public Answer sendToSSVM(Long dcId, Command cmd) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void disconnectWithInvestigation(long hostId, Event event) {
        // TODO Auto-generated method stub

    }
}
| apache-2.0 |
00wendi00/MyProject | W_eclipse2_1/网络编程/src/code2/Client_TCPTest.java | 1992 | /**
* ÎļþÃû£ºClient_TCPTest.java
* ÏîÄ¿-°ü£ºÍøÂç±à³Ì - code2
* ×÷ÕߣºÕÅÎĵÏ
* ´´½¨ÈÕÆÚ£º2014-6-15
*/
package code2;
/**
* @author ÕÅÎĵÏ
* ÀàÃû£ºClient_TCPTest
* Àà˵Ã÷£º
*/
//Server_TCPTest.java
import java.io.*;
import java.net.*;
/**
 * Simple TCP client used for socket-programming exercises: connects to a
 * server on localhost, sends the handshake string {@code "OK"}, reads and
 * prints one line from the server, then closes the connection.
 */
public class Client_TCPTest
{
    private Socket clientSk = null;      // socket connected to the server
    private BufferedReader input = null; // input stream (data from the server)
    private PrintWriter output = null;   // output stream (data to the server)

    /**
     * Connects to a server on {@code 127.0.0.1} at the given port and runs
     * the handshake/read/close sequence.
     *
     * @param port TCP port the server is listening on
     */
    public Client_TCPTest(int port)
    {
        try
        {
            clientSk = new Socket("127.0.0.1", port); // 127.0.0.1 is localhost
            input = new BufferedReader(new InputStreamReader(
                    clientSk.getInputStream())); // obtain the input stream
            output = new PrintWriter(new BufferedWriter(new OutputStreamWriter(
                    clientSk.getOutputStream())), true); // auto-flush output
            // NOTE(review): both streams are always non-null here (the stream
            // constructors above either succeed or throw), so the else branch
            // below is effectively unreachable; kept for behavioral parity.
            if (input != null && output != null)
            {
                System.out.println("Óë·þÎñÆ÷Á¬½Ó³É¹¦£¡"); // "connected to server" (GBK)
                sendResponse("OK");
            }
            else
            {
                System.out.println("Óë·þÎñÆ÷Á¬½Óʧ°Ü£¡"); // "connection failed" (GBK)
            }
            getRequest();
        }
        catch (IOException e)
        {
            System.out.println("´íÎó£º" + e.getMessage()); // "error:" (GBK)
        }
        finally
        {
            // Bug fix: when the connection attempt fails, the streams are
            // still null. The original code called close() on them
            // unconditionally, throwing a NullPointerException out of this
            // finally block and masking the real connection error.
            try
            {
                if (output != null)
                {
                    output.close();
                }
                if (input != null)
                {
                    input.close();
                }
                if (clientSk != null)
                {
                    clientSk.close();
                }
            }
            catch (Exception e)
            {
                // best-effort cleanup: ignore secondary close failures
            }
        }
    }

    /**
     * Reads one line sent by the server and echoes it to stdout.
     * Terminates the JVM if the stream cannot be read (legacy behavior,
     * preserved as-is).
     *
     * @return the line read from the server, or {@code null} at end of stream
     */
    public String getRequest()
    {
        String frmClt = null;
        try
        {
            frmClt = input.readLine();
            System.out.println(frmClt);
        }
        catch (Exception e)
        {
            System.out.println("ÎÞ·¨¶ÁÈ¡¶Ë¿Ú....."); // "cannot read port" (GBK)
            System.exit(0);
        }
        return frmClt;
    }

    /**
     * Sends one line of text to the server.
     *
     * @param message the text to transmit
     */
    public void sendResponse(String message)
    {
        try
        {
            output.println(message);
            output.flush(); // push the buffered output into the socket
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
yeastrc/msdapl | MS_LIBRARY/src/ed/mslib/MzInt.java | 348 | package ed.mslib;
public class MzInt {

    /** m/z value of this data point. */
    private double mz;
    /** Measured intensity of this data point. */
    private float intensity;

    /**
     * Creates a data point from an m/z value and its intensity.
     *
     * @param mz        the m/z value
     * @param intensity the measured intensity
     */
    public MzInt(double mz, float intensity) {
        this.mz = mz;
        this.intensity = intensity;
    }

    /** @return the m/z value */
    public double getmz() {
        return mz;
    }

    /** @return the intensity */
    public float getint() {
        return intensity;
    }

    /** Replaces the m/z value. */
    public void setmz(double m) {
        mz = m;
    }

    /** Replaces the intensity. */
    public void setint(float i) {
        intensity = i;
    }
}
| apache-2.0 |
TNG/ArchUnit | archunit/src/test/java/com/tngtech/archunit/core/importer/resolvers/testclasses/firstdependency/FirstDependency.java | 147 | package com.tngtech.archunit.core.importer.resolvers.testclasses.firstdependency;
public class FirstDependency {

    /**
     * Intentionally does nothing — presumably this type exists only as a
     * fixture to be resolved/imported by tests; confirm against its usages.
     */
    public void call() {
        // no behavior by design
    }
}
| apache-2.0 |
brandt/GridSphere | src/org/gridsphere/layout/event/PortletTitleBarEvent.java | 1943 | /*
* @author <a href="mailto:novotny@gridsphere.org">Jason Novotny</a>
* @version $Id$
*/
package org.gridsphere.layout.event;
import javax.portlet.PortletMode;
import javax.portlet.WindowState;
/**
* A <code>PortletTitleBarEvent</code> is created by a <code>PortletTitleBar</code>
* when a title bar event has been triggered.
*/
public interface PortletTitleBarEvent extends PortletComponentEvent {

    /**
     * TitleBarAction is an immutable value representing the window state and
     * portlet mode of the portlet title bar.
     */
    public static final class TitleBarAction implements ComponentAction {

        public static final TitleBarAction WINDOW_MODIFY = new TitleBarAction(1);
        public static final TitleBarAction MODE_MODIFY = new TitleBarAction(5);

        // Declared final: the class is documented as immutable, but the
        // field was previously mutable (and redundantly initialized to 0).
        private final int action;

        /**
         * Action cannot be instantiated outside of this class
         */
        private TitleBarAction(int action) {
            this.action = action;
        }

        public int getID() {
            return action;
        }
    }

    /**
     * Returns the portlet title bar mode
     *
     * @return the portlet title bar mode
     */
    public PortletMode getMode();

    /**
     * Returns the portlet title bar window state
     *
     * @return the portlet title bar window state
     */
    public WindowState getState();

    /**
     * Returns true if this title bar event signals a window state change
     *
     * @return true if this title bar event signals a window state change
     */
    public boolean hasWindowStateAction();

    /**
     * Returns true if this title bar event signals a portlet mode change
     *
     * @return true if this title bar event signals a portlet mode change
     */
    public boolean hasPortletModeAction();

    /**
     * Returns the portlet title bar component id
     *
     * @return the portlet title bar component id
     */
    public int getID();
}
| apache-2.0 |
majetideepak/arrow | java/adapter/avro/src/main/java/org/apache/arrow/AvroToArrow.java | 1863 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow;
import java.io.IOException;
import org.apache.arrow.memory.BaseAllocator;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.Decoder;
/**
* Utility class to convert Avro objects to columnar Arrow format objects.
*/
public class AvroToArrow {

  /**
   * Fetch the data from {@link GenericDatumReader} and convert it to Arrow objects.
   *
   * @param schema avro schema describing the records to read.
   * @param decoder avro decoder positioned at the start of the encoded data.
   * @param allocator Memory allocator to use.
   * @return Arrow Data Objects {@link VectorSchemaRoot}
   * @throws IOException if reading from the decoder fails.
   */
  public static VectorSchemaRoot avroToArrow(Schema schema, Decoder decoder, BaseAllocator allocator)
      throws IOException {
    // Only the schema is null-checked; decoder and allocator are assumed
    // non-null — TODO confirm callers guarantee this.
    Preconditions.checkNotNull(schema, "Avro schema object can not be null");
    // Build the Arrow schema and its vector container first, then fill the
    // vectors from the Avro decoder.
    VectorSchemaRoot root = VectorSchemaRoot.create(
        AvroToArrowUtils.avroToArrowSchema(schema), allocator);
    AvroToArrowUtils.avroToArrowVectors(decoder, root);
    return root;
  }
}
| apache-2.0 |
lmjacksoniii/hazelcast | hazelcast/src/test/java/com/hazelcast/nio/tcp/TcpIpConnectionManager_ConnectMemberBaseTest.java | 3887 | package com.hazelcast.nio.tcp;
import com.hazelcast.nio.Connection;
import com.hazelcast.nio.ConnectionType;
import com.hazelcast.test.AssertTask;
import org.junit.Test;
import java.net.UnknownHostException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/**
* A test that verifies if two members can connect to each other.
*/
public abstract class TcpIpConnectionManager_ConnectMemberBaseTest extends TcpIpConnection_AbstractTest {
@Test
public void testConnectionCount() {
connManagerA.start();
connManagerB.start();
connect(connManagerA, addressB);
assertEquals(1, connManagerA.getConnectionCount());
assertEquals(1, connManagerB.getConnectionCount());
}
// ================== getOrConnect ======================================================
@Test
public void getOrConnect_whenNotConnected_thenEventuallyConnectionAvailable() throws UnknownHostException {
startAllConnectionManagers();
Connection c = connManagerA.getOrConnect(addressB);
assertNull(c);
connect(connManagerA, addressB);
assertEquals(1, connManagerA.getActiveConnectionCount());
assertEquals(1, connManagerB.getActiveConnectionCount());
}
@Test
public void getOrConnect_whenAlreadyConnectedSameConnectionReturned() throws UnknownHostException {
startAllConnectionManagers();
Connection c1 = connect(connManagerA, addressB);
Connection c2 = connManagerA.getOrConnect(addressB);
assertSame(c1, c2);
}
// ================== destroy ======================================================
@Test
public void destroyConnection_whenActive() throws Exception {
startAllConnectionManagers();
final TcpIpConnection connAB = connect(connManagerA, addressB);
final TcpIpConnection connBA = connect(connManagerB, addressA);
connAB.close(null, null);
assertIsDestroyed(connAB);
assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
assertIsDestroyed(connBA);
}
});
}
@Test
public void destroyConnection_whenAlreadyDestroyed_thenCallIgnored() throws Exception {
startAllConnectionManagers();
connManagerA.getOrConnect(addressB);
TcpIpConnection c = connect(connManagerA, addressB);
// first destroy
c.close(null, null);
// second destroy
c.close(null, null);
assertIsDestroyed(c);
}
public void assertIsDestroyed(TcpIpConnection connection) {
TcpIpConnectionManager connectionManager = connection.getConnectionManager();
assertFalse(connection.isAlive());
assertNull(connectionManager.getConnection(connection.getEndPoint()));
}
// ================== connection ======================================================
@Test
public void connect() throws UnknownHostException {
startAllConnectionManagers();
TcpIpConnection connAB = connect(connManagerA, addressB);
assertTrue(connAB.isAlive());
assertEquals(ConnectionType.MEMBER, connAB.getType());
assertEquals(1, connManagerA.getActiveConnectionCount());
TcpIpConnection connBA = (TcpIpConnection) connManagerB.getConnection(addressA);
assertTrue(connBA.isAlive());
assertEquals(ConnectionType.MEMBER, connBA.getType());
assertEquals(1, connManagerB.getActiveConnectionCount());
assertEquals(connManagerA.getIoService().getThisAddress(), connBA.getEndPoint());
assertEquals(connManagerB.getIoService().getThisAddress(), connAB.getEndPoint());
}
}
| apache-2.0 |
prembasumatary/graphhopper | web/src/test/java/com/graphhopper/http/GraphHopperServletIT.java | 14772 | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.http;
import com.fasterxml.jackson.databind.JsonNode;
import com.graphhopper.GHRequest;
import com.graphhopper.GHResponse;
import com.graphhopper.GraphHopperAPI;
import com.graphhopper.PathWrapper;
import com.graphhopper.util.CmdArgs;
import com.graphhopper.util.Helper;
import com.graphhopper.util.details.PathDetail;
import com.graphhopper.util.exceptions.PointOutOfBoundsException;
import com.graphhopper.util.shapes.GHPoint;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
* @author Peter Karich
*/
public class GraphHopperServletIT extends BaseServletTester {
    // Working directory for the imported Andorra graph; removed after the suite.
    private static final String DIR = "./target/andorra-gh/";

    @AfterClass
    public static void cleanUp() {
        // Delete the graph directory and stop the embedded Jetty server.
        Helper.removeDir(new File(DIR));
        shutdownJetty(true);
    }

    @Before
    public void setUp() {
        // Boot a Jetty-hosted GraphHopper instance over the Andorra OSM extract.
        CmdArgs args = new CmdArgs().
                put("config", "../config-example.properties").
                put("prepare.min_network_size", "0").
                put("prepare.min_one_way_network_size", "0").
                put("datareader.file", "../core/files/andorra.osm.pbf").
                put("graph.location", DIR);
        setUpJetty(args);
    }

    // Simple two-point route; sanity-checks the reported distance range.
    @Test
    public void testBasicQuery() throws Exception {
        JsonNode json = query("point=42.554851,1.536198&point=42.510071,1.548128", 200);
        JsonNode infoJson = json.get("info");
        assertFalse(infoJson.has("errors"));
        JsonNode path = json.get("paths").get(0);
        double distance = path.get("distance").asDouble();
        assertTrue("distance wasn't correct:" + distance, distance > 9000);
        assertTrue("distance wasn't correct:" + distance, distance < 9500);
    }

    @Test
    public void testQueryWithDirections() throws Exception {
        // Note, in general specifying directions does not work with CH, but this is an example where it works
        JsonNode json = query("point=42.496696,1.499323&point=42.497257,1.501501&heading=240&heading=240&ch.force_heading=true", 200);
        JsonNode infoJson = json.get("info");
        assertFalse(infoJson.has("errors"));
        JsonNode path = json.get("paths").get(0);
        double distance = path.get("distance").asDouble();
        assertTrue("distance wasn't correct:" + distance, distance > 960);
        assertTrue("distance wasn't correct:" + distance, distance < 970);
    }

    @Test
    public void testQueryWithStraightVia() throws Exception {
        // Note, in general specifying straightvia does not work with CH, but this is an example where it works
        JsonNode json = query(
                "point=42.534133,1.581473&point=42.534781,1.582149&point=42.535042,1.582514&pass_through=true", 200);
        JsonNode infoJson = json.get("info");
        assertFalse(infoJson.has("errors"));
        JsonNode path = json.get("paths").get(0);
        double distance = path.get("distance").asDouble();
        assertTrue("distance wasn't correct:" + distance, distance > 320);
        assertTrue("distance wasn't correct:" + distance, distance < 325);
    }

    // Coordinates in the response must be rounded to 6 decimal places.
    @Test
    public void testJsonRounding() throws Exception {
        JsonNode json = query("point=42.554851234,1.536198&point=42.510071,1.548128&points_encoded=false", 200);
        JsonNode cson = json.get("paths").get(0).get("points");
        assertTrue("unexpected precision!", cson.toString().contains("[1.536374,42.554839]"));
    }

    // Requesting elevation against a 2D-only setup must be a 400 error.
    @Test
    public void testFailIfElevationRequestedButNotIncluded() throws Exception {
        JsonNode json = query("point=42.554851234,1.536198&point=42.510071,1.548128&points_encoded=false&elevation=true", 400);
        assertTrue(json.has("message"));
        assertEquals("Elevation not supported!", json.get("message").asText());
        assertEquals("Elevation not supported!", json.get("hints").get(0).get("message").asText());
    }

    // End-to-end routing through the GraphHopperWeb client API, including
    // via-points and turn instructions.
    @Test
    public void testGraphHopperWeb() throws Exception {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));
        GHResponse rsp = hopper.route(new GHRequest(42.554851, 1.536198, 42.510071, 1.548128));
        assertFalse(rsp.getErrors().toString(), rsp.hasErrors());
        assertTrue(rsp.getErrors().toString(), rsp.getErrors().isEmpty());

        PathWrapper arsp = rsp.getBest();
        assertTrue("distance wasn't correct:" + arsp.getDistance(), arsp.getDistance() > 9000);
        assertTrue("distance wasn't correct:" + arsp.getDistance(), arsp.getDistance() < 9500);

        rsp = hopper.route(new GHRequest().
                addPoint(new GHPoint(42.554851, 1.536198)).
                addPoint(new GHPoint(42.531896, 1.553278)).
                addPoint(new GHPoint(42.510071, 1.548128)));
        assertTrue(rsp.getErrors().toString(), rsp.getErrors().isEmpty());
        arsp = rsp.getBest();
        assertTrue("distance wasn't correct:" + arsp.getDistance(), arsp.getDistance() > 20000);
        assertTrue("distance wasn't correct:" + arsp.getDistance(), arsp.getDistance() < 21000);

        List<Map<String, Object>> instructions = arsp.getInstructions().createJson();
        assertEquals(26, instructions.size());
        assertEquals("Continue onto la Callisa", instructions.get(0).get("text"));
        assertEquals("At roundabout, take exit 2", instructions.get(4).get("text"));
        assertEquals(true, instructions.get(4).get("exited"));
        assertEquals(false, instructions.get(24).get("exited"));
    }

    // Per-edge path details (average_speed, edge_id, time); the time details
    // must sum to the overall route time.
    @Test
    public void testPathDetails() throws Exception {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));
        GHRequest request = new GHRequest(42.554851, 1.536198, 42.510071, 1.548128);
        request.setPathDetails(Arrays.asList("average_speed", "edge_id", "time"));
        GHResponse rsp = hopper.route(request);
        assertFalse(rsp.getErrors().toString(), rsp.hasErrors());
        assertTrue(rsp.getErrors().toString(), rsp.getErrors().isEmpty());
        Map<String, List<PathDetail>> pathDetails = rsp.getBest().getPathDetails();
        assertFalse(pathDetails.isEmpty());
        assertTrue(pathDetails.containsKey("average_speed"));
        assertTrue(pathDetails.containsKey("edge_id"));
        assertTrue(pathDetails.containsKey("time"));
        List<PathDetail> averageSpeedList = pathDetails.get("average_speed");
        assertEquals(9, averageSpeedList.size());
        assertEquals(30.0, averageSpeedList.get(0).getValue());
        assertEquals(14, averageSpeedList.get(0).getLength());
        assertEquals(60.0, averageSpeedList.get(1).getValue());
        assertEquals(5, averageSpeedList.get(1).getLength());
        List<PathDetail> edgeIdDetails = pathDetails.get("edge_id");
        assertEquals(77, edgeIdDetails.size());
        assertEquals(3759L, edgeIdDetails.get(0).getValue());
        assertEquals(2, edgeIdDetails.get(0).getLength());
        assertEquals(881L, edgeIdDetails.get(1).getValue());
        assertEquals(8, edgeIdDetails.get(1).getLength());
        long expectedTime = rsp.getBest().getTime();
        long actualTime = 0;
        List<PathDetail> timeDetails = pathDetails.get("time");
        for (PathDetail pd: timeDetails) {
            actualTime += (Long) pd.getValue();
        }
        assertEquals(expectedTime, actualTime);
    }

    // Degenerate route (start == end) with path details must not error.
    @Test
    public void testPathDetailsSamePoint() throws Exception {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));
        GHRequest request = new GHRequest(42.554851, 1.536198, 42.554851, 1.536198);
        request.setPathDetails(Arrays.asList("average_speed", "edge_id", "time"));
        GHResponse rsp = hopper.route(request);
        assertFalse(rsp.getErrors().toString(), rsp.hasErrors());
        assertTrue(rsp.getErrors().toString(), rsp.getErrors().isEmpty());
    }

    // Unroutable pair (disconnected subnetworks) must report errors.
    @Test
    public void testPathDetailsNoConnection() throws Exception {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));
        GHRequest request = new GHRequest(42.542078, 1.45586, 42.537841, 1.439981);
        request.setPathDetails(Arrays.asList("average_speed"));
        GHResponse rsp = hopper.route(request);
        assertTrue(rsp.getErrors().toString(), rsp.hasErrors());
    }

    // Same details feature exercised through the raw HTTP/JSON endpoint.
    @Test
    public void testPathDetailsWithoutGraphHopperWeb() throws Exception {
        JsonNode json = query("point=42.554851,1.536198&point=42.510071,1.548128&details=average_speed", 200);
        JsonNode infoJson = json.get("info");
        assertFalse(infoJson.has("errors"));
        JsonNode path = json.get("paths").get(0);
        assertTrue(path.has("details"));
        JsonNode details = path.get("details");
        assertTrue(details.has("average_speed"));
        JsonNode averageSpeed = details.get("average_speed");
        assertEquals(30.0, averageSpeed.get(0).get(2).asDouble(), .01);
        assertEquals(14, averageSpeed.get(0).get(1).asInt());
        assertEquals(60.0, averageSpeed.get(1).get(2).asDouble(), .01);
        assertEquals(19, averageSpeed.get(1).get(1).asInt());
    }

    // The turn_description hint toggles the "Continue onto ..." prefix.
    @Test
    public void testInitInstructionsWithTurnDescription() {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));
        GHRequest request = new GHRequest(42.554851, 1.536198, 42.510071, 1.548128);
        GHResponse rsp = hopper.route(request);
        assertEquals("Continue onto Carrer Antoni Fiter i Rossell", rsp.getBest().getInstructions().get(3).getName());

        request.getHints().put("turn_description", false);
        rsp = hopper.route(request);
        assertEquals("Carrer Antoni Fiter i Rossell", rsp.getBest().getInstructions().get(3).getName());
    }

    // Invalid requests must surface as typed exceptions in the client API.
    @Test
    public void testGraphHopperWebRealExceptions() {
        GraphHopperAPI hopper = new com.graphhopper.api.GraphHopperWeb();
        assertTrue(hopper.load(getTestRouteAPIUrl()));

        // IllegalArgumentException (Wrong Request)
        GHResponse rsp = hopper.route(new GHRequest());
        assertFalse("Errors expected but not found.", rsp.getErrors().isEmpty());

        Throwable ex = rsp.getErrors().get(0);
        assertTrue("Wrong exception found: " + ex.getClass().getName()
                + ", IllegalArgumentException expected.", ex instanceof IllegalArgumentException);

        // IllegalArgumentException (Wrong Points)
        rsp = hopper.route(new GHRequest(0.0, 0.0, 0.0, 0.0));
        assertFalse("Errors expected but not found.", rsp.getErrors().isEmpty());

        List<Throwable> errs = rsp.getErrors();
        for (int i = 0; i < errs.size(); i++) {
            assertEquals(((PointOutOfBoundsException) errs.get(i)).getPointIndex(), i);
        }

        // IllegalArgumentException (Vehicle not supported)
        rsp = hopper.route(new GHRequest(42.554851, 1.536198, 42.510071, 1.548128).setVehicle("SPACE-SHUTTLE"));
        assertFalse("Errors expected but not found.", rsp.getErrors().isEmpty());

        ex = rsp.getErrors().get(0);
        assertTrue("Wrong exception found: " + ex.getClass().getName()
                + ", IllegalArgumentException expected.", ex instanceof IllegalArgumentException);
    }

    // GPX output: default export contains both route and track.
    @Test
    public void testGPX() throws Exception {
        String str = queryString("point=42.554851,1.536198&point=42.510071,1.548128&type=gpx", 200);
        // For backward compatibility we currently export route and track.
        assertTrue(str.contains("<gh:distance>1841.8</gh:distance>"));
        assertFalse(str.contains("<wpt lat=\"42.51003\" lon=\"1.548188\"> <name>Finish!</name></wpt>"));
        assertTrue(str.contains("<trkpt lat=\"42.554839\" lon=\"1.536374\"><time>"));
    }

    // GPX output with route and waypoints disabled: only the track remains.
    @Test
    public void testGPXWithExcludedRouteSelection() throws Exception {
        String str = queryString("point=42.554851,1.536198&point=42.510071,1.548128&type=gpx&gpx.route=false&gpx.waypoints=false", 200);
        assertFalse(str.contains("<gh:distance>115.1</gh:distance>"));
        assertFalse(str.contains("<wpt lat=\"42.51003\" lon=\"1.548188\"> <name>Finish!</name></wpt>"));
        assertTrue(str.contains("<trkpt lat=\"42.554839\" lon=\"1.536374\"><time>"));
    }

    // GPX output with track + waypoints but no route section.
    @Test
    public void testGPXWithTrackAndWaypointsSelection() throws Exception {
        String str = queryString("point=42.554851,1.536198&point=42.510071,1.548128&type=gpx&gpx.track=true&gpx.route=false&gpx.waypoints=true", 200);
        assertFalse(str.contains("<gh:distance>115.1</gh:distance>"));
        assertTrue(str.contains("<wpt lat=\"42.51003\" lon=\"1.548188\"> <name>arrive at destination</name></wpt>"));
        assertTrue(str.contains("<trkpt lat=\"42.554839\" lon=\"1.536374\"><time>"));
    }

    // Errors for GPX requests must be reported as XML, not HTML or JSON.
    @Test
    public void testGPXWithError() throws Exception {
        String str = queryString("point=42.554851,1.536198&type=gpx", 400);
        assertFalse(str, str.contains("<html>"));
        assertFalse(str, str.contains("{"));
        assertTrue("Expected error but was: " + str, str.contains("<message>At least 2 points have to be specified, but was:1</message>"));
        assertTrue("Expected error but was: " + str, str.contains("<hints><error details=\"java"));
    }

    // Malformed point/heading combinations must yield descriptive 400 errors.
    @Test
    public void testUndefinedPointHeading() throws Exception {
        JsonNode json = query("point=undefined&heading=0", 400);
        assertEquals("You have to pass at least one point", json.get("message").asText());

        json = query("point=42.554851,1.536198&point=undefined&heading=0&heading=0", 400);
        assertEquals("The number of 'heading' parameters must be <= 1 or equal to the number of points (1)", json.get("message").asText());
    }
}
| apache-2.0 |
punkhorn/camel-upstream | components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvMarshalHeaderWithCustomMarshallFactoryTest.java | 5713 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.csv;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.camel.Exchange;
import org.apache.camel.Produce;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.support.ObjectHelper;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
* <b>Camel</b> based test cases for {@link CsvDataFormat}.
*/
public class CsvMarshalHeaderWithCustomMarshallFactoryTest extends CamelTestSupport {
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Produce(uri = "direct:start")
private ProducerTemplate producerTemplate;
private File outputFile;
@Override
protected void doPreSetup() throws Exception {
outputFile = new File(folder.newFolder(), "output.csv");
}
@Test
public void testSendBody() throws IOException {
Map<String, String> body = new LinkedHashMap<>();
body.put("first_name", "John");
body.put("last_name", "Doe");
String fileName = outputFile.getName();
assertEquals("output.csv", fileName);
producerTemplate.sendBodyAndHeader(body, Exchange.FILE_NAME, fileName);
body = new LinkedHashMap<>();
body.put("first_name", "Max");
body.put("last_name", "Mustermann");
producerTemplate.sendBodyAndHeader(body, Exchange.FILE_NAME, fileName);
List<String> lines = Files.lines(Paths.get(outputFile.toURI()))
.filter(l -> l.trim().length() > 0).collect(Collectors.toList());
assertEquals(3, lines.size());
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
String uri = String.format("file:%s?charset=utf-8&fileExist=Append", outputFile.getParentFile().getAbsolutePath());
from("direct:start").marshal(createCsvDataFormat()).to(uri);
}
};
}
private static CsvDataFormat createCsvDataFormat() {
CsvDataFormat dataFormat = new CsvDataFormat();
dataFormat.setDelimiter('\t');
dataFormat.setTrim(true);
dataFormat.setIgnoreSurroundingSpaces(true);
dataFormat.setHeader((String[]) Arrays.asList("first_name", "last_name").toArray());
dataFormat.setMarshallerFactory(new CsvMarshallerFactory() {
@Override
public CsvMarshaller create(CSVFormat format, CsvDataFormat dataFormat) {
return new SinglePrinterCsvMarshaller(format);
}
});
return dataFormat;
}
//
// Helper classes
//
private static final class SinglePrinterCsvMarshaller extends CsvMarshaller {
private final CSVPrinter printer;
private SinglePrinterCsvMarshaller(CSVFormat format) {
super(format);
printer = createPrinter(format);
}
private static CSVPrinter createPrinter(CSVFormat format) {
try {
// Headers and header comments are written out in the constructor already.
return format.print(new StringBuilder());
} catch (IOException e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@SuppressWarnings("unchecked")
public void marshal(Exchange exchange, Object object, OutputStream outputStream) throws IOException {
Iterator<Map<String, String>> it = (Iterator<Map<String, String>>) ObjectHelper.createIterator(object);
synchronized (printer) {
while (it.hasNext()) {
printer.printRecord(getMapRecordValues(it.next()));
}
// Access the 'Appendable'
StringBuilder stringBuilder = (StringBuilder) printer.getOut();
outputStream.write(stringBuilder.toString().getBytes());
// Reset the 'Appendable' for the next exchange.
stringBuilder.setLength(0);
}
}
@Override
protected Iterable<?> getMapRecordValues(Map<?, ?> map) {
List<String> result = new ArrayList<>(map.size());
for (Object key : map.keySet()) {
result.add((String) map.get(key));
}
return result;
}
}
} | apache-2.0 |
redlink-gmbh/redlink-java-sdk | src/main/java/io/redlink/sdk/RedLink.java | 16549 | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.redlink.sdk;
import io.redlink.sdk.impl.analysis.AnalysisRequest;
import io.redlink.sdk.impl.analysis.model.Enhancements;
import io.redlink.sdk.impl.data.model.LDPathResult;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.Serializable;
import org.apache.marmotta.client.model.sparql.SPARQLResult;
import org.openrdf.model.Model;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
/**
* RedLink Client API
*
* @author sergio.fernandez@redlink.co
*/
public interface RedLink extends Serializable {
// Common parameter/key names shared by the sub-APIs.
// NOTE(review): presumably used when building request URLs/parameters —
// confirm against the client implementations.
String URI = "uri";
String IN = "in";
String OUT = "out";
/**
 * RedLink Analysis API. Any implementation of this interface must have a constructor
 * that receives a {@link Credentials} object, which will be used for each service request.
 */
interface Analysis extends Serializable {

    String PATH = "analysis";
    String ENHANCE = "enhance";
    String CONFIDENCE = "confidence";
    String SUMMARY = "summary";
    String THUMBNAIL = "thumbnail";
    String DEREF_FIELDS = "enhancer.engines.dereference.fields";
    String LDPATH = "enhancer.engines.dereference.ldpath";

    /**
     * Performs an analysis of the content included in the request, getting an
     * {@link Enhancements} object as result. The analysis result depends on the
     * configuration of the application associated with the {@link Credentials} in use.
     *
     * @param request {@link AnalysisRequest} containing the request parameters and the content to be enhanced
     * @return simplified RedLink enhancement structure
     */
    Enhancements enhance(AnalysisRequest request);

    /**
     * Performs an analysis of the content included in the request, getting as response an
     * instance of the {@link Class} passed by parameter. The current implementation supports <ul>
     * <li>{@link String}
     * <li>{@link InputStream} - make sure to {@link InputStream#close() close} the
     * stream after reading the contents from the response
     * <li>{@link Enhancements}
     * </ul>
     *
     * If {@link Enhancements} is passed, the request will assume RDF+XML as response format
     * and will parse the response to create the {@link Enhancements} object. If {@link String}
     * is passed as response type, the method will return the raw response in the format
     * specified by the {@link AnalysisRequest} request parameter.
     *
     * @param request {@link AnalysisRequest} containing the request parameters and the content to be enhanced
     * @param responseType {@link Class} of the response; only the types listed above are supported
     * @return an instance of the class passed by parameter wrapping the Analysis Service response
     */
    <T> T enhance(AnalysisRequest request, Class<T> responseType);
}
/**
 * RedLink LinkedData API. Any implementation of this interface must have a constructor that receives a
 * {@link Credentials} object which will be used for each service request.
 */
interface Data extends Serializable {

    // Service path and request parameter names used by implementations.
    String PATH = "data";
    String RESOURCE = "resource";
    String SPARQL = "sparql";
    String SELECT = "select";
    String UPDATE = "update";
    String LDPATH = "ldpath";
    String RELEASE = "release";
    String SPARQL_QUERY_MIME_TYPE = "application/sparql-query";

    /**
     * Imports an RDF {@link Model} into the selected dataset. The dataset must exist at the user's
     * RedLink account and must be configured for the user's RedLink application used in the request.
     *
     * @param data RDF {@link Model} to be imported
     * @param dataset name of the dataset where the data will be imported
     * @return flag indicating whether the import has been performed successfully
     * @throws RDFHandlerException if the RDF model cannot be processed
     */
    boolean importDataset(Model data, String dataset) throws RDFHandlerException;

    /**
     * Imports an RDF {@link Model} into the selected dataset. The dataset must exist at the user's
     * RedLink account and must be configured for the user's RedLink application used in the request.
     * If {@code cleanBefore} is {@code true}, the current dataset at the user's RedLink application
     * will be cleaned first.
     *
     * @param data RDF {@link Model} to be imported
     * @param dataset name of the dataset where the data will be imported
     * @param cleanBefore flag indicating whether the dataset must be cleaned before importing
     * @return flag indicating whether the import has been performed successfully
     * @throws RDFHandlerException if the RDF model cannot be processed
     */
    boolean importDataset(Model data, String dataset, boolean cleanBefore) throws RDFHandlerException;

    /**
     * Imports the model contained in the given {@link File} into the selected dataset. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param file {@link File} containing the RDF model to be imported
     * @param dataset name of the dataset where the data will be imported
     * @return flag indicating whether the import has been performed successfully
     * @throws FileNotFoundException if the file does not exist
     */
    boolean importDataset(File file, String dataset) throws FileNotFoundException;

    /**
     * Imports the model contained in the given {@link File} into the selected dataset. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request. If {@code cleanBefore} is {@code true}, the current dataset
     * at the user's RedLink application will be cleaned first.
     *
     * @param file {@link File} containing the RDF model to be imported
     * @param dataset name of the dataset where the data will be imported
     * @param cleanBefore flag indicating whether the dataset must be cleaned before importing
     * @return flag indicating whether the import has been performed successfully
     * @throws FileNotFoundException if the file does not exist
     */
    boolean importDataset(File file, String dataset, boolean cleanBefore) throws FileNotFoundException;

    /**
     * Imports the model contained in the given {@link InputStream} into the selected dataset. The
     * dataset must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param in {@link InputStream} containing the RDF model to be imported
     * @param format {@link RDFFormat} of the model contained in the stream
     * @param dataset name of the dataset where the data will be imported
     * @return flag indicating whether the import has been performed successfully
     */
    boolean importDataset(InputStream in, RDFFormat format, String dataset);

    /**
     * Imports the model contained in the given {@link InputStream} into the selected dataset. The
     * dataset must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request. If {@code cleanBefore} is {@code true}, the current dataset
     * at the user's RedLink application will be cleaned first.
     *
     * @param in {@link InputStream} containing the RDF model to be imported
     * @param format {@link RDFFormat} of the model contained in the stream
     * @param dataset name of the dataset where the data will be imported
     * @param cleanBefore flag indicating whether the dataset must be cleaned before importing
     * @return flag indicating whether the import has been performed successfully
     */
    boolean importDataset(InputStream in, RDFFormat format, String dataset, boolean cleanBefore);

    /**
     * Exports the user's dataset at their RedLink application to a local RDF {@link Model}. The
     * dataset must exist at the user's RedLink account and must be configured for the user's
     * RedLink application used in the request.
     *
     * @param dataset name of the dataset at the user's RedLink application to be exported
     * @return RDF {@link Model} representing the dataset
     */
    Model exportDataset(String dataset);

    /**
     * Cleans (deletes all the data in) the user's dataset. The dataset must exist at the user's
     * RedLink account and must be configured for the user's RedLink application used in the request.
     *
     * @param dataset name of the dataset at the user's RedLink application to be cleaned
     * @return flag indicating whether the dataset has been cleaned correctly
     */
    boolean cleanDataset(String dataset);

    /**
     * Gets resource data by its URI as an RDF {@link Model}.
     *
     * @param resource URI (identifier) of the resource
     * @return {@link Model} representing the resource and all its properties, or null if the resource is not found
     */
    Model getResource(String resource);

    /**
     * Gets resource data by its URI as an RDF {@link Model} from the user dataset passed by
     * parameter. The dataset must exist at the user's RedLink account and must be configured for
     * the user's RedLink application used in the request.
     *
     * @param resource URI (identifier) of the resource
     * @param dataset name of the dataset at the user's RedLink application where the resource will be looked up
     * @return {@link Model} representing the resource and all its properties
     */
    Model getResource(String resource, String dataset);

    /**
     * Imports a resource represented by an RDF {@link Model} into the selected dataset. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param resource URI (identifier) of the resource
     * @param data resource data as RDF {@link Model}
     * @param dataset name of the dataset at the user's RedLink application where the resource will be imported
     * @return flag indicating whether the import has been performed successfully
     */
    boolean importResource(String resource, Model data, String dataset);

    /**
     * Imports a resource represented by an RDF {@link Model} into the selected dataset. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request. If {@code cleanBefore} is {@code true}, the current resource
     * at the user's dataset will be deleted first.
     *
     * @param resource URI (identifier) of the resource
     * @param data resource data as RDF {@link Model}
     * @param dataset name of the dataset at the user's RedLink application where the resource will be imported
     * @param cleanBefore flag indicating whether the resource must be deleted before importing
     * @return flag indicating whether the import has been performed successfully
     */
    boolean importResource(String resource, Model data, String dataset, boolean cleanBefore);

    /**
     * Deletes a resource identified by its URI in the user dataset passed by parameter. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param resource URI (identifier) of the resource
     * @param dataset name of the dataset at the user's RedLink application where the resource will be deleted
     * @return flag indicating whether the deletion has been performed successfully
     */
    boolean deleteResource(String resource, String dataset);

    /**
     * Executes a SPARQL tuple query using the dataset passed by parameter as context. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param query SPARQL tuple query to be executed
     * @param dataset name of the dataset at the user's RedLink application where the query will be executed
     * @return result of the query as a {@link SPARQLResult} object
     */
    SPARQLResult sparqlTupleQuery(String query, String dataset);

    /**
     * @deprecated use {@link #sparqlTupleQuery(String, String)} instead
     */
    @Deprecated
    SPARQLResult sparqlSelect(String query, String dataset);

    /**
     * Executes a SPARQL tuple query using as context all the configured datasets at the user's
     * RedLink application.
     *
     * @param query SPARQL tuple query to be executed
     * @return result of the query as a {@link SPARQLResult} object
     */
    SPARQLResult sparqlTupleQuery(String query);

    /**
     * @deprecated use {@link #sparqlTupleQuery(String)} instead
     */
    @Deprecated
    SPARQLResult sparqlSelect(String query);

    /**
     * Executes a SPARQL graph query using the dataset passed by parameter as context. The dataset
     * must exist at the user's RedLink account and must be configured for the user's RedLink
     * application used in the request.
     *
     * @param query SPARQL graph query to be executed
     * @param dataset name of the dataset at the user's RedLink application where the query will be executed
     * @return result of the query as a {@link org.openrdf.model.Model} object
     */
    Model sparqlGraphQuery(String query, String dataset);

    /**
     * Executes a SPARQL graph query using as context all the configured datasets at the user's
     * RedLink application.
     *
     * @param query SPARQL graph query to be executed
     * @return result of the query as a {@link org.openrdf.model.Model} object
     */
    Model sparqlGraphQuery(String query);

    /**
     * Updates the dataset's resources using a SPARQL update query. The dataset must exist at the
     * user's RedLink account and must be configured for the user's RedLink application used in
     * the request.
     *
     * @param query SPARQL update query to be executed
     * @param dataset name of the dataset at the user's RedLink application where the query will be executed
     * @return flag indicating whether the update has been performed successfully
     */
    boolean sparqlUpdate(String query, String dataset);

    /**
     * Executes an LDPath program using the dataset passed by parameter as context. The dataset must
     * exist at the user's RedLink account and must be configured for the user's RedLink application
     * used in the request.
     *
     * @param uri URI of the context resource the program is evaluated against
     * @param dataset name of the dataset at the user's RedLink application where the program will be executed
     * @param program LDPath program to be executed
     * @return result of the program execution as a {@link LDPathResult} object
     */
    LDPathResult ldpath(String uri, String dataset, String program);

    /**
     * Executes an LDPath program using as context all the configured datasets at the user's
     * RedLink application.
     *
     * @param uri URI of the context resource the program is evaluated against
     * @param program LDPath program to be executed
     * @return result of the program execution as a {@link LDPathResult} object
     */
    LDPathResult ldpath(String uri, String program);

    /**
     * Releases the data currently stored in the dataset to be used later on for analysis purposes.
     *
     * @param dataset name of the dataset to release
     * @return result of the operation
     */
    boolean release(String dataset);
}
}
| apache-2.0 |
youngor/openclouddb | MyCat-Balance/src/main/java/com/talent/nio/handler/error/client/DefaultErrorPackageHandler.java | 2667 | /*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package com.talent.nio.handler.error.client;
import java.nio.channels.SocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.talent.nio.api.Nio;
import com.talent.nio.communicate.ChannelContext;
import com.talent.nio.communicate.util.NioUtils;
import com.talent.nio.handler.error.intf.ErrorPackageHandlerIntf;
/**
*
* @author 谭耀武
* @date 2012-08-09
*
*/
public class DefaultErrorPackageHandler implements ErrorPackageHandlerIntf
{
private static final int MAX_COUNT = 1;
private static final Logger log = LoggerFactory.getLogger(DefaultErrorPackageHandler.class);
private static DefaultErrorPackageHandler instance = new DefaultErrorPackageHandler();
public static DefaultErrorPackageHandler getInstance()
{
return instance;
}
private DefaultErrorPackageHandler()
{
}
@Override
public int handle(SocketChannel socketChannel, ChannelContext channelContext, String errorReason)
{
channelContext.getStatVo().getCountOfErrorPackage().incrementAndGet();
log.error("[" + "] received error package, reason is " + errorReason);
if (channelContext.getStatVo().getCountOfErrorPackage().get() >= MAX_COUNT)
{
channelContext.getStatVo().getCountOfErrorPackage().set(0);
Nio.getInstance().disconnect(channelContext, "received an error package");
}
return (int) channelContext.getStatVo().getCountOfErrorPackage().get();
}
} | apache-2.0 |
bwolf/quotes | src/main/java/de/spqrinfo/quotes/backend/security/service/PasswordResetException.java | 1361 | package de.spqrinfo.quotes.backend.security.service;
import de.spqrinfo.quotes.backend.security.beans.PasswordResetAttempt;
import de.spqrinfo.quotes.backend.security.util.LocalizableException;
/**
 * Signals a failure while processing a password reset, carrying the
 * {@link PasswordResetAttempt} that was being handled when the failure occurred.
 */
public class PasswordResetException extends LocalizableException {

    /** The reset attempt that was being processed when this exception was raised. */
    private final PasswordResetAttempt attempt;

    public PasswordResetException(final String messageKey, final PasswordResetAttempt passwordResetAttempt) {
        super(messageKey);
        this.attempt = passwordResetAttempt;
    }

    public PasswordResetException(final String message, final String messageKey, final PasswordResetAttempt passwordResetAttempt) {
        super(message, messageKey);
        this.attempt = passwordResetAttempt;
    }

    public PasswordResetException(final String message, final Throwable cause, final String messageKey, final PasswordResetAttempt passwordResetAttempt) {
        super(message, cause, messageKey);
        this.attempt = passwordResetAttempt;
    }

    public PasswordResetException(final Throwable cause, final String messageKey, final PasswordResetAttempt passwordResetAttempt) {
        super(cause, messageKey);
        this.attempt = passwordResetAttempt;
    }

    /** @return the {@link PasswordResetAttempt} associated with this failure */
    public PasswordResetAttempt getPasswordResetAttempt() {
        return attempt;
    }
}
| apache-2.0 |
pustike/pustike-eventbus | src/main/java/io/github/pustike/eventbus/SubscriberRegistry.java | 8950 | /*
* Copyright (C) 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.pustike.eventbus;
import java.lang.ref.WeakReference;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;
/**
 * Registry of subscribers to a single event bus.
 *
 * <p>Subscribers are bucketed by an integer key derived from the event type's class name
 * (and, for {@code TypedEvent}s, the type-argument name) — see
 * {@code computeParameterHashCode}. NOTE(review): distinct event types whose name hashes
 * collide would share a bucket — confirm this is acceptable for the supported event types.
 *
 * @author Colin Decker
 */
final class SubscriberRegistry {
    /**
     * All registered subscribers, indexed by event type.
     *
     * <p>The {@link CopyOnWriteArraySet} values make it easy and relatively lightweight to get an immutable snapshot
     * of all current subscribers to an event without any locking.
     */
    private final ConcurrentMap<Integer, CopyOnWriteArraySet<Subscriber>> subscribers;
    /**
     * The event bus this registry belongs to.
     */
    private final EventBus bus;
    /**
     * The cache for subscriberMethods and eventTypeHierarchy.
     */
    private final SubscriberLoader subscriberLoader;

    SubscriberRegistry(EventBus bus) {
        this(bus, null);
    }

    SubscriberRegistry(EventBus bus, SubscriberLoader subscriberLoader) {
        this.bus = Objects.requireNonNull(bus);
        this.subscribers = new ConcurrentHashMap<>();
        // Fall back to the default loader when none is supplied.
        this.subscriberLoader = subscriberLoader == null ? new DefaultSubscriberLoader() : subscriberLoader;
    }

    /**
     * Registers all subscriber methods on the given listener object.
     */
    void register(Object listener) {
        Map<Integer, List<Subscriber>> listenerMethods = findAllSubscribers(listener);
        for (Map.Entry<Integer, List<Subscriber>> entry : listenerMethods.entrySet()) {
            int hashCode = entry.getKey();
            Collection<Subscriber> eventMethodsInListener = entry.getValue();
            CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(hashCode);
            if (eventSubscribers == null) {
                // Racy create: putIfAbsent decides which thread's set wins; both threads
                // then add into the winning set.
                CopyOnWriteArraySet<Subscriber> newSet = new CopyOnWriteArraySet<>();
                eventSubscribers = firstNonNull(subscribers.putIfAbsent(hashCode, newSet), newSet);
            }
            eventSubscribers.addAll(eventMethodsInListener);
        }
    }

    /**
     * Unregisters all subscribers on the given listener object.
     */
    void unregister(Object listener) {
        if (listener instanceof Subscriber) {
            // Fast path: a single Subscriber knows its own bucket key.
            Subscriber subscriber = (Subscriber) listener;
            CopyOnWriteArraySet<Subscriber> currentSubscribers = subscribers.get(subscriber.registryKey);
            if (currentSubscribers != null) {
                currentSubscribers.remove(subscriber);
            }
        } else {
            // General path: rediscover this listener's subscriber methods and remove
            // the matching entries bucket by bucket.
            Map<Integer, List<Subscriber>> listenerMethods = findAllSubscribers(listener);
            for (Map.Entry<Integer, List<Subscriber>> entry : listenerMethods.entrySet()) {
                int hashCode = entry.getKey();
                Collection<Subscriber> listenerMethodsForType = entry.getValue();
                CopyOnWriteArraySet<Subscriber> currentSubscribers = subscribers.get(hashCode);
                if (currentSubscribers != null) {
                    currentSubscribers.removeAll(listenerMethodsForType);
                }
                // don't try to remove the set if it's empty; that can't be done safely without a lock
                // anyway, if the set is empty it'll just be wrapping an array of length 0
            }
        }
    }

    /**
     * Gets an iterator representing an immutable snapshot of all subscribers to the given event at the time this method
     * is called.
     */
    Iterator<Subscriber> getSubscribers(Object event) {
        if (event instanceof TypeSupplier) {
            // Typed events are looked up by the pair (event class name, supplied type name),
            // mirroring the key built in computeParameterHashCode for TypedEvent parameters.
            Class<?> eventSourceType = ((TypeSupplier) event).getType();
            int hashCode = Objects.hash(event.getClass().getName(), eventSourceType.getName());
            CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(hashCode);
            return eventSubscribers != null ? eventSubscribers.iterator() : Collections.emptyIterator();
        } else {
            // Plain events dispatch to subscribers of every type in the event's hierarchy
            // (superclasses/interfaces), as flattened by the loader.
            Set<Class<?>> eventTypes = subscriberLoader.flattenHierarchy(event.getClass());
            LinkedList<Iterator<Subscriber>> subscriberIterators = new LinkedList<>();
            for (Class<?> eventType : eventTypes) {
                int hashCode = eventType.getName().hashCode();
                CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(hashCode);
                if (eventSubscribers != null) {// eager no-copy snapshot
                    subscriberIterators.add(eventSubscribers.iterator());
                }
            }
            return new IteratorAggregator<>(subscriberIterators);
        }
    }

    /**
     * Returns all subscribers for the given listener grouped by the type of event they subscribe to.
     */
    private Map<Integer, List<Subscriber>> findAllSubscribers(Object listener) {
        Map<Integer, List<Subscriber>> methodsInListener = new HashMap<>();
        // The listener is held weakly so registration alone does not prevent collection.
        WeakReference<?> weakListener = new WeakReference<>(listener);
        Class<?> clazz = listener.getClass();
        for (Method method : subscriberLoader.findSubscriberMethods(clazz)) {
            int hashCode = computeParameterHashCode(method);
            List<Subscriber> subscriberList = methodsInListener.computeIfAbsent(hashCode, k -> new ArrayList<>());
            subscriberList.add(Subscriber.create(bus, weakListener, method, hashCode));
        }
        return methodsInListener;
    }

    /**
     * Computes the bucket key for a subscriber method from its single parameter:
     * for {@code TypedEvent<X>} parameters the key mixes the raw type name and the
     * type-argument name; otherwise it is the parameter class name's hash code.
     */
    private int computeParameterHashCode(Method method) {
        Class<?> parameterClass = method.getParameterTypes()[0];
        Type parameterType = method.getGenericParameterTypes()[0];
        if (parameterClass.equals(TypedEvent.class) && parameterType instanceof ParameterizedType) {
            ParameterizedType firstParam = (ParameterizedType) parameterType;
            Type[] typeArguments = firstParam.getActualTypeArguments();
            return Objects.hash(firstParam.getRawType().getTypeName(), typeArguments[0].getTypeName());
        }
        return parameterClass.getName().hashCode();
    }

    /** Returns {@code first} when non-null, otherwise the (required non-null) {@code second}. */
    private static <T> T firstNonNull(T first, T second) {
        return first != null ? first : Objects.requireNonNull(second);
    }

    /**
     * Clear all subscribers from the cache.
     */
    void clear() {
        subscribers.clear();
        subscriberLoader.invalidateAll();
    }

    // Test-only accessor: plain (non-TypedEvent) bucket lookup by event class.
    Set<Subscriber> getSubscribersForTesting(Class<?> eventType) {
        int hashCode = eventType.getName().hashCode();
        /*if (event instanceof TypeSupplier) {
        Class<?> eventSourceType = ((TypeSupplier) event).getType();
        hashCode = (31 + hashCode) * 31 + eventSourceType.getName().hashCode();
        }*/
        CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(hashCode);
        return eventSubscribers != null ? eventSubscribers : Set.of();
    }

    /**
     * Chains several iterators into one sequential view; used to walk the per-type
     * subscriber snapshots gathered in {@link #getSubscribers(Object)}.
     */
    private static final class IteratorAggregator<E> implements Iterator<E> {
        private final LinkedList<Iterator<E>> internalIterators;
        // Iterator currently being consumed; null until the first next() call.
        private Iterator<E> currentIterator = null;

        private IteratorAggregator(List<Iterator<E>> iterators) {
            internalIterators = new LinkedList<>(iterators);
        }

        @Override
        public boolean hasNext() {
            return (currentIterator != null && currentIterator.hasNext()) ||
                    (!internalIterators.isEmpty() && internalIterators.getFirst().hasNext());
        }

        @Override
        public E next() {
            if (currentIterator != null && currentIterator.hasNext()) {
                return currentIterator.next();
            }
            // Advance to the next underlying iterator, if any.
            currentIterator = internalIterators.pollFirst();
            if (currentIterator != null) {
                return currentIterator.next();
            }
            throw new NoSuchElementException();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    }
}
| apache-2.0 |
KavaProject/KavaTouch | src/main/java/org/kavaproject/kavatouch/corefoundation/CorePropertyLists.java | 2168 | /*
* Copyright 2013 The Kava Project Developers. See the COPYRIGHT file at the top-level directory of this distribution
* and at http://kavaproject.org/COPYRIGHT.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the
* MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. This file may not be copied,
* modified, or distributed except according to those terms.
*/
package org.kavaproject.kavatouch.corefoundation;
import org.kavaproject.kavatouch.internal.CFunction;
import org.kavaproject.kavatouch.internal.Header;
import org.kavaproject.kavatouch.util.NotImplementedException;
import org.kavaproject.kavatouch.util.OutArg;
import java.io.InputStream;
import java.nio.ByteBuffer;
@Header("CFPropertyList")
public class CorePropertyLists {

    /**
     * Binding for Core Foundation's {@code CFPropertyListCreateWithData}.
     * Not implemented: always throws {@link NotImplementedException}.
     *
     * @param data buffer containing the serialized property list
     * @param options mutability options for the created property list
     * @param format out-parameter receiving the detected property-list format
     * @return the deserialized property list (never returned; not implemented)
     */
    @CFunction("CFPropertyListCreateWithData")
    public static final CorePropertyList create(ByteBuffer data, CorePropertyListMutabilityOptions options,
            OutArg<CorePropertyListFormat> format) throws RuntimeException {
        throw new NotImplementedException();
    }

    /**
     * Binding for Core Foundation's {@code CFPropertyListCreateWithStream}.
     * Not implemented: always throws {@link NotImplementedException}.
     *
     * @param stream stream providing the serialized property list
     * @param streamLength number of bytes to read from the stream
     * @param options mutability options for the created property list
     * @param format out-parameter receiving the detected property-list format
     * @return the deserialized property list (never returned; not implemented)
     */
    @CFunction("CFPropertyListCreateWithStream")
    public static final CorePropertyList create(InputStream stream, long streamLength,
            CorePropertyListMutabilityOptions options,
            OutArg<CorePropertyListFormat> format) throws RuntimeException {
        throw new NotImplementedException();
    }

    /**
     * Binding for the deprecated Core Foundation function {@code CFPropertyListCreateFromXMLData}.
     * Not implemented: always throws {@link NotImplementedException}.
     *
     * @param xmlData buffer containing the XML property list
     * @param mutabilityOption mutability options for the created property list
     * @param errorString out-parameter receiving a parse-error description
     * @return the deserialized property list (never returned; not implemented)
     */
    @Deprecated
    @CFunction("CFPropertyListCreateFromXMLData")
    public static final CorePropertyList createFromXMLData(ByteBuffer xmlData, CorePropertyListMutabilityOptions
            mutabilityOption, OutArg<String> errorString) {
        throw new NotImplementedException();
    }

    /**
     * Binding for the deprecated Core Foundation function {@code CFPropertyListCreateFromStream}.
     * Not implemented: always throws {@link NotImplementedException}.
     *
     * @param stream stream providing the serialized property list
     * @param streamLength number of bytes to read from the stream
     * @param mutabilityOption mutability options for the created property list
     * @param format expected property-list format of the stream contents
     * @param errorString out-parameter receiving a parse-error description
     * @return the deserialized property list (never returned; not implemented)
     */
    @Deprecated
    @CFunction("CFPropertyListCreateFromStream")
    public static final CorePropertyList create(InputStream stream, long streamLength, CorePropertyListMutabilityOptions mutabilityOption, CorePropertyListFormat format, OutArg<String> errorString) {
        throw new NotImplementedException();
    }
}
| apache-2.0 |
MRCHENDQ/JoinOurs | mblog-base/src/main/java/mblog/base/utils/URLUtils.java | 652 | /*
+--------------------------------------------------------------------------
| Mblog [#RELEASE_VERSION#]
| ========================================
| Copyright (c) 2014, 2015 mtons. All Rights Reserved
| http://www.mtons.com
|
+---------------------------------------------------------------------------
*/
package mblog.base.utils;
import java.net.MalformedURLException;
import java.net.URL;
/**
 * Small helpers for working with URL strings.
 *
 * @author cdq
 */
public class URLUtils {

    /**
     * Extracts the host component from the given URL string.
     *
     * @param link the URL to parse, e.g. {@code "https://example.com/path"}
     * @return the host part of the URL (e.g. {@code "example.com"}), or an empty
     *         string if {@code link} is not a well-formed URL
     */
    public static String getHost(String link) {
        try {
            return new URL(link).getHost();
        } catch (MalformedURLException e) {
            // Arbitrary user-supplied links may be malformed; an empty host is the
            // documented fallback rather than propagating the exception.
            return "";
        }
    }
}
| apache-2.0 |
geosolutions-it/jai-ext | jt-shadedrelief/src/main/java/it/geosolutions/jaiext/shadedrelief/ShadedReliefRIF.java | 2600 | /* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2018 GeoSolutions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.shadedrelief;
import com.sun.media.jai.opimage.RIFUtil;
import it.geosolutions.jaiext.range.Range;
import java.awt.RenderingHints;
import java.awt.image.RenderedImage;
import java.awt.image.renderable.ParameterBlock;
import java.awt.image.renderable.RenderedImageFactory;
import javax.media.jai.ImageLayout;
import javax.media.jai.ROI;
/**
 * {@link RenderedImageFactory} building {@link ShadedReliefOpImage} instances from
 * the parameters carried in a {@link ParameterBlock}.
 */
public class ShadedReliefRIF implements RenderedImageFactory {

    public ShadedReliefRIF() {}

    /**
     * Creates the shaded-relief operation over the first source of {@code pb}.
     *
     * @param pb parameter block holding the source image and the operation parameters
     * @param hints rendering hints, possibly carrying an {@link ImageLayout}
     * @return the configured {@link ShadedReliefOpImage}
     */
    public RenderedImage create(ParameterBlock pb, RenderingHints hints) {
        // Destination layout hint (may be null when absent from the hints).
        final ImageLayout layout = RIFUtil.getImageLayoutHint(hints);
        // Source elevation image.
        final RenderedImage source = pb.getRenderedSource(0);

        // Operation parameters, addressed by their declaration position.
        final ROI roi = (ROI) pb.getObjectParameter(0);
        final Range srcNoData = (Range) pb.getObjectParameter(1);
        final double dstNoData = pb.getDoubleParameter(2);
        final double resX = pb.getDoubleParameter(3);
        final double resY = pb.getDoubleParameter(4);
        final double verticalExaggeration = pb.getDoubleParameter(5);
        final double verticalScale = pb.getDoubleParameter(6);
        final double altitude = pb.getDoubleParameter(7);
        final double azimuth = pb.getDoubleParameter(8);
        final ShadedReliefAlgorithm algorithm = (ShadedReliefAlgorithm) pb.getObjectParameter(9);

        return new ShadedReliefOpImage(
                source,
                hints,
                layout,
                roi,
                srcNoData,
                dstNoData,
                resX,
                resY,
                verticalExaggeration,
                verticalScale,
                altitude,
                azimuth,
                algorithm);
    }
}
| apache-2.0 |
anuraaga/armeria | core/src/main/java/com/linecorp/armeria/common/stream/EmptyFixedStreamMessage.java | 1478 | /*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.common.stream;
import com.linecorp.armeria.common.util.UnstableApi;
/**
 * A {@link FixedStreamMessage} that publishes no objects, just a close event.
 */
@UnstableApi
public class EmptyFixedStreamMessage<T> extends FixedStreamMessage<T> {

    @Override
    final void doRequest(SubscriptionImpl subscription, long unused) {
        // There is nothing to publish, so the very first demand immediately
        // triggers the successful-close notification. Any later demand is a
        // no-op because the close event has already been delivered.
        if (requested() == 0) {
            setRequested(1);
            notifySubscriberOfCloseEvent(subscription, SUCCESSFUL_CLOSE);
        }
    }

    @Override
    public final boolean isEmpty() {
        return true;
    }

    @Override
    final void cleanupObjects() {
        // Nothing to release: this stream never holds any objects.
    }
}
| apache-2.0 |
tzaeschke/phtree | src/main/java/ch/ethz/globis/phtree/PhDistanceSF.java | 368 | /*
* Copyright 2011-2016 ETH Zurich. All Rights Reserved.
*
* This software is the proprietary information of ETH Zurich.
* Use is subject to license terms.
*/
package ch.ethz.globis.phtree;
/**
 * Common interface for distance functions for rectangle data.
 *
 * <p>This is a marker interface: it declares no methods beyond {@link PhDistance},
 * but allows rectangle-specific distance implementations to be distinguished by type.
 *
 * @see PhDistance
 *
 * @author ztilmann
 */
public interface PhDistanceSF extends PhDistance {
}
ITMAOO/scenic | scenic-wechatserver/src/main/java/com/scenic/wechat/server/handler/MessageHandler.java | 481 | package com.scenic.wechat.server.handler;
import com.scenic.wechat.server.message.request.BaseRequestMessage;
import com.scenic.wechat.server.message.response.BaseResponseMessage;
/**
 * Message handler interface.
 *
 * <p>Created by liujie on 2016/8/6 17:36.
 */
public interface MessageHandler {

    /**
     * Performs the actual handling of an incoming message.
     *
     * @param baseRequestMessage the request message to handle
     * @return the response message produced for the request
     */
    BaseResponseMessage doHandleMessage(BaseRequestMessage baseRequestMessage);
}
| apache-2.0 |
lmjacksoniii/hazelcast | hazelcast-client/src/main/java/com/hazelcast/client/spi/package-info.java | 760 | /*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Contains classes/interfaces related to the Hazelcast client Service Provider Interface.
*/
package com.hazelcast.client.spi;
| apache-2.0 |
VirtualGamer/SnowEngine | Dependencies/openal/src/org/lwjgl/openal/ALCCapabilities.java | 6411 | /*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.openal;
import org.lwjgl.system.*;
import java.util.Set;
/** Defines the capabilities of the OpenAL Context API. */
public final class ALCCapabilities {

    // Raw OpenAL Context (ALC) entry-point addresses. A value of 0 (NULL) means the
    // function is not exposed by the loaded implementation; callers must check
    // availability before invoking. NOTE: this file is machine generated — code is
    // intentionally left untouched, only comments were added.
    public final long
        alcCaptureCloseDevice,
        alcCaptureOpenDevice,
        alcCaptureSamples,
        alcCaptureStart,
        alcCaptureStop,
        alcCloseDevice,
        alcCreateContext,
        alcDestroyContext,
        alcDevicePauseSOFT,
        alcDeviceResumeSOFT,
        alcGetContextsDevice,
        alcGetCurrentContext,
        alcGetEnumValue,
        alcGetError,
        alcGetIntegerv,
        alcGetProcAddress,
        alcGetString,
        alcGetStringiSOFT,
        alcGetThreadContext,
        alcIsExtensionPresent,
        alcIsRenderFormatSupportedSOFT,
        alcLoopbackOpenDeviceSOFT,
        alcMakeContextCurrent,
        alcOpenDevice,
        alcProcessContext,
        alcRenderSamplesSOFT,
        alcResetDeviceSOFT,
        alcSetThreadContext,
        alcSuspendContext;

    /** When true, {@link ALC10} is supported. */
    public final boolean OpenALC10;

    /** When true, {@link ALC11} is supported. */
    public final boolean OpenALC11;

    /** When true, {@link EnumerateAllExt} is supported. */
    public final boolean ALC_ENUMERATE_ALL_EXT;

    /**
     * An OpenAL 1.1 implementation will always support the {@code ALC_ENUMERATION_EXT} extension. This extension provides for enumeration of the available OpenAL devices
     * through {@link ALC10#alcGetString GetString}. An {@link ALC10#alcGetString GetString} query of {@link ALC10#ALC_DEVICE_SPECIFIER DEVICE_SPECIFIER} with a {@code NULL} device passed in will return a list of devices. Each
     * device name will be separated by a single {@code NULL} character and the list will be terminated with two {@code NULL} characters.
     */
    public final boolean ALC_ENUMERATION_EXT;

    /** When true, {@link EXTCapture} is supported. */
    public final boolean ALC_EXT_CAPTURE;

    /** When true, {@link EXTDedicated} is supported. */
    public final boolean ALC_EXT_DEDICATED;

    /** When true, {@link EXTDefaultFilterOrder} is supported. */
    public final boolean ALC_EXT_DEFAULT_FILTER_ORDER;

    /** When true, {@link EXTDisconnect} is supported. */
    public final boolean ALC_EXT_disconnect;

    /** When true, {@link EXTEfx} is supported. */
    public final boolean ALC_EXT_EFX;

    /** When true, {@link EXTThreadLocalContext} is supported. */
    public final boolean ALC_EXT_thread_local_context;

    /** When true, {@link LOKIAudioChannel} is supported. */
    public final boolean ALC_LOKI_audio_channel;

    /** When true, {@link SOFTHRTF} is supported. */
    public final boolean ALC_SOFT_HRTF;

    /** When true, {@link SOFTLoopback} is supported. */
    public final boolean ALC_SOFT_loopback;

    /** When true, {@link SOFTPauseDevice} is supported. */
    public final boolean ALC_SOFT_pause_device;

    // Resolves every entry-point address and extension flag once, at construction time.
    // Core ALC functions are looked up globally; extension functions (the overloads that
    // pass {@code device}) are looked up per device, since their availability can differ
    // between devices.
    ALCCapabilities(FunctionProviderLocal provider, long device, Set<String> ext) {
        alcCaptureCloseDevice = provider.getFunctionAddress("alcCaptureCloseDevice");
        alcCaptureOpenDevice = provider.getFunctionAddress("alcCaptureOpenDevice");
        alcCaptureSamples = provider.getFunctionAddress("alcCaptureSamples");
        alcCaptureStart = provider.getFunctionAddress("alcCaptureStart");
        alcCaptureStop = provider.getFunctionAddress("alcCaptureStop");
        alcCloseDevice = provider.getFunctionAddress("alcCloseDevice");
        alcCreateContext = provider.getFunctionAddress("alcCreateContext");
        alcDestroyContext = provider.getFunctionAddress("alcDestroyContext");
        alcDevicePauseSOFT = provider.getFunctionAddress(device, "alcDevicePauseSOFT");
        alcDeviceResumeSOFT = provider.getFunctionAddress(device, "alcDeviceResumeSOFT");
        alcGetContextsDevice = provider.getFunctionAddress("alcGetContextsDevice");
        alcGetCurrentContext = provider.getFunctionAddress("alcGetCurrentContext");
        alcGetEnumValue = provider.getFunctionAddress("alcGetEnumValue");
        alcGetError = provider.getFunctionAddress("alcGetError");
        alcGetIntegerv = provider.getFunctionAddress("alcGetIntegerv");
        alcGetProcAddress = provider.getFunctionAddress("alcGetProcAddress");
        alcGetString = provider.getFunctionAddress("alcGetString");
        alcGetStringiSOFT = provider.getFunctionAddress(device, "alcGetStringiSOFT");
        alcGetThreadContext = provider.getFunctionAddress(device, "alcGetThreadContext");
        alcIsExtensionPresent = provider.getFunctionAddress("alcIsExtensionPresent");
        alcIsRenderFormatSupportedSOFT = provider.getFunctionAddress(device, "alcIsRenderFormatSupportedSOFT");
        alcLoopbackOpenDeviceSOFT = provider.getFunctionAddress(device, "alcLoopbackOpenDeviceSOFT");
        alcMakeContextCurrent = provider.getFunctionAddress("alcMakeContextCurrent");
        alcOpenDevice = provider.getFunctionAddress("alcOpenDevice");
        alcProcessContext = provider.getFunctionAddress("alcProcessContext");
        alcRenderSamplesSOFT = provider.getFunctionAddress(device, "alcRenderSamplesSOFT");
        alcResetDeviceSOFT = provider.getFunctionAddress(device, "alcResetDeviceSOFT");
        alcSetThreadContext = provider.getFunctionAddress(device, "alcSetThreadContext");
        alcSuspendContext = provider.getFunctionAddress("alcSuspendContext");

        // An extension flag is set only when the extension string advertises it AND
        // (where applicable) all of its required entry points resolved successfully.
        OpenALC10 = ext.contains("OpenALC10") && ALC.checkExtension("OpenALC10", ALC10.isAvailable(this));
        OpenALC11 = ext.contains("OpenALC11") && ALC.checkExtension("OpenALC11", ALC11.isAvailable(this));
        ALC_ENUMERATE_ALL_EXT = ext.contains("ALC_ENUMERATE_ALL_EXT");
        ALC_ENUMERATION_EXT = ext.contains("ALC_ENUMERATION_EXT");
        ALC_EXT_CAPTURE = ext.contains("ALC_EXT_CAPTURE") && ALC.checkExtension("ALC_EXT_CAPTURE", EXTCapture.isAvailable(this));
        ALC_EXT_DEDICATED = ext.contains("ALC_EXT_DEDICATED");
        ALC_EXT_DEFAULT_FILTER_ORDER = ext.contains("ALC_EXT_DEFAULT_FILTER_ORDER");
        ALC_EXT_disconnect = ext.contains("ALC_EXT_disconnect");
        ALC_EXT_EFX = ext.contains("ALC_EXT_EFX");
        ALC_EXT_thread_local_context = ext.contains("ALC_EXT_thread_local_context") && ALC.checkExtension("ALC_EXT_thread_local_context", EXTThreadLocalContext.isAvailable(this));
        ALC_LOKI_audio_channel = ext.contains("ALC_LOKI_audio_channel");
        ALC_SOFT_HRTF = ext.contains("ALC_SOFT_HRTF") && ALC.checkExtension("ALC_SOFT_HRTF", SOFTHRTF.isAvailable(this));
        ALC_SOFT_loopback = ext.contains("ALC_SOFT_loopback") && ALC.checkExtension("ALC_SOFT_loopback", SOFTLoopback.isAvailable(this));
        ALC_SOFT_pause_device = ext.contains("ALC_SOFT_pause_device") && ALC.checkExtension("ALC_SOFT_pause_device", SOFTPauseDevice.isAvailable(this));
    }
}
nezirus/elasticsearch | core/src/main/java/org/elasticsearch/index/query/IdsQueryBuilder.java | 6484 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ObjectParser.fromList;
/**
* A query that will return only documents matching specific ids (and a type).
*/
public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
    public static final String NAME = "ids";

    private static final ParseField TYPE_FIELD = new ParseField("type", "types", "_type");
    private static final ParseField VALUES_FIELD = new ParseField("values");

    /** The document ids to match; when empty the query matches no documents. */
    private final Set<String> ids = new HashSet<>();

    /** The mapping types to restrict matching to; empty means "use the context's query types". */
    private String[] types = Strings.EMPTY_ARRAY;

    /**
     * Creates a new IdsQueryBuilder with no types specified upfront
     */
    public IdsQueryBuilder() {
        // nothing to do
    }

    /**
     * Creates a new IdsQueryBuilder by providing the types of the documents to look for
     * @deprecated Replaced by {@link #types(String...)}
     */
    @Deprecated
    public IdsQueryBuilder(String... types) {
        types(types);
    }

    /**
     * Read from a stream.
     */
    public IdsQueryBuilder(StreamInput in) throws IOException {
        super(in);
        types = in.readStringArray();
        Collections.addAll(ids, in.readStringArray());
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeStringArray(types);
        // toArray(new String[0]) is the idiomatic (and on modern JVMs fastest) form.
        out.writeStringArray(ids.toArray(new String[0]));
    }

    /**
     * Add types to query
     *
     * @param types the mapping types to restrict the query to; must not be null
     * @return this builder, for chaining
     * @throws IllegalArgumentException if {@code types} is null
     */
    public IdsQueryBuilder types(String... types) {
        if (types == null) {
            throw new IllegalArgumentException("[" + NAME + "] types cannot be null");
        }
        this.types = types;
        return this;
    }

    /**
     * Returns the types used in this query
     */
    public String[] types() {
        return this.types;
    }

    /**
     * Adds ids to the query.
     *
     * @param ids the document ids to add; must not be null (duplicates are ignored)
     * @return this builder, for chaining
     * @throws IllegalArgumentException if {@code ids} is null
     */
    public IdsQueryBuilder addIds(String... ids) {
        if (ids == null) {
            throw new IllegalArgumentException("[" + NAME + "] ids cannot be null");
        }
        Collections.addAll(this.ids, ids);
        return this;
    }

    /**
     * Returns the ids for the query.
     */
    public Set<String> ids() {
        return this.ids;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.array(TYPE_FIELD.getPreferredName(), types);
        builder.startArray(VALUES_FIELD.getPreferredName());
        for (String value : ids) {
            builder.value(value);
        }
        builder.endArray();
        printBoostAndQueryName(builder);
        builder.endObject();
    }

    // FIX: declared final (the parser is built once in the static initializer and must
    // never be reassigned) and the lambda replaced with a constructor reference.
    private static final ObjectParser<IdsQueryBuilder, QueryParseContext> PARSER = new ObjectParser<>(NAME,
            IdsQueryBuilder::new);

    static {
        PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::types), IdsQueryBuilder.TYPE_FIELD);
        PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::addIds), IdsQueryBuilder.VALUES_FIELD);
        declareStandardFields(PARSER);
    }

    /**
     * Parses an {@code ids} query from the given context, converting parser-level
     * {@link IllegalArgumentException}s into {@link ParsingException}s with location info.
     */
    public static IdsQueryBuilder fromXContent(QueryParseContext context) {
        try {
            return PARSER.apply(context.parser(), context);
        } catch (IllegalArgumentException e) {
            throw new ParsingException(context.parser().getTokenLocation(), e.getMessage(), e);
        }
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        Query query;
        MappedFieldType uidField = context.fieldMapper(UidFieldMapper.NAME);
        if (uidField == null) {
            return new MatchNoDocsQuery("No mappings");
        }
        if (this.ids.isEmpty()) {
            query = Queries.newMatchNoDocsQuery("Missing ids in \"" + this.getName() + "\" query.");
        } else {
            // Resolve the effective type set: default context types, all mapped types
            // when "_all" was requested, or the explicitly supplied types.
            Collection<String> typesForQuery;
            if (types.length == 0) {
                typesForQuery = context.queryTypes();
            } else if (types.length == 1 && MetaData.ALL.equals(types[0])) {
                typesForQuery = context.getMapperService().types();
            } else {
                typesForQuery = new HashSet<>();
                Collections.addAll(typesForQuery, types);
            }
            query = uidField.termsQuery(Arrays.asList(Uid.createUidsForTypesAndIds(typesForQuery, ids)), context);
        }
        return query;
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(ids, Arrays.hashCode(types));
    }

    @Override
    protected boolean doEquals(IdsQueryBuilder other) {
        return Objects.equals(ids, other.ids) &&
                Arrays.equals(types, other.types);
    }
}
| apache-2.0 |
alexeremeev/aeremeev | chapter_011bootcars/src/main/java/ru/job4j/configuration/SpringWebConfig.java | 1118 | package ru.job4j.configuration;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
@EnableWebMvc
@Configuration
public class SpringWebConfig extends WebMvcConfigurerAdapter {

    /** Classpath locations scanned for static resources, in lookup order. */
    private static final String[] CLASSPATH_RESOURCE_LOCATIONS = {
            "classpath:/META-INF/resources/", "classpath:/resources/",
            "classpath:/static/", "classpath:/public/"
    };

    /**
     * Registers the simple URL-to-view mappings that require no controller logic.
     */
    @Override
    public void addViewControllers(ViewControllerRegistry registry) {
        // Each entry is a {url, viewName} pair; registration order matches the table.
        String[][] viewMappings = {
                {"/", "index"},
                {"/create", "create"},
        };
        for (String[] mapping : viewMappings) {
            registry.addViewController(mapping[0]).setViewName(mapping[1]);
        }
    }

    /**
     * Serves every request path from the standard static-resource classpath locations.
     */
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        registry.addResourceHandler("/**").addResourceLocations(CLASSPATH_RESOURCE_LOCATIONS);
    }
}
| apache-2.0 |
wapalxj/Android_C3_4_Thread_AsyncTask | C3_4_Thread_AsyncTask/c3_5_01_web/src/main/java/com/c3/vero/c3_5_01_web/ServiceRuleException.java | 211 | package com.c3.vero.c3_5_01_web;
/**
 * Thrown when a service-layer business rule is violated.
 *
 * Created by vero on 2015/12/4.
 */
public class ServiceRuleException extends Exception {

    // Exceptions are Serializable; declaring the id avoids accidental
    // serial-compatibility breaks when the class evolves.
    private static final long serialVersionUID = 1L;

    /**
     * @param message description of the violated rule
     */
    public ServiceRuleException(String message) {
        super(message);
    }

    /**
     * Cause-preserving constructor so callers wrapping lower-level failures
     * do not lose the original stack trace.
     *
     * @param message description of the violated rule
     * @param cause   the underlying failure that triggered this exception
     */
    public ServiceRuleException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
xfmysql/xfwdata | totcms/src/tot/dao/jdbc/VideoCommenDaoImplJDBC.java | 5576 | /*
* EduCommenDaoImplJDBC.java
*
* Created on 2008Äê3ÔÂ5ÈÕ, ÏÂÎç7:56
*
* To change this template, choose Tools | Template Manager
* and open the template in the editor.
*/
package tot.dao.jdbc;
import tot.dao.AbstractDao;
import tot.db.DBUtils;
import tot.bean.*;
import tot.exception.ObjectNotFoundException;
import tot.exception.DatabaseException;
import java.sql.*;
import java.util.*;
import java.io.StringReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
*
* @author Administrator
*/
/**
 * JDBC DAO for video comments (table {@code t_video_commen}).
 */
public class VideoCommenDaoImplJDBC extends AbstractDao{

    private static Log log = LogFactory.getLog(VideoCommenDaoImplJDBC.class);

    /** Creates a new instance of VideoCommenDaoImplJDBC */
    public VideoCommenDaoImplJDBC() {
    }

    /**
     * Inserts a new video comment.
     *
     * @param objid     id of the video the comment belongs to
     * @param author    comment author
     * @param content   comment body
     * @param ip        submitter's IP address
     * @param moditime  submission timestamp
     * @return true when exactly one row was inserted; false on any failure
     */
    public boolean add(int objid,String author,String content,String ip,Timestamp moditime){
        Connection conn = null;
        PreparedStatement ps = null;
        boolean returnValue=true;
        String sql="insert into t_video_commen(ObjId,Author,Content,Ip,ModiTime) values(?,?,?,?,?)";
        try{
            conn = DBUtils.getConnection();
            ps=conn.prepareStatement(sql);
            ps.setInt(1,objid);
            ps.setString(2,author);
            if(DBUtils.getDatabaseType()==DBUtils.DATABASE_ORACLE){
                // Oracle CLOB columns must be written via a character stream.
                ps.setCharacterStream(3, new StringReader(content), content.length());
            }else{
                ps.setString(3,content);
            }
            ps.setString(4,ip);
            ps.setTimestamp(5,moditime);
            if(ps.executeUpdate()!=1) returnValue=false;
        } catch(SQLException e){
            // BUG FIX: previously the method still returned true after an SQLException,
            // making callers believe the insert succeeded.
            returnValue=false;
            log.error("add video comment error",e);
        } finally{
            DBUtils.closePrepareStatement(ps);
            DBUtils.closeConnection(conn);
        }
        return returnValue;
    }

    /**
     * Updates the author and content of an existing comment.
     *
     * @param id      comment id
     * @param author  new author value
     * @param content new comment body
     * @return true when exactly one row was updated; false on any failure
     */
    public boolean mod(int id,String author,String content){
        Connection conn = null;
        PreparedStatement ps = null;
        boolean returnValue=true;
        String sql="update t_video_commen set author=?,Content=? where id=?";
        try{
            conn = DBUtils.getConnection();
            ps=conn.prepareStatement(sql);
            ps.setString(1,author);
            if(DBUtils.getDatabaseType()==DBUtils.DATABASE_ORACLE){
                // Oracle CLOB columns must be written via a character stream.
                ps.setCharacterStream(2, new StringReader(content), content.length());
            }else{
                ps.setString(2,content);
            }
            ps.setInt(3,id);
            if(ps.executeUpdate()!=1) returnValue=false;
        } catch(SQLException e){
            // BUG FIX: previously the method still returned true after an SQLException.
            returnValue=false;
            log.error("mod video comment error",e);
        } finally{
            DBUtils.closePrepareStatement(ps);
            DBUtils.closeConnection(conn);
        }
        return returnValue;
    }

    /** Loads a single comment row by id; id is an int so concatenation is injection-safe. */
    public DataField get(int id){
        return getFirstData("select id,ObjId,Author,Content,Ip,ModiTime from t_video_commen where id="+id,"id,ObjId,Author,Content,Ip,ModiTime");
    }

    /** Deletes a single comment by id. */
    public boolean del(int id) throws ObjectNotFoundException,DatabaseException{
        return exe("delete from t_video_commen where id="+id);
    }

    /** Batch-deletes the comments whose ids are in {@code s}. */
    public void batDel(String[] s){
        this.bat("delete from t_video_commen where id=?",s);
    }

    /** Batch-sets the IsRecommend flag to {@code val} for the comments whose ids are in {@code s}. */
    public void baRecommend(String[] s,int val){
        this.bat("update t_video_commen set IsRecommend="+val+" where id=?",s);
    }

    /**
     * Returns one page of comments joined with their video titles, newest first.
     * Filters by video id and/or recommend flag when those arguments are non-negative.
     * The SQL is built once; only the paging helper differs per database type.
     *
     * @param objid       video id filter; ignored when <= 0
     * @param isrecommend recommend-flag filter; ignored when < 0
     * @param currentpage 1-based page number
     * @param pagesize    rows per page
     */
    public Collection getListByLimit(int objid,int isrecommend,int currentpage,int pagesize){
        // FIX: the two branches previously duplicated identical SQL-building code;
        // the query text itself is unchanged.
        StringBuffer sql=new StringBuffer(512);
        sql.append("select t_video_commen.id as tid,t_video_commen.ObjId,t_video_commen.Author as tauthor,t_video_commen.Content as tcontent,t_video_commen.Ip as tip,t_video_commen.ModiTime as tmod,t_video.Title as dtilte from t_video_commen,t_video where t_video_commen.ObjId=t_video.id");
        if(objid>0) sql.append(" and t_video_commen.ObjId="+objid);
        if(isrecommend>=0) sql.append(" and t_video_commen.IsRecommend="+isrecommend);
        sql.append(" order by t_video_commen.id desc");
        if(DBUtils.getDatabaseType() == DBUtils.DATABASE_MYSQL){
            return getDataList_mysqlLimit(sql.toString(),"id,ObjId,Author,Content,Ip,ModiTime,VideoTitle",pagesize,(currentpage-1)*pagesize);
        }
        return getDataList_Limit_Normal(sql.toString(),"id,ObjId,Author,Content,Ip,ModiTime,VideoTitle",pagesize,(currentpage-1)*pagesize);
    }

    /**
     * Counts the comments matching the same filters as {@link #getListByLimit}.
     */
    public int getTotalCount(int objid,int isrecommend){
        StringBuffer sql=new StringBuffer(512);
        sql.append("select count(*) from t_video_commen,t_video where t_video_commen.ObjId=t_video.id");
        if(objid>0){
            sql.append(" and t_video_commen.ObjId=");
            sql.append(objid);
        }
        if(isrecommend>=0) sql.append(" and t_video_commen.IsRecommend="+isrecommend);
        return(this.getDataCount(sql.toString()));
    }
}
| apache-2.0 |
dhutchis/accumulo | core/src/main/java/org/apache/accumulo/core/iterators/user/CfCqSliceSeekingFilter.java | 5637 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.iterators.user;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.PartialKey;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.IteratorEnvironment;
import org.apache.accumulo.core.iterators.OptionDescriber;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import java.io.IOException;
import java.util.Map;
/**
* Filters key/value pairs for a range of column families and a range of column qualifiers. Only keys which fall in both ranges will be passed by the filter.
* Note that if you have a small, well-defined set of column families it will be much more efficient to configure locality groups to isolate that data instead
* of configuring this iterator to seek over it.
*
* This filter may be more efficient than the CfCqSliceFilter or the ColumnSlice filter for small slices of large rows as it will seek to the next potential
* match once it determines that it has iterated past the end of a slice.
*
* @see org.apache.accumulo.core.iterators.user.CfCqSliceOpts for a description of this iterator's options.
*/
public class CfCqSliceSeekingFilter extends SeekingFilter implements OptionDescriber {

  // Pre-built, immutable filter decisions shared by every filter() call:
  // each pairs "include this key?" with "how far to advance afterwards".
  private static final FilterResult SKIP_TO_HINT = FilterResult.of(false, AdvanceResult.USE_HINT);
  private static final FilterResult SKIP_TO_NEXT = FilterResult.of(false, AdvanceResult.NEXT);
  private static final FilterResult SKIP_TO_NEXT_ROW = FilterResult.of(false, AdvanceResult.NEXT_ROW);
  private static final FilterResult SKIP_TO_NEXT_CF = FilterResult.of(false, AdvanceResult.NEXT_CF);
  private static final FilterResult INCLUDE_AND_NEXT = FilterResult.of(true, AdvanceResult.NEXT);
  private static final FilterResult INCLUDE_AND_NEXT_CF = FilterResult.of(true, AdvanceResult.NEXT_CF);

  // Parsed slice bounds (min/max CF and CQ plus inclusivity flags); set in init().
  private CfCqSliceOpts cso;

  @Override
  public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException {
    super.init(source, options, env);
    cso = new CfCqSliceOpts(options);
  }

  // Decides, for a single key, whether it lies in the CF/CQ slice and how the
  // iterator should advance. The checks are ordered CF-min, CF-max, CQ-min,
  // CQ-max; a zero-length bound means "unbounded" on that side.
  @Override
  public FilterResult filter(Key k, Value v) {
    if (cso.minCf.getLength() > 0) {
      int minCfCmp = k.compareColumnFamily(cso.minCf);
      if (minCfCmp < 0) {
        return SKIP_TO_HINT; // hint will be the min CF in this row.
      }
      if (minCfCmp == 0 && !cso.minInclusive) {
        return SKIP_TO_NEXT;
      }
    }
    if (cso.maxCf.getLength() > 0) {
      int maxCfCmp = k.compareColumnFamily(cso.maxCf);
      if (maxCfCmp > 0 || (maxCfCmp == 0 && !cso.maxInclusive)) {
        // past the CF slice: nothing further in this row can match.
        return SKIP_TO_NEXT_ROW;
      }
    }
    // at this point we're in the correct CF range, now check the CQ.
    if (cso.minCq.getLength() > 0) {
      int minCqCmp = k.compareColumnQualifier(cso.minCq);
      if (minCqCmp < 0) {
        return SKIP_TO_HINT; // hint will be the min CQ in this CF in this row.
      }
      if (minCqCmp == 0 && !cso.minInclusive) {
        return SKIP_TO_NEXT;
      }
    }
    if (cso.maxCq.getLength() > 0) {
      int maxCqCmp = k.compareColumnQualifier(cso.maxCq);
      if (maxCqCmp > 0 || (maxCqCmp == 0 && !cso.maxInclusive)) {
        return SKIP_TO_NEXT_CF;
      }
      if (maxCqCmp == 0) {
        // special-case here: we know we're at the last CQ in the slice, so skip to the next CF in the row.
        return INCLUDE_AND_NEXT_CF;
      }
    }
    // at this point we're in the CQ slice.
    return INCLUDE_AND_NEXT;
  }

  // Builds the seek target promised by a USE_HINT result: the min CF of the
  // current row, or the min CQ of the current CF, honoring inclusivity.
  @Override
  public Key getNextKeyHint(Key k, Value v) throws IllegalArgumentException {
    if (cso.minCf.getLength() > 0) {
      int minCfCmp = k.compareColumnFamily(cso.minCf);
      if (minCfCmp < 0) {
        Key hint = new Key(k.getRow(), cso.minCf);
        return cso.minInclusive ? hint : hint.followingKey(PartialKey.ROW_COLFAM);
      }
    }
    if (cso.minCq.getLength() > 0) {
      int minCqCmp = k.compareColumnQualifier(cso.minCq);
      if (minCqCmp < 0) {
        Key hint = new Key(k.getRow(), k.getColumnFamily(), cso.minCq);
        return cso.minInclusive ? hint : hint.followingKey(PartialKey.ROW_COLFAM_COLQUAL);
      }
    }
    // If we get here it means that we were asked to provide a hint for a key that we
    // didn't return USE_HINT for.
    throw new IllegalArgumentException("Don't know how to provide hint for key " + k);
  }

  @Override
  public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
    // Copy the options too, since CfCqSliceOpts holds mutable Text instances.
    CfCqSliceSeekingFilter o = (CfCqSliceSeekingFilter) super.deepCopy(env);
    o.cso = new CfCqSliceOpts(cso);
    return o;
  }

  @Override
  public IteratorOptions describeOptions() {
    return new CfCqSliceOpts.Describer().describeOptions();
  }

  @Override
  public boolean validateOptions(Map<String,String> options) {
    return new CfCqSliceOpts.Describer().validateOptions(options);
  }
}
| apache-2.0 |
waeljammal/j2js | src/main/java/con/rsnm/model/JSClassPropertyMapper.java | 104 | package con.rsnm.model;
/**
 * Placeholder for mapping Java class properties to JavaScript equivalents.
 * Currently empty — no behavior has been implemented yet.
 *
 * Created by Wael on 27/11/15.
 */
public class JSClassPropertyMapper {
}
| apache-2.0 |
joewalnes/idea-community | platform/platform-impl/src/com/intellij/openapi/editor/impl/softwrap/mapping/OffsetToLogicalCalculationStrategy.java | 8473 | /*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl.softwrap.mapping;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.impl.EditorTextRepresentationHelper;
import com.intellij.openapi.editor.impl.softwrap.SoftWrapsStorage;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* @author Denis Zhdanov
* @since Sep 9, 2010 9:20:55 AM
*/
/**
 * Maps a document offset to its {@link LogicalPosition}, walking the soft-wrap
 * cache from a chosen starting point and accounting for soft wraps, fold
 * regions and tabs along the way.
 */
class OffsetToLogicalCalculationStrategy extends AbstractMappingStrategy<LogicalPosition> {

  // The document offset whose logical position is being computed; set in init().
  private int myTargetOffset;

  OffsetToLogicalCalculationStrategy(@NotNull Editor editor, @NotNull SoftWrapsStorage storage, @NotNull List<CacheEntry> cache,
                                     @NotNull EditorTextRepresentationHelper representationHelper)
  {
    super(editor, storage, cache, representationHelper);
  }

  // Chooses the starting point for the mapping walk. Handles the cheap cases
  // eagerly (offset 0, offset at/past document end, offset beyond the cached
  // range) and otherwise locates the cache entry containing the target offset.
  public void init(final int targetOffset, final List<CacheEntry> cache) {
    reset();

    myTargetOffset = targetOffset;
    Document document = myEditor.getDocument();
    if (targetOffset == 0) {
      // Start of the document always maps to the zero logical position.
      LogicalPosition eager = new LogicalPosition(0, 0, 0, 0, 0, 0, 0);
      setEagerMatch(eager);
      return;
    }
    else if (targetOffset >= document.getTextLength()) {
      if (cache.isEmpty()) {
        setFirstInitialPosition();
        return;
      }
      else {
        // We expect the following possible cases here:
        //   1. There is a cache entry for the target line;
        //     1.1. Document ends by line feed;
        //     1.2. Document ends by the symbol that is not line feed;
        //   2. There is no cache entry for the target line;;
        CacheEntry lastEntry = cache.get(cache.size() - 1);
        if (lastEntry.endOffset >= targetOffset - 1) {
          EditorPosition position = lastEntry.buildEndLinePosition();
          if (document.getCharsSequence().charAt(document.getTextLength() - 1) == '\n') {
            position.onNewLine();
          }
          setEagerMatch(position.buildLogicalPosition());
          return;
        }
      }
    } else if (cache.size() > 0 && cache.get(cache.size() - 1).endOffset < targetOffset) {
      // Target offset lies after the last cached visual line: continue from its end.
      EditorPosition position = cache.get(cache.size() - 1).buildEndLinePosition();
      position.onNewLine();
      setInitialPosition(position);
      return;
    }

    int i = MappingUtil.getCacheEntryIndexForOffset(targetOffset, myEditor.getDocument(), cache);
    CacheEntry cacheEntry = null;

    if (i >= 0) {
      CacheEntry candidate = cache.get(i);
      // There is a possible case that target offset points to the start of soft-wrap introduced visual line. We perform eager
      // match then.
      if (candidate.endOffset == targetOffset && i < cache.size() - 1 && cache.get(i + 1).startOffset == targetOffset) {
        EditorPosition position = cache.get(i + 1).buildStartLinePosition();
        SoftWrap softWrap = myStorage.getSoftWrap(targetOffset);
        if (softWrap != null) {
          position.visualColumn = softWrap.getIndentInColumns();
          position.softWrapColumnDiff += softWrap.getIndentInColumns();
          setEagerMatch(position.buildLogicalPosition());
        }
      }
      else if (candidate.startOffset <= targetOffset) {
        cacheEntry = candidate;
      }
    }
    else if (i < -1) {
      // Negative result encodes an insertion point; decode it and fall back to
      // the entry before the insertion point if one exists.
      i = -i - 2;
      if (i < myCache.size()) {
        cacheEntry = myCache.get(i);
      }
    }

    if (cacheEntry == null) {
      setFirstInitialPosition();
    }
    else if (cacheEntry.startOffset <= targetOffset && cacheEntry.endOffset >= targetOffset) {
      setTargetEntry(cacheEntry, true);
    }
    else {
      setInitialPosition(cacheEntry.buildStartLinePosition());
    }
  }

  // Called when plain text advances past `offset`; returns the final logical
  // position once the walk reaches/passes the target offset, null otherwise.
  @Override
  protected LogicalPosition buildIfExceeds(EditorPosition position, int offset) {
    if (myTargetOffset >= offset) {
      return null;
    }

    Document document = myEditor.getDocument();
    int logicalLine = document.getLineNumber(myTargetOffset);
    int linesDiff = logicalLine - position.logicalLine;
    if (linesDiff > 0) {
      // Target is on a later logical line: columns restart from the line start.
      position.onNewLine();
      int column = myTargetOffset - document.getLineStartOffset(logicalLine);
      position.visualColumn = column;
      position.logicalColumn = column;
    }
    else {
      // Same line: advance both columns by the remaining offset delta.
      int columnsDiff = myTargetOffset - position.offset;
      position.logicalColumn += columnsDiff;
      position.visualColumn += columnsDiff;
    }
    position.logicalLine = logicalLine;
    position.offset = myTargetOffset;

    // Process use-case when target offset points to 'after soft wrap' position.
    //SoftWrap softWrap = myStorage.getSoftWrap(offset);
    //if (softWrap != null && offset < getAnchorCacheEntry().endOffset) {
    //  position.visualColumn = softWrap.getIndentInColumns();
    //  position.softWrapColumnDiff = position.visualColumn - position.logicalColumn;
    //  return position.buildLogicalPosition();
    //}
    return position.buildLogicalPosition();
  }

  // Handles a target offset that falls inside a collapsed fold region; fold
  // regions contribute to logicalColumn/foldedLines but not to visual position.
  @Override
  protected LogicalPosition buildIfExceeds(@NotNull EditorPosition position, @NotNull FoldRegion foldRegion) {
    if (myTargetOffset >= foldRegion.getEndOffset()) {
      return null;
    }

    Document document = myEditor.getDocument();
    int targetLogicalLine = document.getLineNumber(myTargetOffset);
    if (targetLogicalLine == position.logicalLine) {
      // Target offset is located on the same logical line as folding start.
      FoldingData cachedData = getFoldRegionData(foldRegion);
      int x = 0;
      if (cachedData != null) {
        x = cachedData.startX;
      }
      position.logicalColumn += myRepresentationHelper.toVisualColumnSymbolsNumber(
        document.getCharsSequence(), foldRegion.getStartOffset(), myTargetOffset, x
      );
    }
    else {
      // Target offset is located on a different line with folding start.
      position.logicalColumn = myRepresentationHelper.toVisualColumnSymbolsNumber(
        document.getCharsSequence(), foldRegion.getStartOffset(), myTargetOffset, 0
      );
      position.softWrapColumnDiff = 0;
      int linesDiff = document.getLineNumber(myTargetOffset) - document.getLineNumber(foldRegion.getStartOffset());
      position.logicalLine += linesDiff;
      position.foldedLines += linesDiff;
      position.softWrapLinesBefore += position.softWrapLinesCurrent;
      position.softWrapLinesCurrent = 0;
    }

    position.foldingColumnDiff = position.visualColumn - position.softWrapColumnDiff - position.logicalColumn;
    position.offset = myTargetOffset;
    return position.buildLogicalPosition();
  }

  // A tab stop exactly at the target offset terminates the walk; otherwise continue.
  @Nullable
  @Override
  protected LogicalPosition buildIfExceeds(EditorPosition context, TabData tabData) {
    if (tabData.offset == myTargetOffset) {
      return context.buildLogicalPosition();
    }
    return null;
  }

  // Applies a soft wrap's indent to the running position; terminates the walk
  // only when the wrap starts exactly at the target offset.
  @Nullable
  @Override
  public LogicalPosition processSoftWrap(EditorPosition position, SoftWrap softWrap) {
    position.visualColumn = softWrap.getIndentInColumns();
    position.softWrapColumnDiff += softWrap.getIndentInColumns();

    if (softWrap.getStart() == myTargetOffset) {
      return position.buildLogicalPosition();
    }
    else {
      return null;
    }
  }

  // Final fallback when the walk exhausts the cached data before reaching the
  // target: derive the position directly from document line/column arithmetic.
  @NotNull
  @Override
  public LogicalPosition build(EditorPosition position) {
    Document document = myEditor.getDocument();
    int logicalLine = document.getLineNumber(myTargetOffset);
    int linesDiff = logicalLine - position.logicalLine;
    if (linesDiff > 0) {
      position.onNewLine();
      position.logicalLine = logicalLine;
      int column = myTargetOffset - document.getLineStartOffset(logicalLine);
      position.logicalColumn = column;
      position.visualColumn = column;
    }
    else {
      int columnsDiff = myTargetOffset - position.offset;
      position.logicalColumn += columnsDiff;
      position.visualColumn += columnsDiff;
    }

    position.offset = myTargetOffset;
    return position.buildLogicalPosition();
  }
}
| apache-2.0 |
pvkarthik87/HeyBeach | HeyBeach/app/src/main/java/com/karcompany/heybeach/storage/KeyValueUtils.java | 1546 | package com.karcompany.heybeach.storage;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import com.karcompany.heybeach.config.Constants;
/**
* Created by pvkarthik on 2017-02-23.
*/
/**
 * Static helpers around the app's default {@link SharedPreferences} store.
 */
public class KeyValueUtils {

    /**
     * Returns true when a non-empty access token is stored, i.e. the user is logged in.
     */
    public static boolean isLoggedIn(Context ctx) {
        if (ctx == null) return false;
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ctx);
        String token = sharedPreferences.getString(Constants.KEY_ACCESS_TOKEN, "");
        return !TextUtils.isEmpty(token);
    }

    /**
     * Stores {@code value} under {@code key}; no-op when any argument is null/empty.
     */
    public static void updateKey(Context context, String key, String value) {
        if (context == null || TextUtils.isEmpty(key) || TextUtils.isEmpty(value)) return;
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context.getApplicationContext());
        sharedPreferences.edit().putString(key, value).apply();
    }

    /**
     * Returns the value stored under {@code key}, or "" when absent or when the
     * arguments are null/empty.
     */
    public static String getKey(Context ctx, String key) {
        if (ctx == null || TextUtils.isEmpty(key)) return "";
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ctx.getApplicationContext());
        // BUG FIX: this previously ignored the `key` parameter and always read
        // Constants.KEY_ACCESS_TOKEN (copy-paste from isLoggedIn), so lookups for
        // any other key silently returned the access token instead.
        return sharedPreferences.getString(key, "");
    }

    /**
     * Removes the entry stored under {@code key}; no-op when arguments are null/empty.
     */
    public static void removeKey(Context context, String key) {
        if (context == null || TextUtils.isEmpty(key)) return;
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context.getApplicationContext());
        sharedPreferences.edit().remove(key).apply();
    }
}
| apache-2.0 |