repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
6thsolution/ApexNLP
dfalex/src/main/java/com/nobigsoftware/dfalex/DfaFromNfa.java
9517
/*
 * Copyright 2015 Matthew Timmermans
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.nobigsoftware.dfalex;

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

import static backport.java.util.function.BackportFuncs.computeIfAbsent;

/**
 * Turns an NFA into a non-minimal RawDfa by powerset construction.
 *
 * Each DFA state corresponds to a set of simultaneously-reachable NFA states
 * (closed over epsilon transitions).  The set is encoded into a compact
 * integer-list "signature" so that equal sets map to the same DFA state.
 */
class DfaFromNfa<RESULT>
{
    //inputs
    private final Nfa<RESULT> m_nfa;
    private final int[] m_nfaStartStates;
    private final int[] m_dfaStartStates;
    private final DfaAmbiguityResolver<? super RESULT> m_ambiguityResolver;

    //utility
    private final DfaStateSignatureCodec m_dfaSigCodec = new DfaStateSignatureCodec();

    //These fields are scratch space, reused across calls to avoid reallocation
    private final IntListKey m_tempStateSignature = new IntListKey();
    private final ArrayDeque<Integer> m_tempNfaClosureList = new ArrayDeque<>();
    private final HashSet<RESULT> m_tempResultSet = new HashSet<RESULT>();

    //accumulators
    private final HashMap<RESULT, Integer> m_acceptSetMap = new HashMap<>();
    private final ArrayList<RESULT> m_acceptSets = new ArrayList<>();
    private final HashMap<IntListKey, Integer> m_dfaStateSignatureMap = new HashMap<>();
    private final ArrayList<IntListKey> m_dfaStateSignatures = new ArrayList<>();
    private final ArrayList<DfaStateInfo> m_dfaStates = new ArrayList<>();

    /**
     * Build a DFA from the given NFA.
     *
     * @param nfa               source NFA
     * @param nfaStartStates    NFA start states; one DFA start state is produced per entry
     * @param ambiguityResolver invoked when a DFA state accepts more than one distinct RESULT
     */
    public DfaFromNfa(Nfa<RESULT> nfa, int[] nfaStartStates,
            DfaAmbiguityResolver<? super RESULT> ambiguityResolver)
    {
        m_nfa = nfa;
        m_nfaStartStates = nfaStartStates;
        m_dfaStartStates = new int[nfaStartStates.length];
        m_ambiguityResolver = ambiguityResolver;
        // index 0 is reserved for "no accept"
        m_acceptSets.add(null);
        _build();
    }

    /** @return the constructed (non-minimal) DFA */
    public RawDfa<RESULT> getDfa()
    {
        return new RawDfa<>(m_dfaStates, m_acceptSets, m_dfaStartStates);
    }

    // Core powerset construction: discover DFA states breadth-first and build
    // one DFA transition per distinct character range.
    private void _build()
    {
        final CompactIntSubset nfaStateSet = new CompactIntSubset(m_nfa.numStates());
        final ArrayList<NfaTransition> dfaStateTransitions = new ArrayList<>();
        final ArrayList<NfaTransition> transitionQ = new ArrayList<>(1000);

        //Create the DFA start states
        for (int i = 0; i < m_dfaStartStates.length; ++i)
        {
            nfaStateSet.clear();
            _addNfaStateAndEpsilonsToSubset(nfaStateSet, m_nfaStartStates[i]);
            m_dfaStartStates[i] = _getDfaState(nfaStateSet);
        }

        //Create the transitions and other DFA states.
        //m_dfaStateSignatures grows as we discover new states.
        //m_dfaStates grows as we complete them
        for (int stateNum = 0; stateNum < m_dfaStateSignatures.size(); ++stateNum)
        {
            final IntListKey dfaStateSig = m_dfaStateSignatures.get(stateNum);

            dfaStateTransitions.clear();

            //For each DFA state, combine the NFA transitions for each
            //distinct character range into a DFA transition, appending new DFA states
            //as we discover them.
            transitionQ.clear();

            //dump all the NFA transitions for the state into the Q
            DfaStateSignatureCodec.expand(dfaStateSig,
                    state -> m_nfa.forStateTransitions(state, transitionQ::add));

            //sort all the transitions by first character
            Collections.sort(transitionQ, (arg0, arg1) -> {
                if (arg0.m_firstChar != arg1.m_firstChar)
                {
                    return (arg0.m_firstChar < arg1.m_firstChar ? -1 : 1);
                }
                return 0;
            });

            final int tqlen = transitionQ.size();

            //first character we haven't accounted for yet
            char minc = 0;

            //NFA transitions at index < tqstart are no longer relevant
            //NFA transitions at index >= tqstart are in first char order OR have first char <= minc
            //The sequence of NFA transitions contributing to the previous DFA transition starts here
            int tqstart = 0;

            //make a range of NFA transitions corresponding to the next DFA transition
            while (tqstart < tqlen)
            {
                NfaTransition trans = transitionQ.get(tqstart);
                if (trans.m_lastChar < minc)
                {
                    // transition entirely below the next unaccounted character -- drop it
                    ++tqstart;
                    continue;
                }

                //INVAR - trans contributes to the next DFA transition
                nfaStateSet.clear();
                _addNfaStateAndEpsilonsToSubset(nfaStateSet, trans.m_stateNum);
                char startc = trans.m_firstChar;
                char endc = trans.m_lastChar;
                if (startc < minc)
                {
                    startc = minc;
                }

                //make range of all transitions that include the start character, removing ones
                //that drop out
                for (int tqend = tqstart + 1; tqend < tqlen; ++tqend)
                {
                    trans = transitionQ.get(tqend);
                    if (trans.m_lastChar < startc)
                    {
                        //remove this one (swap-with-front compaction; order past tqstart is repaired
                        //by the first-char sort invariant)
                        transitionQ.set(tqend, transitionQ.get(tqstart++));
                        continue;
                    }
                    if (trans.m_firstChar > startc)
                    {
                        //this one is for the next transition
                        if (trans.m_firstChar <= endc)
                        {
                            endc = (char) (trans.m_firstChar - 1);
                        }
                        break;
                    }
                    //this one counts
                    if (trans.m_lastChar < endc)
                    {
                        endc = trans.m_lastChar;
                    }
                    _addNfaStateAndEpsilonsToSubset(nfaStateSet, trans.m_stateNum);
                }

                dfaStateTransitions.add(new NfaTransition(startc, endc, _getDfaState(nfaStateSet)));

                minc = (char) (endc + 1);
                if (minc < endc)
                {
                    //wrapped around past Character.MAX_VALUE -- whole char range covered
                    break;
                }
            }

            //INVARIANT: m_dfaStatesOut.size() == stateNum
            m_dfaStates.add(_createStateInfo(dfaStateSig, dfaStateTransitions));
        }
    }

    //Add an NFA state to dest, along with the transitive
    //closure over its epsilon transitions (iterative worklist, no recursion)
    private void _addNfaStateAndEpsilonsToSubset(CompactIntSubset dest, int stateNum)
    {
        m_tempNfaClosureList.clear();
        if (dest.add(stateNum))
        {
            m_tempNfaClosureList.add(stateNum);
        }
        Integer newNfaState;
        while ((newNfaState = m_tempNfaClosureList.poll()) != null)
        {
            m_nfa.forStateEpsilons(newNfaState, (Integer src) -> {
                if (dest.add(src))
                {
                    m_tempNfaClosureList.add(src);
                }
            });
        }
    }

    // Feed one NFA state into the signature codec; states with neither
    // transitions nor accepts are irrelevant to DFA identity and are skipped.
    private void _addNfaStateToSignatureCodec(int stateNum)
    {
        if (m_nfa.hasTransitionsOrAccepts(stateNum))
        {
            m_dfaSigCodec.acceptInt(stateNum);
        }
    }

    //Make a DFA state for a set of simultaneous NFA states
    //(interns the signature so identical sets share one DFA state number)
    private Integer _getDfaState(CompactIntSubset nfaStateSet)
    {
        //dump state combination into compressed form
        m_tempStateSignature.clear();
        m_dfaSigCodec.start(m_tempStateSignature::add, nfaStateSet.getSize(), nfaStateSet.getRange());
        nfaStateSet.dumpInOrder(this::_addNfaStateToSignatureCodec);
        m_dfaSigCodec.finish();

        //make sure it's in the map
        Integer dfaStateNum = m_dfaStateSignatureMap.get(m_tempStateSignature);
        if (dfaStateNum == null)
        {
            dfaStateNum = m_dfaStateSignatures.size();
            IntListKey newSig = new IntListKey(m_tempStateSignature);
            m_dfaStateSignatures.add(newSig);
            m_dfaStateSignatureMap.put(newSig, dfaStateNum);
        }
        return dfaStateNum;
    }

    // Build the final DfaStateInfo for one DFA state: resolve its accept value
    // (delegating to the ambiguity resolver when several NFA accepts collide)
    // and intern the accept into m_acceptSets.
    @SuppressWarnings("unchecked")
    private DfaStateInfo _createStateInfo(IntListKey sig, List<NfaTransition> transitions)
    {
        //calculate the set of accepts
        m_tempResultSet.clear();
        DfaStateSignatureCodec.expand(sig, nfastate -> {
            RESULT accept = m_nfa.getAccept(nfastate);
            if (accept != null)
            {
                m_tempResultSet.add(accept);
            }
        });

        //and get an accept set index for it
        RESULT dfaAccept = null;
        if (m_tempResultSet.size() > 1)
        {
            dfaAccept = (RESULT) m_ambiguityResolver.apply(m_tempResultSet);
        }
        else if (!m_tempResultSet.isEmpty())
        {
            dfaAccept = m_tempResultSet.iterator().next();
        }
        int acceptSetIndex = 0;
        if (dfaAccept != null)
        {
            acceptSetIndex = computeIfAbsent(m_acceptSetMap, dfaAccept, keyset -> {
                m_acceptSets.add(keyset);
                return m_acceptSets.size() - 1;
            });
        }
        return new DfaStateInfo(transitions, acceptSetIndex);
    }
}
apache-2.0
emccode/ecs-cf-service-broker
src/main/java/com/emc/ecs/servicebroker/repository/ServiceInstanceBindingRepository.java
6740
package com.emc.ecs.servicebroker.repository;

import com.emc.ecs.servicebroker.exception.EcsManagementClientException;
import com.emc.ecs.servicebroker.service.s3.S3Service;
import com.emc.ecs.servicebroker.model.Constants;
import com.emc.object.s3.bean.GetObjectResult;
import com.emc.object.s3.bean.ListObjectsResult;
import com.emc.object.s3.bean.S3Object;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.servicebroker.model.binding.SharedVolumeDevice;
import org.springframework.cloud.servicebroker.model.binding.VolumeDevice;
import org.springframework.cloud.servicebroker.model.binding.VolumeMount;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static java.lang.String.format;

/**
 * Stores and retrieves {@link ServiceInstanceBinding} records as JSON documents
 * in an S3 bucket, under the {@code service-instance-binding/} key prefix.
 */
@SuppressWarnings("unused")
public class ServiceInstanceBindingRepository {
    private static final Logger logger = LoggerFactory.getLogger(ServiceInstanceBindingRepository.class);

    public static final String FILENAME_PREFIX = "service-instance-binding";

    private final ObjectMapper objectMapper = new ObjectMapper();

    {
        // NOTE -- ideally we would not need this code, but for now, the VolumeMount class has
        // custom serialization that is not matched with corresponding deserialization, so
        // deserializing serialized volume mounts doesn't work OOTB.
        SimpleModule module = new SimpleModule();
        module.addDeserializer(VolumeMount.DeviceType.class, new DeviceTypeDeserializer());
        module.addDeserializer(VolumeMount.Mode.class, new ModeDeserializer());
        module.addDeserializer(VolumeDevice.class, new VolumeDeviceDeserializer());
        objectMapper.registerModule(module);
    }

    @Autowired
    private S3Service s3;

    /** Maps a binding id to its S3 object key, e.g. {@code service-instance-binding/<id>.json}. */
    private static String getFilename(String id) {
        return FILENAME_PREFIX + "/" + id + ".json";
    }

    /** True when the key matches the repository's {@code <prefix>/*.json} naming scheme. */
    private static boolean isCorrectFilename(String filename) {
        return filename.matches(FILENAME_PREFIX + "/.*\\.json");
    }

    /**
     * Loads and deserializes one binding from S3.
     *
     * @throws IOException if the key does not follow the repository naming scheme,
     *                     or the object cannot be read/parsed
     */
    private ServiceInstanceBinding findByFilename(String filename) throws IOException {
        if (!isCorrectFilename(filename)) {
            String errorMessage = format("Invalid filename of service instance binding provided: %s", filename);
            throw new IOException(errorMessage);
        }
        logger.debug("Loading service instance binding from repository file {}", filename);
        GetObjectResult<InputStream> input = s3.getObject(filename);
        return objectMapper.readValue(input.getObject(), ServiceInstanceBinding.class);
    }

    /**
     * Strips secret values (S3 URL, secret key) from the binding's credentials
     * before it is returned to a caller. Mutates and returns the same binding.
     */
    ServiceInstanceBinding removeSecretCredentials(ServiceInstanceBinding binding) {
        Map<String, Object> credentials = binding.getCredentials();
        // Guard against bindings persisted without credentials -- nothing to strip.
        if (credentials != null) {
            credentials.remove(Constants.S3_URL);
            credentials.remove(Constants.CREDENTIALS_SECRET_KEY);
            binding.setCredentials(credentials);
        }
        return binding;
    }

    @PostConstruct
    public void initialize() throws EcsManagementClientException {
        logger.info("Service binding file prefix: {}", FILENAME_PREFIX);
    }

    /** Serializes the binding and writes it to S3 under its id-derived key. */
    public void save(ServiceInstanceBinding binding) throws IOException {
        String filename = getFilename(binding.getBindingId());
        String serialized = objectMapper.writeValueAsString(binding);
        s3.putObject(filename, serialized);
    }

    /** Loads the binding with the given id. */
    public ServiceInstanceBinding find(String id) throws IOException {
        String filename = getFilename(id);
        return findByFilename(filename);
    }

    /**
     * Lists bindings one page at a time, with secrets removed from each entry.
     *
     * @param marker   id of the last binding of the previous page, or null for the first page
     * @param pageSize maximum number of keys to fetch; must not be negative
     */
    public ListServiceInstanceBindingsResponse listServiceInstanceBindings(String marker, int pageSize) throws IOException {
        if (pageSize < 0) {
            throw new IOException("Page size cannot be a negative number");
        }
        List<ServiceInstanceBinding> bindings = new ArrayList<>();
        ListObjectsResult list = marker != null
                ? s3.listObjects(FILENAME_PREFIX + "/", getFilename(marker), pageSize)
                : s3.listObjects(FILENAME_PREFIX + "/", null, pageSize);
        for (S3Object s3Object : list.getObjects()) {
            String filename = s3Object.getKey();
            // Skip keys under the prefix that are not binding documents.
            if (isCorrectFilename(filename)) {
                ServiceInstanceBinding binding = findByFilename(filename);
                bindings.add(removeSecretCredentials(binding));
            }
        }
        ListServiceInstanceBindingsResponse response = new ListServiceInstanceBindingsResponse(bindings);
        response.setMarker(list.getMarker());
        response.setPageSize(list.getMaxKeys());
        response.setNextMarker(list.getNextMarker());
        return response;
    }

    /** Deletes the binding with the given id from S3. */
    public void delete(String id) {
        String filename = getFilename(id);
        s3.deleteObject(filename);
    }

    /** Deserializes VolumeMount.Mode from its serialized short form ("rw"/"r"). */
    public static class ModeDeserializer extends StdDeserializer<VolumeMount.Mode> {

        ModeDeserializer() {
            this(null);
        }

        ModeDeserializer(Class<?> vc) {
            super(vc);
        }

        @Override
        public VolumeMount.Mode deserialize(JsonParser jp, DeserializationContext ctxt)
                throws IOException {
            JsonNode node = jp.getCodec().readTree(jp);
            String s = node.asText();
            if (s.equals("rw")) {
                return VolumeMount.Mode.READ_WRITE;
            } else {
                return VolumeMount.Mode.READ_ONLY;
            }
        }
    }

    /** Deserializes VolumeMount.DeviceType; only SHARED is supported. */
    public static class DeviceTypeDeserializer extends StdDeserializer<VolumeMount.DeviceType> {

        DeviceTypeDeserializer() {
            this(null);
        }

        DeviceTypeDeserializer(Class<?> vc) {
            super(vc);
        }

        @Override
        public VolumeMount.DeviceType deserialize(JsonParser jp, DeserializationContext ctxt)
                throws IOException {
            return VolumeMount.DeviceType.SHARED;
        }
    }

    /** Deserializes the VolumeDevice interface as its SharedVolumeDevice implementation. */
    public static class VolumeDeviceDeserializer extends StdDeserializer<VolumeDevice> {

        VolumeDeviceDeserializer() {
            this(null);
        }

        VolumeDeviceDeserializer(Class<?> vc) {
            super(vc);
        }

        @Override
        public VolumeDevice deserialize(JsonParser jp, DeserializationContext ctxt)
                throws IOException {
            return jp.getCodec().readValue(jp, SharedVolumeDevice.class);
        }
    }
}
apache-2.0
kwakutwumasi/Quakearts-JSF-Webtools
qa-boot/src/main/java/com/quakearts/webapp/facelets/bootstrap/renderers/BootTableRenderer.java
15874
/*******************************************************************************
 * Copyright (C) 2016 Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com>.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com> - initial API and implementation
 ******************************************************************************/
package com.quakearts.webapp.facelets.bootstrap.renderers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.faces.component.UIColumn;
import javax.faces.component.UIComponent;
import javax.faces.component.UIData;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;

import com.quakearts.webapp.facelets.bootstrap.components.BootTable;
import com.quakearts.webapp.facelets.bootstrap.renderkit.Attribute;
import com.quakearts.webapp.facelets.bootstrap.renderkit.AttributeManager;
import com.quakearts.webapp.facelets.bootstrap.renderkit.html_basic.HtmlBasicRenderer;
import com.quakearts.webapp.facelets.util.UtilityMethods;

import static com.quakearts.webapp.facelets.bootstrap.renderkit.RenderKitUtils.*;

/**
 * JSF renderer for {@link BootTable}: emits a Bootstrap-styled HTML
 * {@code <table>} (caption, colgroups, thead in encodeBegin; tbody rows in
 * encodeChildren; tfoot and closing tag in encodeEnd).  Per-render column
 * metadata is cached in a {@link BootMetaInfo} stored on the FacesContext
 * attribute map and cleared at the end of encodeEnd.
 */
public class BootTableRenderer extends HtmlBasicRenderer {

    // pass-through HTML attributes supported by a data table
    private static final Attribute[] ATTRIBUTES =
            AttributeManager.getAttributes(AttributeManager.Key.DATATABLE);

    /**
     * Opens the table element and renders the caption, colgroups facet and
     * table header (both the "header" facet and per-column header facets).
     */
    @Override
    public void encodeBegin(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncode(component)) {
            return;
        }
        BootTable data = (BootTable) component;
        data.setRowIndex(-1);
        ResponseWriter writer = context.getResponseWriter();
        writer.startElement("table", component);
        writer.writeAttribute("id", component.getClientId(context), "id");
        String styleClass = data.get("styleClass");
        // always include the Bootstrap "table" class, appending any user class
        writer.writeAttribute("class","table "+(styleClass !=null?" "+styleClass:""), "styleClass");
        renderHTML5DataAttributes(context, component);
        renderPassThruAttributes(context, writer, component, ATTRIBUTES);
        writer.writeText("\n", component, null);

        // optional <caption>
        UIComponent caption = getFacet(component, "caption");
        if (caption != null) {
            String captionClass = data.get("captionClass");
            String captionStyle = data.get("captionStyle");
            writer.startElement("caption", component);
            if (captionClass != null) {
                writer.writeAttribute("class", captionClass, "captionClass");
            }
            if (captionStyle != null) {
                writer.writeAttribute("style", captionStyle, "captionStyle");
            }
            encodeRecursive(context, caption);
            writer.endElement("caption");
        }

        // optional colgroups facet, rendered verbatim
        UIComponent colGroups = getFacet(component, "colgroups");
        if (colGroups != null) {
            encodeRecursive(context, colGroups);
        }

        BootMetaInfo info = getMetaInfo(context, component);

        // <thead>: table-wide "header" facet spanning all columns, then one
        // <th> per column header facet
        UIComponent header = getFacet(component, "header");
        if (header != null || info.hasHeaderFacets) {
            String headerClass = data.get("headerClass");
            writer.startElement("thead", component);
            writer.writeText("\n", component, null);
            if (header != null) {
                writer.startElement("tr", header);
                writer.startElement("th", header);
                if (headerClass != null) {
                    writer.writeAttribute("class", headerClass, "headerClass");
                }
                if (info.columns.size() > 1) {
                    writer.writeAttribute("colspan", String.valueOf(info.columns.size()), null);
                }
                writer.writeAttribute("scope", "colgroup", null);
                encodeRecursive(context, header);
                writer.endElement("th");
                writer.endElement("tr");
                writer.write("\n");
            }
            if (info.hasHeaderFacets) {
                writer.startElement("tr", component);
                writer.writeText("\n", component, null);
                for (UIColumn column : info.columns) {
                    // per-column header class falls back to the table-wide one
                    String columnHeaderClass = info.getCurrentHeaderClass();
                    writer.startElement("th", column);
                    if (columnHeaderClass != null) {
                        writer.writeAttribute("class", columnHeaderClass, "columnHeaderClass");
                    } else if (headerClass != null) {
                        writer.writeAttribute("class", headerClass, "headerClass");
                    }
                    writer.writeAttribute("scope", "col", null);
                    UIComponent facet = getFacet(column, "header");
                    if (facet != null) {
                        encodeRecursive(context, facet);
                    }
                    writer.endElement("th");
                    writer.writeText("\n", component, null);
                }
                writer.endElement("tr");
                writer.write("\n");
            }
            writer.endElement("thead");
            writer.writeText("\n", component, null);
        }
    }

    /**
     * Renders the table body: iterates the UIData rows (honouring first/rows
     * paging), emitting a new {@code <tbody>} at each row index listed in the
     * "bodyrows" attribute, and an empty placeholder row when no row rendered.
     */
    @Override
    public void encodeChildren(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncodeChildren(component)) {
            return;
        }
        UIData data = (UIData) component;
        ResponseWriter writer = context.getResponseWriter();
        BootMetaInfo info = getMetaInfo(context, data);
        if(info.columns.isEmpty()) {
            writer.startElement("tbody", component);
            renderEmptyTableRow(writer, component);
            writer.endElement("tbody");
            return;
        }
        int processed = 0;
        int rowIndex = data.getFirst() - 1;
        int rows = data.getRows();
        List<Integer> bodyRows = getBodyRows(context.getExternalContext().getApplicationMap(), data);
        boolean hasBodyRows = (bodyRows != null && !bodyRows.isEmpty());
        boolean wroteTableBody = false;
        if (!hasBodyRows) {
            // single tbody for the whole table
            writer.startElement("tbody", component);
            writer.writeText("\n", component, null);
        }
        boolean renderedRow = false;
        while (true) {
            // stop when the page size (rows > 0) is exhausted
            if ((rows > 0) && (++processed > rows)) {
                break;
            }
            data.setRowIndex(++rowIndex);
            if (!data.isRowAvailable()) {
                break;
            }
            // start a fresh tbody at each configured split index
            if (hasBodyRows && bodyRows.contains(data.getRowIndex())) {
                if (wroteTableBody) {
                    writer.endElement("tbody");
                }
                writer.startElement("tbody", data);
                wroteTableBody = true;
            }
            writer.startElement("tr", component);
            if (info.rowClasses.length > 0) {
                writer.writeAttribute("class", info.getCurrentRowClass(), "rowClasses");
            }
            writer.writeText("\n", component, null);
            info.newRow();
            for (UIColumn column : info.columns) {
                // a column flagged rowHeader renders as <th scope="row">
                boolean isRowHeader = Boolean.TRUE.equals(column.getAttributes()
                        .get("rowHeader"));
                if (isRowHeader) {
                    writer.startElement("th", column);
                    writer.writeAttribute("scope", "row", null);
                } else {
                    writer.startElement("td", column);
                }
                String columnClass = info.getCurrentColumnClass();
                if (columnClass != null) {
                    writer.writeAttribute("class", columnClass, "columnClasses");
                }
                for (Iterator<UIComponent> gkids = getChildren(column); gkids
                        .hasNext();) {
                    encodeRecursive(context, gkids.next());
                }
                if (isRowHeader) {
                    writer.endElement("th");
                } else {
                    writer.endElement("td");
                }
                writer.writeText("\n", component, null);
            }
            writer.endElement("tr");
            writer.write("\n");
            renderedRow = true;
        }
        if(!renderedRow) {
            renderEmptyTableRow(writer, data);
        }
        writer.endElement("tbody");
        writer.writeText("\n", component, null);
        data.setRowIndex(-1);
    }

    /**
     * Renders the {@code <tfoot>} (per-column footer facets, then the
     * table-wide "footer" facet), clears cached metadata and closes the table.
     */
    @Override
    public void encodeEnd(FacesContext context, UIComponent component)
            throws IOException {
        if (!shouldEncode(component)) {
            return;
        }
        ResponseWriter writer = context.getResponseWriter();
        BootMetaInfo info = getMetaInfo(context, component);
        UIComponent footer = getFacet(component, "footer");
        if (footer != null || info.hasFooterFacets) {
            String footerClass = (String) component.getAttributes().get("footerClass");
            writer.startElement("tfoot", component);
            writer.writeText("\n", component, null);
            if (info.hasFooterFacets) {
                writer.startElement("tr", component);
                writer.writeText("\n", component, null);
                for (UIColumn column : info.columns) {
                    String columnFooterClass = (String) column.getAttributes().get(
                            "footerClass");
                    writer.startElement("td", column);
                    if (columnFooterClass != null) {
                        writer.writeAttribute("class", columnFooterClass, "columnFooterClass");
                    } else if (footerClass != null) {
                        writer.writeAttribute("class", footerClass, "footerClass");
                    }
                    UIComponent facet = getFacet(column, "footer");
                    if (facet != null) {
                        encodeRecursive(context, facet);
                    }
                    writer.endElement("td");
                    writer.writeText("\n", component, null);
                }
                writer.endElement("tr");
                writer.write("\n");
            }
            if (footer != null) {
                writer.startElement("tr", footer);
                writer.startElement("td", footer);
                if (footerClass != null) {
                    writer.writeAttribute("class", footerClass, "footerClass");
                }
                if (info.columns.size() > 1) {
                    writer.writeAttribute("colspan", String.valueOf(info.columns.size()), null);
                }
                encodeRecursive(context, footer);
                writer.endElement("td");
                writer.endElement("tr");
                writer.write("\n");
            }
            writer.endElement("tfoot");
            writer.writeText("\n", component, null);
        }
        clearMetaInfo(context, component);
        ((UIData) component).setRowIndex(-1);
        writer.endElement("table");
        writer.writeText("\n", component, null);
    }

    /**
     * Parses the comma-separated "bodyrows" attribute into row indices at
     * which a new tbody should start; null when the attribute is absent.
     */
    private List<Integer> getBodyRows(Map<String, Object> appMap, UIData data) {
        List<Integer> result = null;
        String bodyRows = (String) data.getAttributes().get("bodyrows");
        if (bodyRows != null) {
            String [] rows = UtilityMethods.split(appMap, bodyRows, ",");
            if (rows != null) {
                result = new ArrayList<Integer>(rows.length);
                for (String curRow : rows) {
                    result.add(Integer.valueOf(curRow));
                }
            }
        }
        return result;
    }

    /** Emits a single empty {@code <tr><td/></tr>} placeholder row. */
    private void renderEmptyTableRow(final ResponseWriter writer,
            final UIComponent component) throws IOException {
        writer.startElement("tr", component);
        writer.startElement("td", component);
        writer.endElement("td");
        writer.endElement("tr");
    }

    /**
     * Returns the per-render metadata for the table, creating and caching it
     * in the FacesContext attribute map on first use.
     */
    protected BootTableRenderer.BootMetaInfo getMetaInfo(FacesContext context,
            UIComponent table) {
        String key = createKey(table);
        Map<Object, Object> attributes = context.getAttributes();
        BootMetaInfo info = (BootMetaInfo) attributes
                .get(key);
        if (info == null) {
            info = new BootMetaInfo(table);
            attributes.put(key, info);
        }
        return info;
    }

    /** Removes the cached metadata for this table from the FacesContext. */
    protected void clearMetaInfo(FacesContext context, UIComponent table) {
        context.getAttributes().remove(createKey(table));
    }

    /** Cache key: class name plus the component's identity hash. */
    protected String createKey(UIComponent table) {
        return BootMetaInfo.KEY + '_' + table.hashCode();
    }

    /**
     * Immutable-per-render snapshot of column/row styling metadata plus
     * rotating counters used while rows are emitted.
     */
    private static class BootMetaInfo {
        private static final UIColumn PLACE_HOLDER_COLUMN = new UIColumn();
        private static final String[] EMPTY_STRING_ARRAY = new String[0];
        public static final String KEY = BootMetaInfo.class.getName();
        public final String[] rowClasses;
        public final String[] columnClasses;
        public final String[] headerClasses;
        public final List<UIColumn> columns;
        public final boolean hasHeaderFacets;
        public final boolean hasFooterFacets;
        public final int columnCount;
        public int columnStyleCounter;
        public int headerStyleCounter;
        public int rowStyleCounter;

        public BootMetaInfo(UIComponent table) {
            rowClasses = getRowClasses(table);
            columnClasses = getColumnClasses(table);
            headerClasses = getHeaderClasses(table);
            columns = getColumns(table);
            columnCount = columns.size();
            hasHeaderFacets = hasFacet("header", columns);
            hasFooterFacets = hasFacet("footer", columns);
        }

        /** Resets the per-row column/header class counters (row counter keeps cycling). */
        public void newRow() {
            columnStyleCounter = 0;
            headerStyleCounter = 0;
        }

        /** Next configured column class, or null when exhausted or empty. */
        public String getCurrentColumnClass() {
            String style = null;
            if (columnStyleCounter < columnClasses.length
                    && columnStyleCounter <= columnCount) {
                style = columnClasses[columnStyleCounter++];
            }
            return ((style != null && style.length() > 0) ? style : null);
        }

        /** Next configured header class, or null when exhausted or empty. */
        public String getCurrentHeaderClass() {
            String style = null;
            if (headerStyleCounter < headerClasses.length
                    && headerStyleCounter <= columnCount) {
                style = headerClasses[headerStyleCounter++];
            }
            return ((style != null && style.length() > 0) ? style : null);
        }

        /** Next row class, cycling back to the first after the last. */
        public String getCurrentRowClass() {
            String style = rowClasses[rowStyleCounter++];
            if (rowStyleCounter >= rowClasses.length) {
                rowStyleCounter = 0;
            }
            return style;
        }

        private static String[] getColumnClasses(UIComponent table) {
            String values = ((BootTable) table).get("columnClasses");
            if (values == null) {
                return EMPTY_STRING_ARRAY;
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }

        private static String[] getHeaderClasses(UIComponent table) {
            String values = ((BootTable) table).get("headerClasses");
            if (values == null) {
                return EMPTY_STRING_ARRAY;
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }

        // For a UIData table: the rendered UIColumn children.
        // Otherwise: "columns"-attribute many placeholder columns (default 2, min 1).
        private static List<UIColumn> getColumns(UIComponent table) {
            if (table instanceof UIData) {
                int childCount = table.getChildCount();
                if (childCount > 0) {
                    List<UIColumn> results = new ArrayList<UIColumn>(childCount);
                    for (UIComponent kid : table.getChildren()) {
                        if ((kid instanceof UIColumn) && kid.isRendered()) {
                            results.add((UIColumn) kid);
                        }
                    }
                    return results;
                } else {
                    return Collections.emptyList();
                }
            } else {
                int count;
                Object value = table.getAttributes().get("columns");
                if ((value != null) && (value instanceof Integer)) {
                    count = ((Integer) value);
                } else {
                    count = 2;
                }
                if (count < 1) {
                    count = 1;
                }
                List<UIColumn> result = new ArrayList<UIColumn>(count);
                for (int i = 0; i < count; i++) {
                    result.add(PLACE_HOLDER_COLUMN);
                }
                return result;
            }
        }

        /** True when any column declares a facet with the given name. */
        private static boolean hasFacet(String name, List<UIColumn> columns) {
            if (!columns.isEmpty()) {
                for (UIColumn column : columns) {
                    if (column.getFacetCount() > 0) {
                        if (column.getFacets().containsKey(name)) {
                            return true;
                        }
                    }
                }
            }
            return false;
        }

        private static String[] getRowClasses(UIComponent table) {
            String values = ((BootTable) table).get("rowClasses");
            if (values == null) {
                return (EMPTY_STRING_ARRAY);
            }
            Map<String, Object> appMap = FacesContext.getCurrentInstance()
                    .getExternalContext().getApplicationMap();
            return UtilityMethods.split(appMap, values.trim(), ",");
        }
    }
}
apache-2.0
PUPInitiative/pup-code-poc
pup-code-domain/src/test/java/info/pupcode/model/repo/test/AbstractConcordionFixture.java
658
package info.pupcode.model.repo.test;

import org.junit.Before;
import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Base class for Concordion fixtures: bootstraps a Spring application context
 * from {@code applicationContext.xml} and exposes the {@link SpringConfigTest}
 * bean to subclasses.
 *
 * Created by fabientronche1 on 08.11.15.
 */
public class AbstractConcordionFixture {

    protected SpringConfigTest springConfigTest;
    protected ClassPathXmlApplicationContext applicationContext;

    @Before
    public void setUp() {
        applicationContext = new ClassPathXmlApplicationContext("classpath*:applicationContext.xml");
        if (springConfigTest == null) {
            // Look the bean up by type instead of getBean(SpringConfigTest.class.getName()):
            // Spring's default bean name is the uncapitalized simple class name, so a by-name
            // lookup with the fully-qualified name only works if the bean is explicitly named
            // that way. By-type lookup also removes the unchecked cast.
            springConfigTest = applicationContext.getBean(SpringConfigTest.class);
        }
        // NOTE(review): the context is never closed; consider an @After that calls
        // applicationContext.close() to avoid leaking contexts across fixtures.
    }
}
apache-2.0
ayaseruri/luxunPro
app/src/main/java/pro/luxun/luxunanimation/presenter/adapter/BaseRecyclerAdapter.java
1444
package pro.luxun.luxunanimation.presenter.adapter;

import android.support.annotation.UiThread;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.ViewGroup;

import java.util.ArrayList;
import java.util.List;

/**
 * Generic RecyclerView adapter backed by a simple in-memory list.
 *
 * @param <T> item (model) type
 * @param <V> item view type created by {@link #onCreateItemView}
 *
 * Created by wufeiyang on 16/5/7.
 */
public abstract class BaseRecyclerAdapter<T, V extends View>
        extends RecyclerView.Adapter<BaseRecyclerAdapter.BaseViewHolder<V>> {

    protected List<T> mItems = new ArrayList<>();

    @Override
    public BaseViewHolder<V> onCreateViewHolder(ViewGroup parent, int viewType) {
        return new BaseViewHolder<>(onCreateItemView(parent, viewType));
    }

    @Override
    public int getItemCount() {
        return mItems.size();
    }

    /** Replaces the whole data set and notifies observers. Must run on the UI thread. */
    @UiThread
    public void refresh(List<T> datas) {
        mItems.clear();
        add(datas);
    }

    /** Appends items to the data set and notifies observers. Must run on the UI thread. */
    @UiThread
    public void add(List<T> datas) {
        mItems.addAll(datas);
        notifyDataSetChanged();
    }

    // Use the declared generic holder type (BaseViewHolder<V>) rather than the raw
    // BaseViewHolder the original had -- this is the exact override of the adapter's
    // type parameter and avoids raw-type use.
    @Override
    public void onBindViewHolder(BaseViewHolder<V> holder, int position) {
        // RecyclerView.ViewHolder.itemView is declared as View; the cast is safe
        // because every holder is created from onCreateItemView, which returns V.
        @SuppressWarnings("unchecked")
        V v = (V) holder.itemView;
        onBindView(v, mItems.get(position));
    }

    /** Creates the item view for the given view type. */
    protected abstract V onCreateItemView(ViewGroup parent, int viewType);

    /** Binds one item to its view. */
    protected abstract void onBindView(V v, T t);

    /** Minimal holder that simply wraps the item view. */
    public static class BaseViewHolder<V extends View> extends RecyclerView.ViewHolder {
        public BaseViewHolder(V itemView) {
            super(itemView);
        }
    }
}
apache-2.0
bingoogolapple/J2EENote
experiment/src/cn/xmut/experiment/service/impl/ExperimentServiceImpl.java
1988
package cn.xmut.experiment.service.impl;

import java.util.List;

import org.apache.commons.fileupload.FileItem;

import cn.xmut.experiment.dao.IExperimentDao;
import cn.xmut.experiment.dao.impl.jdbc.ExperimentDaoImpl;
import cn.xmut.experiment.domain.Experiment;
import cn.xmut.experiment.domain.ShowExperiment;
import cn.xmut.experiment.service.IExperimentService;

/**
 * Service-layer implementation of {@link IExperimentService} that delegates
 * every operation to an {@link IExperimentDao}.
 */
public class ExperimentServiceImpl implements IExperimentService {

    // private final: the DAO is fixed at construction and never reassigned
    private final IExperimentDao experimentDao = new ExperimentDaoImpl();

    /** Stores a new experiment together with its uploaded document. */
    @Override
    public boolean addExperiment(Experiment experiment, String docName,
            String dirPath, FileItem fileItem) {
        return experimentDao.addExperiment(experiment, docName, dirPath, fileItem);
    }

    @Override
    public boolean updateExperiment(Experiment experiment) {
        return experimentDao.updateExperiment(experiment);
    }

    /** @return the stored document path for the given experiment id */
    @Override
    public String getDocPath(int experimentId) {
        return experimentDao.getDocPath(experimentId);
    }

    @Override
    public Experiment getExperiment(int experimentId) {
        return experimentDao.getExperiment(experimentId);
    }

    @Override
    public List<ShowExperiment> queryPass(Experiment experiment) {
        return experimentDao.queryPass(experiment);
    }

    @Override
    public List<ShowExperiment> queryNodistribute(Experiment experiment) {
        return experimentDao.queryNodistribute(experiment);
    }

    /** Experiments not yet estimated, scoped to the given expert. */
    @Override
    public List<ShowExperiment> expertQueryNoExtimate(Experiment experiment,
            String expertId) {
        return experimentDao.expertQueryNoExtimate(experiment, expertId);
    }

    @Override
    public List<ShowExperiment> managerQueryNoExtimate(Experiment experiment) {
        return experimentDao.managerQueryNoExtimate(experiment);
    }

    @Override
    public List<ShowExperiment> managerQueryNoPass(Experiment experiment) {
        return experimentDao.managerQueryNoPass(experiment);
    }

    @Override
    public boolean delExperiment(Experiment experiment) {
        return experimentDao.delExperiment(experiment);
    }

    @Override
    public List<ShowExperiment> headmanQueryNoPass(Experiment experiment) {
        return experimentDao.headmanQueryNoPass(experiment);
    }
}
apache-2.0
HuangLS/neo4j
community/primitive-collections/src/main/java/org/neo4j/collection/primitive/PrimitiveLongIntVisitor.java
1354
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.collection.primitive;

/**
 * Callback for iterating the entries of a primitive long-to-int map without boxing.
 *
 * @param <E> checked exception type the visitor is allowed to throw
 */
// Single abstract method: mark as a functional interface so it can be
// implemented with a lambda and the compiler enforces the SAM contract.
@FunctionalInterface
public interface PrimitiveLongIntVisitor<E extends Exception>
{
    /**
     * Visit the given entry.
     *
     * @param key The key of the entry.
     * @param value The value of the entry.
     * @return 'true' to signal that the iteration should be stopped, 'false' to signal that the iteration should
     * continue if there are more entries to look at.
     * @throws E any thrown exception of type 'E' will bubble up through the 'visit' method.
     */
    boolean visited( long key, int value ) throws E;
}
apache-2.0
anylineorg/anyline
anyline-core/src/main/java/org/anyline/entity/DataSet.java
115477
package org.anyline.entity; import com.fasterxml.jackson.databind.JsonNode; import org.anyline.util.*; import org.anyline.util.regular.Regular; import org.anyline.util.regular.RegularUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Serializable; import java.math.BigDecimal; import java.util.*; public class DataSet implements Collection<DataRow>, Serializable { private static final long serialVersionUID = 6443551515441660101L; protected static final Logger log = LoggerFactory.getLogger(DataSet.class); private boolean result = true; // 执行结果 private Exception exception = null; // 异常 private String message = null; // 提示信息 private PageNavi navi = null; // 分页 private List<String> head = null; // 表头 private List<DataRow> rows = null; // 数据 private List<String> primaryKeys = null; // 主键 private String datalink = null; // 数据连接 private String dataSource = null; // 数据源(表|视图|XML定义SQL) private String schema = null; private String table = null; private long createTime = 0; //创建时间 private long expires = -1; //过期时间(毫秒) 从创建时刻计时expires毫秒后过期 private boolean isFromCache = false; //是否来自缓存 private boolean isAsc = false; private boolean isDesc = false; private Map<String, Object> queryParams = new HashMap<String, Object>();//查询条件 /** * 创建索引 * * @param key key * @return return * crateIndex("ID"); * crateIndex("ID:ASC"); */ public DataSet creatIndex(String key) { return this; } public DataSet() { rows = new ArrayList<DataRow>(); createTime = System.currentTimeMillis(); } public DataSet(List<Map<String, Object>> list) { rows = new ArrayList<DataRow>(); if (null == list) return; for (Map<String, Object> map : list) { DataRow row = new DataRow(map); rows.add(row); } } public static DataSet build(Collection<?> list, String ... 
fields) { return parse(list, fields); } /** * list解析成DataSet * @param list list * @param fields 如果list是二维数据 * fields 下标对应的属性(字段/key)名称 如"ID","CODE","NAME" * 如果不输入则以下标作为DataRow的key 如row.put("0","100").put("1","A01").put("2","张三"); * 如果属性数量超出list长度,取null值存入DataRow * * 如果list是一组数组 * fileds对应条目的属性值 如果不输入 则以条目的属性作DataRow的key 如"USER_ID:id","USER_NM:name" * * @return DataSet */ public static DataSet parse(Collection<?> list, String ... fields) { DataSet set = new DataSet(); if (null != list) { for (Object obj : list) { DataRow row = null; if(obj instanceof Collection){ row = DataRow.parseList((Collection)obj, fields); }else { row = DataRow.parse(obj, fields); } set.add(row); } } return set; } public static DataSet parseJson(DataRow.KEY_CASE keyCase, String json) { if (null != json) { try { return parseJson(keyCase, BeanUtil.JSON_MAPPER.readTree(json)); } catch (Exception e) { } } return null; } public static DataSet parseJson(String json) { return parseJson(DataRow.KEY_CASE.CONFIG, json); } public static DataSet parseJson(DataRow.KEY_CASE keyCase, JsonNode json) { DataSet set = new DataSet(); if (null != json) { if (json.isArray()) { Iterator<JsonNode> items = json.iterator(); while (items.hasNext()) { JsonNode item = items.next(); set.add(DataRow.parseJson(keyCase, item)); } } } return set; } public static DataSet parseJson(JsonNode json) { return parseJson(DataRow.KEY_CASE.CONFIG, json); } public DataSet Camel(){ for(DataRow row:rows){ row.Camel(); } return this; } public DataSet camel(){ for(DataRow row:rows){ row.camel(); } return this; } public DataSet setIsNew(boolean bol) { for (DataRow row : rows) { row.setIsNew(bol); } return this; } /** * 移除每个条目中指定的key * * @param keys keys * @return DataSet */ public DataSet remove(String... 
keys) { for (DataRow row : rows) { for (String key : keys) { row.remove(key); } } return this; } public DataSet trim(){ for(DataRow row:rows){ row.trim(); } return this; } /** * 添加主键 * * @param applyItem 是否应用到集合中的DataRow 默认true * @param pks pks * @return return */ public DataSet addPrimaryKey(boolean applyItem, String... pks) { if (null != pks) { List<String> list = new ArrayList<>(); for (String pk : pks) { list.add(pk); } addPrimaryKey(applyItem, list); } return this; } public DataSet addPrimaryKey(String... pks) { return addPrimaryKey(true, pks); } public DataSet addPrimaryKey(boolean applyItem, Collection<String> pks) { if (null == primaryKeys) { primaryKeys = new ArrayList<>(); } if (null == pks) { return this; } for (String pk : pks) { if (BasicUtil.isEmpty(pk)) { continue; } pk = key(pk); if (!primaryKeys.contains(pk)) { primaryKeys.add(pk); } } if (applyItem) { for (DataRow row : rows) { row.setPrimaryKey(false, primaryKeys); } } return this; } public DataSet addPrimaryKey(Collection<String> pks) { return addPrimaryKey(true, pks); } /** * 设置主键 * * @param applyItem applyItem * @param pks pks * @return return */ public DataSet setPrimaryKey(boolean applyItem, String... pks) { if (null != pks) { List<String> list = new ArrayList<>(); for (String pk : pks) { list.add(pk); } setPrimaryKey(applyItem, list); } return this; } public DataSet setPrimaryKey(String... 
pks) { return setPrimaryKey(true, pks); } public DataSet setPrimaryKey(boolean applyItem, Collection<String> pks) { if (null == pks) { return this; } this.primaryKeys = new ArrayList<>(); addPrimaryKey(applyItem, pks); return this; } public DataSet setPrimaryKey(Collection<String> pks) { return setPrimaryKey(true, pks); } public DataSet set(int index, DataRow item) { rows.set(index, item); return this; } /** * 是否有主键 * * @return return */ public boolean hasPrimaryKeys() { if (null != primaryKeys && primaryKeys.size() > 0) { return true; } else { return false; } } /** * 提取主键 * * @return return */ public List<String> getPrimaryKeys() { if (null == primaryKeys) { primaryKeys = new ArrayList<>(); } return primaryKeys; } /** * 添加表头 * * @param col col * @return return */ public DataSet addHead(String col) { if (null == head) { head = new ArrayList<>(); } if ("ROW_NUMBER".equals(col)) { return this; } if (head.contains(col)) { return this; } head.add(col); return this; } /** * 表头 * * @return return */ public List<String> getHead() { return head; } public int indexOf(Object obj) { return rows.indexOf(obj); } /** * 从begin开始截断到end,方法执行将改变原DataSet长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataSet truncates(int begin, int end) { if (!rows.isEmpty()) { if (begin < 0) { begin = 0; } if (end >= rows.size()) { end = rows.size() - 1; } if (begin >= rows.size()) { begin = rows.size() - 1; } if (end <= 0) { end = 0; } rows = rows.subList(begin, end); } return this; } /** * 从begin开始截断到最后一个 * * @param begin 开始位置 * @return DataSet */ public DataSet truncates(int begin) { if (begin < 0) { begin = rows.size() + begin; int end = rows.size() - 1; return truncates(begin, end); } else { return truncates(begin, rows.size() - 1); } } /** * 从begin开始截断到最后一个并返回其中第一个DataRow * * @param begin 开始位置 * @return DataRow */ public DataRow truncate(int begin) { return truncate(begin, rows.size() - 1); } /** * 从begin开始截断到end位置并返回其中第一个DataRow * * @param begin 开始位置 * @param end 结束位置 
* @return DataRow */ public DataRow truncate(int begin, int end) { truncates(begin, end); if (rows.size() > 0) { return rows.get(0); } else { return null; } } /** * 从begin开始截取到最后一个 * * @param begin 开始位置 * 如果输入负数则取后n个,如果造成数量不足,则取全部 * @return DataSet */ public DataSet cuts(int begin) { if (begin < 0) { begin = rows.size() + begin; int end = rows.size() - 1; return cuts(begin, end); } else { return cuts(begin, rows.size() - 1); } } /** * 从begin开始截取到end位置,方法执行时会创建新的DataSet并不改变原有set长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataSet cuts(int begin, int end) { DataSet result = new DataSet(); if (rows.isEmpty()) { return result; } if (begin < 0) { begin = 0; } if (end >= rows.size()) { end = rows.size() - 1; } if (begin >= rows.size()) { begin = rows.size() - 1; } if (end <= 0) { end = 0; } for (int i = begin; i <= end; i++) { result.add(rows.get(i)); } return result; } /** * 从begin开始截取到最后一个,并返回其中第一个DataRow * * @param begin 开始位置 * @return DataSet */ public DataRow cut(int begin) { return cut(begin, rows.size() - 1); } /** * 从begin开始截取到end位置,并返回其中第一个DataRow,方法执行时会创建新的DataSet并不改变原有set长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataRow cut(int begin, int end) { DataSet result = cuts(begin, end); if (result.size() > 0) { return result.getRow(0); } return null; } /** * 记录数量 * * @return return */ public int size() { int result = 0; if (null != rows) result = rows.size(); return result; } public int getSize() { return size(); } /** * 是否出现异常 * * @return return */ public boolean isException() { return null != exception; } public boolean isFromCache() { return isFromCache; } public DataSet setIsFromCache(boolean bol) { this.isFromCache = bol; return this; } /** * 返回数据是否为空 * * @return return */ public boolean isEmpty() { boolean result = true; if (null == rows) { result = true; } else if (rows instanceof Collection) { result = ((Collection<?>) rows).isEmpty(); } return result; } /** * 读取一行数据 * * @param index index * @return 
return */ public DataRow getRow(int index) { DataRow row = null; if (null != rows && index < rows.size()) { row = rows.get(index); } if (null != row) { row.setContainer(this); } return row; } public boolean exists(String ... params){ DataRow row = getRow(0, params); return row != null; } public DataRow getRow(String... params) { return getRow(0, params); } public DataRow getRow(DataRow params) { return getRow(0, params); } public DataRow getRow(List<String> params) { String[] kvs = BeanUtil.list2array(params); return getRow(0, kvs); } public DataRow getRow(int begin, String... params) { DataSet set = getRows(begin, 1, params); if (set.size() > 0) { return set.getRow(0); } return null; } public DataRow getRow(int begin, DataRow params) { DataSet set = getRows(begin, 1, params); if (set.size() > 0) { return set.getRow(0); } return null; } /** * 根据keys去重 * * @param keys keys * @return DataSet */ public DataSet distinct(String... keys) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { DataRow row = rows.get(i); //查看result中是否已存在 String[] params = packParam(row, keys); if (result.getRow(params) == null) { DataRow tmp = new DataRow(); for (String key : keys) { tmp.put(key, row.get(key)); } result.addRow(tmp); } } } result.cloneProperty(this); return result; } public DataSet distinct(List<String> keys) { DataSet result = new DataSet(); if (null != rows) { for (DataRow row:rows) { //查看result中是否已存在 String[] params = packParam(row, keys); if (result.getRow(params) == null) { DataRow tmp = new DataRow(); for (String key : keys) { tmp.put(key, row.get(key)); } result.addRow(tmp); } } } result.cloneProperty(this); return result; } public Object clone() { DataSet set = new DataSet(); List<DataRow> rows = new ArrayList<DataRow>(); for (DataRow row : this.rows) { rows.add((DataRow) row.clone()); } set.setRows(rows); set.cloneProperty(this); return set; } private DataSet cloneProperty(DataSet from) { return 
cloneProperty(from, this); } public static DataSet cloneProperty(DataSet from, DataSet to) { if (null != from && null != to) { to.exception = from.exception; to.message = from.message; to.navi = from.navi; to.head = from.head; to.primaryKeys = from.primaryKeys; to.dataSource = from.dataSource; to.datalink = from.datalink; to.schema = from.schema; to.table = from.table; } return to; } /** * 指定key转换成number * @param keys keys * @return DataRow */ public DataSet convertNumber(String ... keys){ if(null != keys) { for(DataRow row:rows){ row.convertNumber(keys); } } return this; } public DataSet convertString(String ... keys){ if(null != keys) { for(DataRow row:rows){ row.convertString(keys); } } return this; } public DataSet skip(boolean skip){ for(DataRow row:rows){ row.skip = skip; } return this; } /** * 筛选符合条件的集合 * 注意如果String类型 1与1.0比较不相等, 可以先调用convertNumber转换一下数据类型 * @param params key1,value1,key2:value2,key3,value3 * "NM:zh%","AGE:&gt;20","NM","%zh%" * @param begin begin * @param qty 最多筛选多少个 0表示不限制 * @return return */ public DataSet getRows(int begin, int qty, String... 
params) { DataSet set = new DataSet(); Map<String, String> kvs = new HashMap<String, String>(); int len = params.length; int i = 0; String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对,如时间格式TIME:{10:10} while (i < len) { String p1 = params[i]; if (BasicUtil.isEmpty(p1)) { i++; continue; } else if (p1.contains(":")) { String ks[] = BeanUtil.parseKeyValue(p1); kvs.put(ks[0], ks[1]); i++; continue; } else { if (i + 1 < len) { String p2 = params[i + 1]; if (BasicUtil.isEmpty(p2) || !p2.contains(":")) { kvs.put(p1, p2); i += 2; continue; } else if (p2.startsWith("{") && p2.endsWith("}")) { p2 = p2.substring(1, p2.length() - 1); kvs.put(p1, p2); kvs.put(p1 + srcFlagTag, "true"); i += 2; continue; } else { String ks[] = BeanUtil.parseKeyValue(p2); kvs.put(ks[0], ks[1]); i += 2; continue; } } } i++; } return getRows(begin, qty, kvs); } public DataSet getRows(int begin, int qty, DataRow kvs) { Map<String,String> map = new HashMap<String,String>(); for(String k:kvs.keySet()){ map.put(k, kvs.getString(k)); } return getRows(begin, qty, map); } public DataSet getRows(int begin, int qty, Map<String, String> kvs) { DataSet set = new DataSet(); String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对 BigDecimal d1; BigDecimal d2; for (DataRow row:rows) { if(row.skip){ continue; } boolean chk = true;//对比结果 for (String k : kvs.keySet()) { boolean srcFlag = false; if (k.endsWith(srcFlagTag)) { continue; } else { String srcFlagValue = kvs.get(k + srcFlagTag); if (BasicUtil.isNotEmpty(srcFlagValue)) { srcFlag = true; } } String v = kvs.get(k); Object value = row.get(k); if(!row.containsKey(k) && null == value){ //注意这里有可能是个复合key chk = false; break; } if (null == v) { if (null != value) { chk = false; break; }else{ continue; } } else { if (null == value) { chk = false; break; } //与SQL.COMPARE_TYPE保持一致 int compare = 10; if (v.startsWith("=")) { compare = 10; v = v.substring(1); } else if (v.startsWith(">")) { compare = 20; v = v.substring(1); } else if 
(v.startsWith(">=")) { compare = 21; v = v.substring(2); } else if (v.startsWith("<")) { compare = 30; v = v.substring(1); } else if (v.startsWith("<=")) { compare = 31; v = v.substring(2); } else if (v.startsWith("%") && v.endsWith("%")) { compare = 50; v = v.substring(1, v.length() - 1); } else if (v.endsWith("%")) { compare = 51; v = v.substring(0, v.length() - 1); } else if (v.startsWith("%")) { compare = 52; v = v.substring(1); } if(compare <= 31 && value instanceof Number) { try { d1 = new BigDecimal(value.toString()); d2 = new BigDecimal(v); int cr = d1.compareTo(d2); if (compare == 10) { if (cr != 0) { chk = false; break; } } else if (compare == 20) { if (cr <= 0) { chk = false; break; } } else if (compare == 21) { if (cr < 0) { chk = false; break; } } else if (compare == 30) { if (cr >= 0) { chk = false; break; } } else if (compare == 31) { if (cr > 0) { chk = false; break; } } }catch (NumberFormatException e){ chk = false; break; } } String str = value + ""; str = str.toLowerCase(); v = v.toLowerCase(); if (srcFlag) { v = "{" + v + "}"; } if (compare == 10) { if (!v.equals(str)) { chk = false; break; } } else if (compare == 50) { if (!str.contains(v)) { chk = false; break; } } else if (compare == 51) { if (!str.startsWith(v)) { chk = false; break; } } else if (compare == 52) { if (!str.endsWith(v)) { chk = false; break; } } } }//end for kvs if (chk) { set.add(row); if (qty > 0 && set.size() >= qty) { break; } } }//end for rows set.cloneProperty(this); return set; } public DataSet getRows(int begin, String... params) { return getRows(begin, -1, params); } public DataSet getRows(String... params) { return getRows(0, params); } public DataSet getRows(DataSet set, String key) { String kvs[] = new String[set.size()]; int i = 0; for (DataRow row : set) { String value = row.getString(key); if (BasicUtil.isNotEmpty(value)) { kvs[i++] = key + ":" + value; } } return getRows(kvs); } public DataSet getRows(DataRow row, String... 
keys) { List<String> list = new ArrayList<>(); int i = 0; for (String key : keys) { String value = row.getString(key); if (BasicUtil.isNotEmpty(value)) { list.add(key + ":" + value); } } String[] kvs = BeanUtil.list2array(list); return getRows(kvs); } /** * 数字格式化 * * @param format format * @param cols cols * @return return */ public DataSet formatNumber(String format, String... cols) { if (null == cols || BasicUtil.isEmpty(format)) { return this; } int size = size(); for (int i = 0; i < size; i++) { DataRow row = getRow(i); row.formatNumber(format, cols); } return this; } public DataSet numberFormat(String target, String key, String format){ for(DataRow row: rows){ numberFormat(target, key, format); } return this; } public DataSet numberFormat(String key, String format){ return numberFormat(key, key, format); } /** * 日期格式化 * * @param format format * @param cols cols * @return return */ public DataSet formatDate(String format, String... cols) { if (null == cols || BasicUtil.isEmpty(format)) { return this; } int size = size(); for (int i = 0; i < size; i++) { DataRow row = getRow(i); row.formatDate(format, cols); } return this; } public DataSet dateFormat(String target, String key, String format){ for(DataRow row: rows){ dateFormat(target, key, format); } return this; } public DataSet dateFormat(String key, String format){ return dateFormat(key, key, format); } /** * 提取符合指定属性值的集合 * * @param begin begin * @param end end * @param key key * @param value value * @return return */ public DataSet filter(int begin, int end, String key, String value) { DataSet set = new DataSet(); String tmpValue; int size = size(); if (begin < 0) { begin = 0; } for (int i = begin; i < size && i <= end; i++) { tmpValue = getString(i, key, ""); if ((null == value && null == tmpValue) || (null != value && value.equals(tmpValue))) { set.add(getRow(i)); } } set.cloneProperty(this); return set; } public DataSet getRows(int fr, int to) { DataSet set = new DataSet(); int size = this.size(); if (fr 
< 0) { fr = 0; } for (int i = fr; i < size && i <= to; i++) { set.addRow(getRow(i)); } return set; } /** * 合计 * @param begin 开始 * @param end 结束 * @param key key * @return BigDecimal */ public BigDecimal sum(int begin, int end, String key) { BigDecimal result = BigDecimal.ZERO; int size = rows.size(); if (begin <= 0) { begin = 0; } for (int i = begin; i < size && i <= end; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp) { result = result.add(getDecimal(i, key, 0)); } } return result; } public BigDecimal sum(String key) { BigDecimal result = BigDecimal.ZERO; result = sum(0, size() - 1, key); return result; } /** * 多列合计 * @param result 保存合计结果 * @param keys keys * @return DataRow */ public DataRow sums(DataRow result, String... keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, sum(key)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, sum(key)); } } } return result; } public DataRow sums(String... keys) { return sums(new DataRow(), keys); } /** * 多列平均值 * * @param result 保存合计结果 * @param keys keys * @return DataRow */ public DataRow avgs(DataRow result, String... keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, avg(key)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, avg(key)); } } } return result; } public DataRow avgs(String... keys) { return avgs(new DataRow(), keys); } /** * 多列平均值 * @param result 保存合计结果 * @param scale scale * @param round round * @param keys keys * @return DataRow */ public DataRow avgs(DataRow result, int scale, int round, String... 
keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, avg(key, scale, round)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, avg(key, scale, round)); } } } return result; } public DataRow avgs(int scale, int round, String... keys) { return avgs(new DataRow(), scale, round, keys); } /** * 最大值 * * @param top 多少行 * @param key key * @return return */ public BigDecimal maxDecimal(int top, String key) { BigDecimal result = null; int size = rows.size(); if (size > top) { size = top; } for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp && (null == result || tmp.compareTo(result) > 0)) { result = tmp; } } return result; } public BigDecimal maxDecimal(String key) { return maxDecimal(size(), key); } public int maxInt(int top, String key) { BigDecimal result = maxDecimal(top, key); if (null == result) { return 0; } return result.intValue(); } public int maxInt(String key) { return maxInt(size(), key); } public double maxDouble(int top, String key) { BigDecimal result = maxDecimal(top, key); if (null == result) { return 0; } return result.doubleValue(); } public double maxDouble(String key) { return maxDouble(size(), key); } // public BigDecimal max(int top, String key){ // BigDecimal result = maxDecimal(top, key); // return result; // } // public BigDecimal max(String key){ // return maxDecimal(size(), key); // } /** * 最小值 * * @param top 多少行 * @param key key * @return return */ public BigDecimal minDecimal(int top, String key) { BigDecimal result = null; int size = rows.size(); if (size > top) { size = top; } for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp && (null == result || tmp.compareTo(result) < 0)) { result = tmp; } } return result; } public BigDecimal minDecimal(String key) { return minDecimal(size(), key); } public int minInt(int top, String key) { 
BigDecimal result = minDecimal(top, key); if (null == result) { return 0; } return result.intValue(); } public int minInt(String key) { return minInt(size(), key); } public double minDouble(int top, String key) { BigDecimal result = minDecimal(top, key); if (null == result) { return 0; } return result.doubleValue(); } public double minDouble(String key) { return minDouble(size(), key); } // public BigDecimal min(int top, String key){ // BigDecimal result = minDecimal(top, key); // return result; // } // public BigDecimal min(String key){ // return minDecimal(size(), key); // } /** * key对应的value最大的一行 * * @param key key * @return return */ public DataRow max(String key) { int size = size(); if (size == 0) { return null; } DataRow row = null; if (isAsc) { row = getRow(size - 1); } else if (isDesc) { row = getRow(0); } else { asc(key); row = getRow(size - 1); } return row; } public DataRow min(String key) { int size = size(); if (size == 0) { return null; } DataRow row = null; if (isAsc) { row = getRow(0); } else if (isDesc) { row = getRow(size - 1); } else { asc(key); row = getRow(0); } return row; } /** * 平均值 空数据不参与加法但参与除法 * * @param top 多少行 * @param key key * @param scale scale * @param round round * @return return */ public BigDecimal avg(int top, String key, int scale, int round) { BigDecimal result = BigDecimal.ZERO; int size = rows.size(); if (size > top) { size = top; } int count = 0; for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp) { result = result.add(tmp); } count++; } if (count > 0) { result = result.divide(new BigDecimal(count), scale, round); } return result; } public BigDecimal avg(String key, int scale, int round) { BigDecimal result = avg(size(), key, scale ,round); return result; } public BigDecimal avg(String key) { BigDecimal result = avg(size(), key, 2, BigDecimal.ROUND_HALF_UP); return result; } public DataSet addRow(DataRow row) { if (null != row) { rows.add(row); } return this; } public DataSet 
addRow(int idx, DataRow row) { if (null != row) { rows.add(idx, row); } return this; } /** * 合并key例的值 以connector连接 * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concat(String key, String connector) { return BasicUtil.concat(getStrings(key), connector); } public String concatNvl(String key, String connector) { return BasicUtil.concat(getNvlStrings(key), connector); } /** * 合并key例的值 以connector连接(不取null值) * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concatWithoutNull(String key, String connector) { return BasicUtil.concat(getStringsWithoutNull(key), connector); } /** * 合并key例的值 以connector连接(不取空值) * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concatWithoutEmpty(String key, String connector) { return BasicUtil.concat(getStringsWithoutEmpty(key), connector); } public String concatNvl(String key) { return BasicUtil.concat(getNvlStrings(key), ","); } public String concatWithoutNull(String key) { return BasicUtil.concat(getStringsWithoutNull(key), ","); } public String concatWithoutEmpty(String key) { return BasicUtil.concat(getStringsWithoutEmpty(key), ","); } public String concat(String key) { return BasicUtil.concat(getStrings(key), ","); } /** * 提取单列值 * * @param key key * @return return */ public List<Object> fetchValues(String key) { List<Object> result = new ArrayList<Object>(); for (int i = 0; i < size(); i++) { result.add(get(i, key)); } return result; } /** * 取单列不重复的值 * * @param key key * @return return */ public List<String> fetchDistinctValue(String key) { List<String> result = new ArrayList<>(); for (int i = 0; i < size(); i++) { String value = getString(i, key, ""); if (result.contains(value)) { continue; } result.add(value); } return result; } public List<String> fetchDistinctValues(String key) { return fetchDistinctValue(key); } /** * 分页 * * @param link link * @return return */ public String displayNavi(String link) { String 
result = ""; if (null != navi) { result = navi.getHtml(); } return result; } public String navi(String link) { return displayNavi(link); } public String displayNavi() { return displayNavi(null); } public String navi() { return displayNavi(null); } public DataSet put(int idx, String key, Object value) { DataRow row = getRow(idx); if (null != row) { row.put(key, value); } return this; } public DataSet removes(String... keys) { for (DataRow row : rows) { row.removes(keys); } return this; } /** * String * * @param index index * @param key key * @return String * @throws Exception Exception */ public String getString(int index, String key) throws Exception { return getRow(index).getString(key); } public String getString(int index, String key, String def) { try { return getString(index, key); } catch (Exception e) { return def; } } public String getString(String key) throws Exception { return getString(0, key); } public String getString(String key, String def) { return getString(0, key, def); } public Object get(int index, String key) { DataRow row = getRow(index); if (null != row) { return row.get(key); } return null; } public List<Object> gets(String key) { List<Object> list = new ArrayList<Object>(); for (DataRow row : rows) { list.add(row.getString(key)); } return list; } public List<DataSet> getSets(String key) { List<DataSet> list = new ArrayList<DataSet>(); for (DataRow row : rows) { DataSet set = row.getSet(key); if (null != set) { list.add(set); } } return list; } public List<String> getStrings(String key) { List<String> result = new ArrayList<>(); for (DataRow row : rows) { result.add(row.getString(key)); } return result; } public List<Integer> getInts(String key) throws Exception { List<Integer> result = new ArrayList<Integer>(); for (DataRow row : rows) { result.add(row.getInt(key)); } return result; } public List<Object> getObjects(String key) { List<Object> result = new ArrayList<Object>(); for (DataRow row : rows) { result.add(row.get(key)); } return 
result; } public List<String> getDistinctStrings(String key) { return fetchDistinctValue(key); } public List<String> getNvlStrings(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (null != val) { result.add(val.toString()); } else { result.add(""); } } return result; } public List<String> getStringsWithoutEmpty(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (BasicUtil.isNotEmpty(val)) { result.add(val.toString()); } } return result; } public List<String> getStringsWithoutNull(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (null != val) { result.add(val.toString()); } } return result; } public BigDecimal getDecimal(int idx, String key) throws Exception { return getRow(idx).getDecimal(key); } public BigDecimal getDecimal(int idx, String key, double def) { return getDecimal(idx, key, new BigDecimal(def)); } public BigDecimal getDecimal(int idx, String key, BigDecimal def) { try { BigDecimal val = getDecimal(idx, key); if (null == val) { return def; } return val; } catch (Exception e) { return def; } } /** * 抽取指定列生成新的DataSet 新的DataSet只包括指定列的值与分页信息,不包含其他附加信息(如来源表) * @param keys keys * @return DataSet */ public DataSet extract(String ... 
keys){ DataSet result = new DataSet(); for(DataRow row:rows){ DataRow item = row.extract(keys); result.add(item); } result.navi = this.navi; return result; } public DataSet extract(List<String> keys){ DataSet result = new DataSet(); for(DataRow row:rows){ DataRow item = row.extract(keys); result.add(item); } result.navi = this.navi; return result; } /** * html格式(未实现) * * @param index index * @param key key * @return return * @throws Exception Exception */ public String getHtmlString(int index, String key) throws Exception { return getString(index, key); } public String getHtmlString(int index, String key, String def) { return getString(index, key, def); } public String getHtmlString(String key) throws Exception { return getHtmlString(0, key); } /** * escape String * * @param index index * @param key key * @return return * @throws Exception Exception */ public String getEscapeString(int index, String key) throws Exception { return EscapeUtil.escape(getString(index, key)).toString(); } public String getEscapeString(int index, String key, String def) { try { return getEscapeString(index, key); } catch (Exception e) { return EscapeUtil.escape(def).toString(); } } public String getDoubleEscapeString(int index, String key) throws Exception { return EscapeUtil.doubleEscape(getString(index, key)); } public String getDoubleEscapeString(int index, String key, String def) { try { return getDoubleEscapeString(index, key); } catch (Exception e) { return EscapeUtil.doubleEscape(def); } } public String getEscapeString(String key) throws Exception { return getEscapeString(0, key); } public String getDoubleEscapeString(String key) throws Exception { return getDoubleEscapeString(0, key); } /** * int * * @param index index * @param key key * @return return * @throws Exception Exception */ public int getInt(int index, String key) throws Exception { return getRow(index).getInt(key); } public int getInt(int index, String key, int def) { try { return getInt(index, key); } catch 
(Exception e) { return def; } } public int getInt(String key) throws Exception { return getInt(0, key); } public int getInt(String key, int def) { return getInt(0, key, def); } /** * double * * @param index index * @param key key * @return return * @throws Exception Exception */ public double getDouble(int index, String key) throws Exception { return getRow(index).getDouble(key); } public double getDouble(int index, String key, double def) { try { return getDouble(index, key); } catch (Exception e) { return def; } } public double getDouble(String key) throws Exception { return getDouble(0, key); } public double getDouble(String key, double def) { return getDouble(0, key, def); } /** * 在key列基础上 +value,如果原来没有key列则默认0并put到target * @param target 计算结果key * @param key key * @param value value * @return this */ public DataSet add(String target, String key, int value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, double value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, short value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, float value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String key, int value){ return add(key, key, value); } public DataSet add(String key, double value){ return add(key, key, value); } public DataSet add(String key, short value){ return add(key, key, value); } public DataSet add(String key, float value){ return add(key, key, value); } public DataSet add(String key, BigDecimal value){ return add(key, key, value); } public DataSet subtract(String target, String key, int value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public 
DataSet subtract(String target, String key, double value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, short value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, float value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String key, int value){ return subtract(key, key, value); } public DataSet subtract(String key, double value){ return subtract(key, key, value); } public DataSet subtract(String key, short value){ return subtract(key, key, value); } public DataSet subtract(String key, float value){ return subtract(key, key, value); } public DataSet subtract(String key, BigDecimal value){ return subtract(key, key, value); } public DataSet multiply(String target, String key, int value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, double value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, short value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, float value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String key, int value){ return multiply(key,key,value); } public DataSet multiply(String key, double value){ return multiply(key,key,value); } public DataSet multiply(String key, short value){ return multiply(key,key,value); } public DataSet multiply(String key, float value){ return 
multiply(key,key,value); } public DataSet multiply(String key, BigDecimal value){ return multiply(key,key,value); } public DataSet divide(String target, String key, int value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, double value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, short value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, float value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, BigDecimal value, int mode){ for(DataRow row:rows){ row.divide(target, key, value, mode); } return this; } public DataSet divide(String key, int value){ return divide(key,key, value); } public DataSet divide(String key, double value){ return divide(key,key, value); } public DataSet divide(String key, short value){ return divide(key,key, value); } public DataSet divide(String key, float value){ return divide(key,key, value); } public DataSet divide(String key, BigDecimal value, int mode){ return divide(key,key, value, mode); } public DataSet round(String target, String key, int scale, int mode){ for (DataRow row:rows){ row.round(target, key, scale, mode); } return this; } public DataSet round(String key, int scale, int mode){ return round(key, key, scale, mode); } /** * DataSet拆分成size部分 * @param page 拆成多少部分 * @return list */ public List<DataSet> split(int page){ List<DataSet> list = new ArrayList<>(); int size = this.size(); int vol = size / page;//每页多少行 for(int i=0; i<page; i++){ int fr = i*vol; int to = (i+1)*vol-1; if(i == page-1){ to = size-1; } DataSet set = this.cuts(fr, to); list.add(set); } return list; } /** * rows 列表中的数据格式化成json格式 不同与toJSON * map.put("type", "list"); * map.put("result", result); * map.put("message", message); * map.put("rows", rows); * 
map.put("success", result); * map.put("navi", navi); */ public String toString() { Map<String, Object> map = new HashMap<String, Object>(); map.put("type", "list"); map.put("result", result); map.put("message", message); map.put("rows", rows); map.put("success", result); if(null != navi){ Map<String,Object> navi_ = new HashMap<String,Object>(); navi_.put("page", navi.getCurPage()); navi_.put("pages", navi.getTotalPage()); navi_.put("rows", navi.getTotalRow()); navi_.put("vol", navi.getPageRows()); map.put("navi", navi_); } return BeanUtil.map2json(map); } /** * rows 列表中的数据格式化成json格式 不同与toString * * @return return */ public String toJson() { return BeanUtil.object2json(this); } public String getJson() { return toJSON(); } public String toJSON() { return toJson(); } /** * 根据指定列生成map * * @param key ID,{ID}_{NM} * @return return */ public Map<String, DataRow> toMap(String key) { Map<String, DataRow> maps = new HashMap<String, DataRow>(); for (DataRow row : rows) { maps.put(row.getString(key), row); } return maps; } /** * 子类 * * @param idx idx * @return return */ public Object getChildren(int idx) { DataRow row = getRow(idx); if (null != row) { return row.getChildren(); } return null; } public Object getChildren() { return getChildren(0); } public DataSet setChildren(int idx, Object children) { DataRow row = getRow(idx); if (null != row) { row.setChildren(children); } return this; } public DataSet setChildren(Object children) { setChildren(0, children); return this; } /** * 父类 * * @param idx idx * @return return */ public Object getParent(int idx) { DataRow row = getRow(idx); if (null != row) { return row.getParent(); } return null; } public Object getParent() { return getParent(0); } public DataSet setParent(int idx, Object parent) { DataRow row = getRow(idx); if (null != row) { row.setParent(parent); } return this; } public DataSet setParent(Object parent) { setParent(0, parent); return this; } /** * 转换成对象 * * @param <T> T * @param index index * @param clazz clazz * 
@return return */ public <T> T entity(int index, Class<T> clazz) { DataRow row = getRow(index); if (null != row) { return row.entity(clazz); } return null; } /** * 转换成对象集合 * * @param <T> T * @param clazz clazz * @return return */ public <T> List<T> entity(Class<T> clazz) { List<T> list = new ArrayList<T>(); if (null != rows) { for (DataRow row : rows) { list.add(row.entity(clazz)); } } return list; } public <T> T entity(Class<T> clazz, int idx) { DataRow row = getRow(idx); if (null != row) { return row.entity(clazz); } return null; } public DataSet setDataSource(String dataSource) { if (null == dataSource) { return this; } this.dataSource = dataSource; if (dataSource.contains(".") && !dataSource.contains(":")) { schema = dataSource.substring(0, dataSource.indexOf(".")); table = dataSource.substring(dataSource.indexOf(".") + 1); } for (DataRow row : rows) { if (BasicUtil.isEmpty(row.getDataSource())) { row.setDataSource(dataSource); } } return this; } /** * 合并 * @param set DataSet * @param keys 根据keys去重 * @return DataSet */ public DataSet union(DataSet set, String... keys) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { result.add(rows.get(i)); } } if (null == keys || keys.length == 0) { keys = new String[1]; keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY"); } int size = set.size(); for (int i = 0; i < size; i++) { DataRow item = set.getRow(i); if (!result.contains(item, keys)) { result.add(item); } } return result; } /** * 合并合并不去重 * * @param set set * @return return */ public DataSet unionAll(DataSet set) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { result.add(rows.get(i)); } } int size = set.size(); for (int i = 0; i < size; i++) { DataRow item = set.getRow(i); result.add(item); } return result; } /** * 是否包含这一行 * * @param row row * @param keys keys * @return return */ public boolean contains(DataRow row, String... 
keys) { if (null == rows || rows.size() == 0 || null == row) { return false; } if (null == keys || keys.length == 0) { keys = new String[1]; keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID"); } String params[] = packParam(row, keys); return exists(params); } public String[] packParam(DataRow row, String... keys) { if (null == keys || null == row) { return null; } String params[] = new String[keys.length * 2]; int idx = 0; for (String key : keys) { if (null == key) { continue; } String ks[] = BeanUtil.parseKeyValue(key); params[idx++] = ks[0]; params[idx++] = row.getString(ks[1]); } return params; } /** * 根据数据与属性列表 封装kvs * ["ID","1","CODE","A01"] * @param row 数据 DataRow * @param keys 属性 ID,CODE * @return kvs */ public String[] packParam(DataRow row, List<String> keys) { if (null == keys || null == row) { return null; } String params[] = new String[keys.size() * 2]; int idx = 0; for (String key : keys) { if (null == key) { continue; } String ks[] = BeanUtil.parseKeyValue(key); params[idx++] = ks[0]; params[idx++] = row.getString(ks[1]); } return params; } /** * 从items中按相应的key提取数据 存入 * dispatch("children",items, "DEPAT_CD") * dispatchs("children",items, "CD:BASE_CD") * * @param field 默认"ITEMS" * @param unique 是否只分配一次(同一个条目不能分配到多个组中) * @param recursion 是否递归 * @param items items * @param keys keys ID:DEPT_ID或ID * @return return */ public DataSet dispatchs(String field, boolean unique, boolean recursion, DataSet items, String... 
keys) { if(null == keys || keys.length == 0){ throw new RuntimeException("未指定对应关系"); } if (null == items) { return this; } if (BasicUtil.isEmpty(field)) { field = "ITEMS"; } for (DataRow row : rows) { if (null == row.get(field)) { String[] kvs = packParam(row, reverseKey(keys)); DataSet set = items.getRows(kvs); if (recursion) { set.dispatchs(field, unique, recursion, items, keys); } if(unique) { set.skip(true); } row.put(field, set); } } items.skip(false); return this; } public DataSet dispatchs(boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs("ITEMS", unique, recursion, items, keys); } public DataSet dispatchs(String field, DataSet items, String... keys) { return dispatchs(field,false, false, items, keys); } public DataSet dispatchs(DataSet items, String... keys) { return dispatchs("ITEMS", items, keys); } public DataSet dispatchs(boolean unique, boolean recursion, String... keys) { return dispatchs("ITEMS", unique, recursion, this, keys); } public DataSet dispatchs(String field, boolean unique, boolean recursion, String... keys) { return dispatchs(field, unique, recursion, this, keys); } public DataSet dispatch(String field, boolean unique, boolean recursion, DataSet items, String... keys) { if(null == keys || keys.length == 0){ throw new RuntimeException("未指定对应关系"); } if (null == items) { return this; } if (BasicUtil.isEmpty(field)) { field = "ITEM"; } for (DataRow row : rows) { if (null == row.get(field)) { String[] params = packParam(row, reverseKey(keys)); DataRow result = items.getRow(params); if(unique){ result.skip = true; } row.put(field, result); } } items.skip(false); return this; } public DataSet dispatch(String field, DataSet items, String... keys) { return dispatch(field, false, false, items, keys); } public DataSet dispatch(DataSet items, String... keys) { return dispatch("ITEM", items, keys); } public DataSet dispatch(boolean unique, boolean recursion, String... 
keys) { return dispatch("ITEM", unique, recursion, this, keys); } public DataSet dispatch(String field, boolean unique, boolean recursion, String... keys) { return dispatch(field, unique, recursion, this, keys); } /** * 直接调用dispatchs * @param field 默认"ITEMS" * @param unique 是否只分配一次(同一个条目不能分配到多个组中) * @param recursion 是否递归 * @param items items * @param keys keys ID:DEPT_ID或ID * @return return */ @Deprecated public DataSet dispatchItems(String field, boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs(field, unique, recursion, items, keys); } @Deprecated public DataSet dispatchItems(boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs( unique, recursion, items, keys); } @Deprecated public DataSet dispatchItems(String field, DataSet items, String... keys) { return dispatchs(field, items, keys); } @Deprecated public DataSet dispatchItems(DataSet items, String... keys) { return dispatchs(items, keys); } @Deprecated public DataSet dispatchItems(boolean unique, boolean recursion, String... keys) { return dispatchs( unique, recursion, keys); } @Deprecated public DataSet dispatchItems(String field, boolean unique, boolean recursion, String... keys) { return dispatchs(field, unique, recursion, keys); } @Deprecated public DataSet dispatchItem(String field, boolean unique, boolean recursion, DataSet items, String... keys) { return dispatch(field, unique, recursion, items, keys); } @Deprecated public DataSet dispatchItem(String field, DataSet items, String... keys) { return dispatch(field, items, keys); } @Deprecated public DataSet dispatchItem(DataSet items, String... keys) { return dispatch(items, keys); } @Deprecated public DataSet dispatchItem(boolean unique, boolean recursion, String... keys) { return dispatch(unique, recursion, keys); } @Deprecated public DataSet dispatchItem(String field, boolean unique, boolean recursion, String... 
keys) { return dispatch(field, unique, recursion, keys); } /** * 根据keys列建立关联,并将关联出来的结果拼接到集合的条目上,如果有重复则覆盖条目 * * @param items 被查询的集合 * @param keys 关联条件列 * @return return */ public DataSet join(DataSet items, String... keys) { if (null == items || null == keys || keys.length == 0) { return this; } for (DataRow row : rows) { String[] params = packParam(row, reverseKey(keys)); DataRow result = items.getRow(params); if (null != result) { row.copy(result, result.keys()); } } return this; } public DataSet toLowerKey() { for (DataRow row : rows) { row.toLowerKey(); } return this; } public DataSet toUpperKey() { for (DataRow row : rows) { row.toUpperKey(); } return this; } /** * 按keys分组 * * @param keys keys * @return return */ public DataSet group(String... keys) { DataSet result = distinct(keys); result.dispatchs(true,false, this, keys); return result; } public DataSet or(DataSet set, String... keys) { return this.union(set, keys); } public DataSet getRows(Map<String, String> kvs) { return getRows(0, -1, kvs); } /** * 多个集合的交集 * * @param distinct 是否根据keys抽取不重复的集合 * @param sets 集合 * @param keys 判断依据 * @return DataSet */ public static DataSet intersection(boolean distinct, List<DataSet> sets, String... keys) { DataSet result = null; if (null != sets && sets.size() > 0) { for (DataSet set : sets) { if (null == result) { result = set; } else { result = result.intersection(distinct, set, keys); } } } if (null == result) { result = new DataSet(); } return result; } public static DataSet intersection(List<DataSet> sets, String... keys) { return intersection(false, sets, keys); } /** * 交集 * * @param distinct 是否根据keys抽取不重复的集合(根据keys去重) * @param set set * @param keys 根据keys列比较是否相等,如果列名不一致"ID:USER_ID",ID表示当前DataSet的列,USER_ID表示参数中DataSet的列 * @return return */ public DataSet intersection(boolean distinct, DataSet set, String... 
keys) { DataSet result = new DataSet(); if (null == set) { return result; } for (DataRow row : rows) { String[] kv = reverseKey(keys); if (set.contains(row, kv)) { //符合交集 if(!result.contains(row, kv)){//result中没有 result.add((DataRow) row.clone()); }else { if(!distinct){//result中有但不要求distinct result.add((DataRow) row.clone()); } } } } return result; } public DataSet intersection(DataSet set, String... keys) { return intersection(false, set, keys); } public DataSet and(boolean distinct, DataSet set, String... keys) { return intersection(distinct, set, keys); } public DataSet and(DataSet set, String... keys) { return intersection(false, set, keys); } /** * 补集 * 在this中,但不在set中 * this作为超集 set作为子集 * * @param distinct 是否根据keys抽取不重复的集合 * @param set set * @param keys keys * @return return */ public DataSet complement(boolean distinct, DataSet set, String... keys) { DataSet result = new DataSet(); for (DataRow row : rows) { String[] kv = reverseKey(keys); if (null == set || !set.contains(row, kv)) { if (!distinct || !result.contains(row, kv)) { result.add((DataRow) row.clone()); } } } return result; } public DataSet complement(DataSet set, String... keys) { return complement(false, set, keys); } /** * 差集 * 从当前集合中删除set中存在的row,生成新的DataSet并不修改当前对象 * this中有 set中没有的 * * @param distinct 是否根据keys抽取不重复的集合 * @param set set * @param keys CD,"CD:WORK_CD" * @return return */ public DataSet difference(boolean distinct, DataSet set, String... keys) { DataSet result = new DataSet(); for (DataRow row : rows) { String[] kv = reverseKey(keys); if (null == set || !set.contains(row, kv)) { if (!distinct || !result.contains(row, kv)) { result.add((DataRow) row.clone()); } } } return result; } public DataSet difference(DataSet set, String... 
keys) { return difference(false, set, keys); } /** * 颠倒kv-vk * * @param keys kv * @return String[] */ private String[] reverseKey(String[] keys) { if (null == keys) { return new String[0]; } int size = keys.length; String result[] = new String[size]; for (int i = 0; i < size; i++) { String key = keys[i]; if (BasicUtil.isNotEmpty(key) && key.contains(":")) { String ks[] = BeanUtil.parseKeyValue(key); key = ks[1] + ":" + ks[0]; } result[i] = key; } return result; } /** * 清除指定列全为空的行,如果不指定keys,则清除所有列都为空的行 * * @param keys keys * @return DataSet */ public DataSet removeEmptyRow(String... keys) { int size = this.size(); for (int i = size - 1; i >= 0; i--) { DataRow row = getRow(i); if (null == keys || keys.length == 0) { if (row.isEmpty()) { this.remove(row); } } else { boolean isEmpty = true; for (String key : keys) { if (row.isNotEmpty(key)) { isEmpty = false; break; } } if (isEmpty) { this.remove(row); } } } return this; } public DataSet changeKey(String key, String target, boolean remove) { for(DataRow row:rows){ row.changeKey(key, target, remove); } return this; } public DataSet changeKey(String key, String target) { return changeKey(key, target, true); } /** * 删除rows中的columns列 * * @param columns 检测的列,如果不输入则检测所有列 * @return DataSet */ public DataSet removeColumn(String... columns) { if (null != columns) { for (String column : columns) { for (DataRow row : rows) { row.remove(column); } } } return this; } /** * 删除rows中值为空(null|'')的列 * * @param columns 检测的列,如果不输入则检测所有列 * @return DataSet */ public DataSet removeEmptyColumn(String... 
columns) { for (DataRow row : rows) { row.removeEmpty(columns); } return this; } /** * NULL &gt; "" * * @return DataSet */ public DataSet nvl() { for (DataRow row : rows) { row.nvl(); } return this; } /* ********************************************** 实现接口 *********************************************************** */ public boolean add(DataRow e) { return rows.add((DataRow) e); } @SuppressWarnings({"rawtypes", "unchecked"}) public boolean addAll(Collection c) { return rows.addAll(c); } public void clear() { rows.clear(); } public boolean contains(Object o) { return rows.contains(o); } public boolean containsAll(Collection<?> c) { return rows.containsAll(c); } public Iterator<DataRow> iterator() { return rows.iterator(); } public boolean remove(Object o) { return rows.remove(o); } public boolean removeAll(Collection<?> c) { return rows.removeAll(c); } public boolean retainAll(Collection<?> c) { return rows.retainAll(c); } public Object[] toArray() { return rows.toArray(); } @SuppressWarnings("unchecked") public Object[] toArray(Object[] a) { return rows.toArray(a); } public String getSchema() { return schema; } public DataSet setSchema(String schema) { this.schema = schema; return this; } public String getTable() { return table; } public DataSet setTable(String table) { if (null != table && table.contains(".")) { String[] tbs = table.split("\\."); this.table = tbs[1]; this.schema = tbs[0]; } else { this.table = table; } return this; } /** * 验证是否过期 * 根据当前时间与创建时间对比 * 过期返回 true * * @param millisecond 过期时间(毫秒) millisecond 过期时间(毫秒) * @return boolean */ public boolean isExpire(int millisecond) { if (System.currentTimeMillis() - createTime > millisecond) { return true; } return false; } public boolean isExpire(long millisecond) { if (System.currentTimeMillis() - createTime > millisecond) { return true; } return false; } public boolean isExpire() { if (getExpires() == -1) { return false; } if (System.currentTimeMillis() - createTime > getExpires()) { return true; } return 
false; } public long getCreateTime() { return createTime; } public List<DataRow> getRows() { return rows; } /************************** getter setter ***************************************/ /** * 过期时间(毫秒) * * @return long */ public long getExpires() { return expires; } public DataSet setExpires(long millisecond) { this.expires = millisecond; return this; } public DataSet setExpires(int millisecond) { this.expires = millisecond; return this; } public boolean isResult() { return result; } public boolean isSuccess() { return result; } public DataSet setResult(boolean result) { this.result = result; return this; } public Exception getException() { return exception; } public DataSet setException(Exception exception) { this.exception = exception; return this; } public String getMessage() { return message; } public DataSet setMessage(String message) { this.message = message; return this; } public PageNavi getNavi() { return navi; } public DataSet setNavi(PageNavi navi) { this.navi = navi; return this; } public DataSet setRows(List<DataRow> rows) { this.rows = rows; return this; } public String getDataSource() { String ds = table; if (BasicUtil.isNotEmpty(ds) && BasicUtil.isNotEmpty(schema)) { ds = schema + "." + ds; } if (BasicUtil.isEmpty(ds)) { ds = dataSource; } return ds; } public DataSet order(final String... 
keys) { return asc(keys); } public DataSet put(String key, Object value, boolean pk, boolean override) { for (DataRow row : rows) { row.put(key, value, pk, override); } return this; } public DataSet put(String key, Object value, boolean pk) { for (DataRow row : rows) { row.put(key, value, pk); } return this; } public DataSet put(String key, Object value) { for (DataRow row : rows) { row.put(key, value); } return this; } /** * 行转列 * 表结构(编号, 姓名, 年度, 科目, 分数, 等级) * @param pks 唯一标识key(如编号,姓名) * @param classKeys 分类key(如年度,科目) * @param valueKeys 取值key(如分数,等级),如果不指定key则将整行作为value * @return * 如果指定key * 返回结构 [ * {编号:01,姓名:张三,2010-数学-分数:100}, * {编号:01,姓名:张三,2010-数学-等级:A}, * {编号:01,姓名:张三,2010-物理-分数:100} * ] * 如果只有一个valueKey则返回[ * {编号:01,姓名:张三,2010-数学:100}, * {编号:01,姓名:张三,2010-物理:90} * ] * 不指定valuekey则返回 [ * {编号:01,姓名:张三,2010-数学:{分数:100,等级:A}}, * {编号:01,姓名:张三,2010-物理:{分数:100,等级:A}} * ] */ public DataSet pivot(List<String> pks, List<String> classKeys, List<String> valueKeys) { DataSet result = distinct(pks); DataSet classValues = distinct(classKeys); //[{年度:2010,科目:数学},{年度:2010,科目:物理},{年度:2011,科目:数学}] for (DataRow row : result) { for (DataRow classValue : classValues) { DataRow params = new DataRow(); params.copy(row, pks).copy(classValue); DataRow valueRow = getRow(params); if(null != valueRow){ valueRow.skip = true; } String finalKey = concatValue(classValue,"-");//2010-数学 if(null != valueKeys && valueKeys.size() > 0){ if(valueKeys.size() == 1){ if (null != valueRow) { row.put(finalKey, valueRow.get(valueKeys.get(0))); } else { row.put(finalKey, null); } }else { for (String valueKey : valueKeys) { //{2010-数学-分数:100;2010-数学-等级:A} if (null != valueRow) { row.put(finalKey + "-" + valueKey, valueRow.get(valueKey)); } else { row.put(finalKey + "-" + valueKey, null); } } } }else{ if (null != valueRow){ row.put(finalKey, valueRow); }else{ row.put(finalKey, null); } } } } skip(false); return result; } public DataSet pivot(String[] pks, String[] classKeys, String[] valueKeys) { return 
pivot(Arrays.asList(pks), Arrays.asList(classKeys), Arrays.asList(valueKeys));
}

/**
 * Pivot rows to columns.
 *
 * For a table (name, subject, score) this returns rows shaped like
 * [{name:A, math:100, physics:90}, {name:B, math:100, physics:90}].
 *
 * @param pk       identity key(s), comma separated, e.g. "CODE,NAME"
 * @param classKey category key(s), comma separated, e.g. "SUBJECT,YEAR"
 * @param valueKey value key(s), comma separated, e.g. "SCORE,GRADE"
 * @return pivoted DataSet
 */
public DataSet pivot(String pk, String classKey, String valueKey) {
    List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(",")));
    List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(",")));
    List<String> valueKeys = new ArrayList<>(Arrays.asList(valueKey.trim().split(",")));
    return pivot(pks, classKeys, valueKeys);
}

public DataSet pivot(String pk, String classKey) {
    List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(",")));
    List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(",")));
    List<String> valueKeys = new ArrayList<>();
    return pivot(pks, classKeys, valueKeys);
}

public DataSet pivot(List<String> pks, List<String> classKeys, String... valueKeys) {
    List<String> list = new ArrayList<>();
    if (null != valueKeys) {
        for (String item : valueKeys) {
            list.add(item);
        }
    }
    // FIX: delegate with the converted List. The original passed the varargs array
    // straight back into this same overload, causing unbounded recursion
    // (StackOverflowError) and never reaching the List-based implementation.
    return pivot(pks, classKeys, list);
}

/** Join the values of all columns of the row with the given separator (e.g. "2010-math"). */
private String concatValue(DataRow row, String split) {
    StringBuilder builder = new StringBuilder();
    List<String> keys = row.keys();
    for (String key : keys) {
        if (builder.length() > 0) {
            builder.append(split);
        }
        builder.append(row.getString(key));
    }
    return builder.toString();
}

/** Flatten a row into a [key, value, key, value, ...] array, reading values via getStringNvl. */
private String[] kvs(DataRow row) {
    List<String> keys = row.keys();
    int size = keys.size();
    String[] kvs = new String[size * 2];
    for (int i = 0; i < size; i++) {
        String k = keys.get(i);
        String v = row.getStringNvl(k);
        kvs[i * 2] = k;
        kvs[i * 2 + 1] = v;
    }
    return kvs;
}

/**
 * Sort rows in ascending order by the given keys.
 *
 * @param keys sort keys
 * @return this
 */
public DataSet asc(final String...
keys) { Collections.sort(rows, new Comparator<DataRow>() { public int compare(DataRow r1, DataRow r2) { int result = 0; for (String key : keys) { Object v1 = r1.get(key); Object v2 = r2.get(key); if (null == v1) { if (null == v2) { continue; } return -1; } else { if (null == v2) { return 1; } } if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) { BigDecimal num1 = new BigDecimal(v1.toString()); BigDecimal num2 = new BigDecimal(v2.toString()); result = num1.compareTo(num2); } else if (v1 instanceof Date && v2 instanceof Date) { Date date1 = (Date)v1; Date date2 = (Date)v2; result = date1.compareTo(date2); } else { result = v1.toString().compareTo(v2.toString()); } if (result != 0) { return result; } } return 0; } }); isAsc = true; isDesc = false; return this; } public DataSet desc(final String... keys) { Collections.sort(rows, new Comparator<DataRow>() { public int compare(DataRow r1, DataRow r2) { int result = 0; for (String key : keys) { Object v1 = r1.get(key); Object v2 = r2.get(key); if (null == v1) { if (null == v2) { continue; } return 1; } else { if (null == v2) { return -1; } } if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) { BigDecimal val1 = new BigDecimal(v1.toString()); BigDecimal val2 = new BigDecimal(v2.toString()); result = val2.compareTo(val1); } else if (v1 instanceof Date && v2 instanceof Date) { Date date1 = (Date)v1; Date date2 = (Date)v2; result = date2.compareTo(date1); } else { result = v2.toString().compareTo(v1.toString()); } if (result != 0) { return result; } } return 0; } }); isAsc = false; isDesc = true; return this; } public DataSet addAllUpdateColumns() { for (DataRow row : rows) { row.addAllUpdateColumns(); } return this; } public DataSet clearUpdateColumns() { for (DataRow row : rows) { row.clearUpdateColumns(); } return this; } public DataSet removeNull(String... 
keys) { for (DataRow row : rows) { row.removeNull(keys); } return this; } private static String key(String key) { if (null != key && ConfigTable.IS_UPPER_KEY) { key = key.toUpperCase(); } return key; } /** * 替换所有NULL值 * * @param value value * @return return */ public DataSet replaceNull(String value) { for (DataRow row : rows) { row.replaceNull(value); } return this; } /** * 替换所有空值 * * @param value value * @return return */ public DataSet replaceEmpty(String value) { for (DataRow row : rows) { row.replaceEmpty(value); } return this; } /** * 替换所有NULL值 * * @param key key * @param value value * @return return */ public DataSet replaceNull(String key, String value) { for (DataRow row : rows) { row.replaceNull(key, value); } return this; } /** * 替换所有空值 * * @param key key * @param value value * @return return */ public DataSet replaceEmpty(String key, String value) { for (DataRow row : rows) { row.replaceEmpty(key, value); } return this; } public DataSet replace(String key, String oldChar, String newChar) { if (null == key || null == oldChar || null == newChar) { return this; } for (DataRow row : rows) { row.replace(key, oldChar, newChar); } return this; } public DataSet replace(String oldChar, String newChar) { for (DataRow row : rows) { row.replace(oldChar, newChar); } return this; } /* ************************* 类sql操作 ************************************** */ /** * 随机取一行 * @return DataRow */ public DataRow random() { DataRow row = null; int size = size(); if (size > 0) { row = getRow(BasicUtil.getRandomNumber(0, size - 1)); } return row; } /** * 随机取qty行 * @param qty 行数 * @return DataSet */ public DataSet randoms(int qty) { DataSet set = new DataSet(); int size = size(); if (qty < 0) { qty = 0; } if (qty > size) { qty = size; } for (int i = 0; i < qty; i++) { while (true) { int idx = BasicUtil.getRandomNumber(0, size - 1); DataRow row = set.getRow(idx); if (!set.contains(row)) { set.add(row); break; } } } set.cloneProperty(this); return set; } /** * 随机取min到max行 * 
@param min min * @param max max * @return DataSet */ public DataSet randoms(int min, int max) { int qty = BasicUtil.getRandomNumber(min, max); return randoms(qty); } public DataSet unique(String... keys) { return distinct(keys); } /** * 根据正则提取集合 * @param key key * @param regex 正则 * @param mode 匹配方式 * @return DataSet */ public DataSet regex(String key, String regex, Regular.MATCH_MODE mode) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : this) { tmpValue = row.getString(key); if (RegularUtil.match(tmpValue, regex, mode)) { set.add(row); } } set.cloneProperty(this); return set; } public DataSet regex(String key, String regex) { return regex(key, regex, Regular.MATCH_MODE.MATCH); } public boolean checkRequired(String... keys) { for (DataRow row : rows) { if (!row.checkRequired(keys)) { return false; } } return true; } public Map<String, Object> getQueryParams() { return queryParams; } public DataSet setQueryParams(Map<String, Object> params) { this.queryParams = params; return this; } public Object getQueryParam(String key) { return queryParams.get(key); } public DataSet addQueryParam(String key, Object param) { queryParams.put(key, param); return this; } public String getDatalink() { return datalink; } public void setDatalink(String datalink) { this.datalink = datalink; } public class Select implements Serializable { private static final long serialVersionUID = 1L; private boolean ignoreCase = true; //是否忽略大小写 /** * 是否忽略NULL 如果设置成true 在执行equal notEqual like contains进 null与null比较返回false * 左右出现NULL时直接返回false * true会导致一行数据 equal notEqual都筛选不到 */ private boolean ignoreNull = true; public DataSet setIgnoreCase(boolean bol) { this.ignoreCase = bol; return DataSet.this; } public DataSet setIgnoreNull(boolean bol) { this.ignoreNull = bol; return DataSet.this; } /** * 筛选key=value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet equals(String key, String value) { return equals(DataSet.this, key, value); } private DataSet 
equals(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { boolean chk = false; if (ignoreCase) { chk = tmpValue.equalsIgnoreCase(value); } else { chk = tmpValue.equals(value); } if (chk) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key != value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet notEquals(String key, String value) { return notEquals(DataSet.this, key, value); } private DataSet notEquals(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { boolean chk = false; if (ignoreCase) { chk = !tmpValue.equalsIgnoreCase(value); } else { chk = !tmpValue.equals(value); } if (chk) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key列的值是否包含value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet contains(String key, String value) { return contains(DataSet.this, key, value); } private DataSet contains(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { if (null == value) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); value = value.toLowerCase(); } if (tmpValue.contains(value)) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key列的值like 
pattern的子集,pattern遵循sql通配符的规则,%表示任意个字符,_表示一个字符 * * @param key 列 * @param pattern 表达式 * @return DataSet */ public DataSet like(String key, String pattern) { return like(DataSet.this, key, pattern); } private DataSet like(DataSet src, String key, String pattern) { DataSet set = new DataSet(); if (null != pattern) { pattern = pattern.replace("!", "^").replace("_", "\\s|\\S").replace("%", "(\\s|\\S)*"); } String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == pattern) { continue; } } else { if (null == tmpValue && null == pattern) { set.add(row); continue; } } if (null != tmpValue) { if (null == pattern) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); pattern = pattern.toLowerCase(); } if (RegularUtil.match(tmpValue, pattern, Regular.MATCH_MODE.MATCH)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet notLike(String key, String pattern) { return notLike(DataSet.this, key, pattern); } private DataSet notLike(DataSet src, String key, String pattern) { DataSet set = new DataSet(); if (null == pattern) { return set; } pattern = pattern.replace("!", "^").replace("_", "\\s|\\S").replace("%", "(\\s|\\S)*"); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == pattern) { continue; } } else { if (null == tmpValue && null == pattern) { set.add(row); continue; } } if (null != tmpValue) { if (null == pattern) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); pattern = pattern.toLowerCase(); } if (!RegularUtil.match(tmpValue, pattern, Regular.MATCH_MODE.MATCH)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet startWith(String key, String prefix) { return startWith(DataSet.this, key, prefix); } private DataSet startWith(DataSet src, String key, String prefix) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = 
row.getString(key); if (ignoreNull) { if (null == tmpValue || null == prefix) { continue; } } else { if (null == tmpValue && null == prefix) { set.add(row); continue; } } if (null != tmpValue) { if (null == prefix) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); prefix = prefix.toLowerCase(); } if (tmpValue.startsWith(prefix)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet endWith(String key, String suffix) { return endWith(DataSet.this, key, suffix); } private DataSet endWith(DataSet src, String key, String suffix) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == suffix) { continue; } } else { if (null == tmpValue && null == suffix) { set.add(row); continue; } } if (null != tmpValue) { if (null == suffix) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); suffix = suffix.toLowerCase(); } if (tmpValue.endsWith(suffix)) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet in(String key, T... values) { return in(DataSet.this, key, BeanUtil.array2list(values)); } public <T> DataSet in(String key, Collection<T> values) { return in(DataSet.this, key, values); } private <T> DataSet in(DataSet src, String key, Collection<T> values) { DataSet set = new DataSet(); for (DataRow row : src) { if (BasicUtil.containsString(ignoreNull, ignoreCase, values, row.getString(key))) { set.add(row); } } set.cloneProperty(src); return set; } public <T> DataSet notIn(String key, T... 
values) { return notIn(DataSet.this, key, BeanUtil.array2list(values)); } public <T> DataSet notIn(String key, Collection<T> values) { return notIn(DataSet.this, key, values); } private <T> DataSet notIn(DataSet src, String key, Collection<T> values) { DataSet set = new DataSet(); if (null != values) { String tmpValue = null; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull && null == tmpValue) { continue; } if (!BasicUtil.containsString(ignoreNull, ignoreCase, values, tmpValue)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet isNull(String... keys) { return isNull(DataSet.this, keys); } private DataSet isNull(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNull(set, key); } } return set; } private DataSet isNull(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(null == row.get(key)){ set.add(row); } } return set; } public DataSet isNotNull(String... keys) { return isNotNull(DataSet.this, keys); } private DataSet isNotNull(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNotNull(set, key); } } return set; } private DataSet isNotNull(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(null != row.get(key)){ set.add(row); } } return set; } public DataSet notNull(String... keys) { return isNotNull(keys); } public DataSet isEmpty(String... keys) { return isEmpty(DataSet.this, keys); } private DataSet isEmpty(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isEmpty(set, key); } } return set; } private DataSet isEmpty(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(row.isEmpty(key)){ set.add(row); } } return set; } public DataSet empty(String... keys) { return isEmpty(keys); } public DataSet isNotEmpty(String... 
keys) { return isNotEmpty(DataSet.this, keys); } private DataSet isNotEmpty(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNotEmpty(set, key); } } return set; } private DataSet isNotEmpty(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(row.isNotEmpty(key)){ set.add(row); } } return set; } public DataSet notEmpty(String... keys) { return isNotEmpty(keys); } public <T> DataSet less(String key, T value) { return less(DataSet.this, key, value); } private <T> DataSet less(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) < 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) < 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) < 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet lessEqual(String key, T value) { return lessEqual(DataSet.this, key, value); } private <T> DataSet lessEqual(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) <= 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == 
row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) <= 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) >= 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet greater(String key, T value) { return greater(DataSet.this, key, value); } private <T> DataSet greater(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) > 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) > 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) > 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet greaterEqual(String key, T value) { return greaterEqual(DataSet.this, key, value); } private <T> DataSet greaterEqual(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) >= 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && 
DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) >= 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) >= 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet between(String key, T min, T max) { return between(DataSet.this, key, min, max); } private <T> DataSet between(DataSet src, String key, T min, T max) { DataSet set = greaterEqual(src, key, min); set = lessEqual(set, key, max); return set; } } public Select select = new Select(); }
apache-2.0
Krok3/junior
chapter_001/src/main/java/ua/job4j/loop/Factorial.java
728
package ua.job4j.loop; /** * Class Класс для вычисления факториала заданного числа. * @author vfrundin * @since 05.11.2017 * @version 1.0 */ public class Factorial { /** * Метод должен вычислять факториал поданного на вход числа. * @param n Число для которого нужно определить факториал. * @return result - найденный факториал числа n. */ public int calc(int n) { int result = 1; if (n != 0) { for (int i = 1; i <= n; i++) { result *= i; } } return result; } }
apache-2.0
LearnLib/automatalib
core/src/main/java/net/automatalib/graphs/base/compact/CompactBidiGraph.java
1632
/* Copyright (C) 2013-2022 TU Dortmund * This file is part of AutomataLib, http://www.automatalib.net/. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.automatalib.graphs.base.compact; import net.automatalib.commons.smartcollections.ResizingArrayStorage; import org.checkerframework.checker.nullness.qual.Nullable; public class CompactBidiGraph<@Nullable NP, @Nullable EP> extends AbstractCompactBidiGraph<NP, EP> { private final ResizingArrayStorage<NP> nodeProperties; public CompactBidiGraph() { this.nodeProperties = new ResizingArrayStorage<>(Object.class); } public CompactBidiGraph(int initialCapacity) { super(initialCapacity); this.nodeProperties = new ResizingArrayStorage<>(Object.class, initialCapacity); } @Override public void setNodeProperty(int node, @Nullable NP property) { nodeProperties.ensureCapacity(node + 1); nodeProperties.array[node] = property; } @Override public NP getNodeProperty(int node) { return node < nodeProperties.array.length ? nodeProperties.array[node] : null; } }
apache-2.0
Wechat-Group/WxJava
weixin-java-pay/src/test/java/com/github/binarywang/wxpay/service/impl/WxEntrustPapServiceTest.java
7346
package com.github.binarywang.wxpay.service.impl;

import com.github.binarywang.wxpay.bean.request.*;
import com.github.binarywang.wxpay.bean.result.*;
import com.github.binarywang.wxpay.exception.WxPayException;
import com.github.binarywang.wxpay.service.WxPayService;
import com.github.binarywang.wxpay.testbase.ApiTestModule;
import com.google.common.base.Joiner;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;

/**
 * Integration tests for the WeChat Pay entrusted-withholding ("papay") service:
 * signing contracts via MP / mini-program / H5, one-off and pre-notified
 * withholding, contract query and termination.
 *
 * NOTE(review): these tests hit the live WeChat Pay API with hard-coded
 * merchant data, so they are smoke tests rather than assertions — they only
 * log results.
 *
 * @author chenliang
 * @date 2021-08-02 6:45 PM
 */
@Test
@Guice(modules = ApiTestModule.class)
public class WxEntrustPapServiceTest {
  private final Logger logger = LoggerFactory.getLogger(this.getClass());

  @Inject
  private WxPayService payService;

  /**
   * Official Account (MP) sign-only contract: builds the signing request and
   * logs the redirect URL returned by WeChat.
   */
  @Test
  public void testMpSign() {
    String contractCode = "222200002222";
    String displayAccount = Joiner.on("").join("陈*", "(", "10000014", ")");
    WxMpEntrustRequest wxMpEntrust = WxMpEntrustRequest.newBuilder()
      .planId("142323") // template ID, applied for from WeChat
      .contractCode(contractCode)
      .contractDisplayAccount(displayAccount)
      .notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
      .requestSerial(6L)
      //.returnWeb(1)
      .version("1.0")
      .timestamp(String.valueOf(System.currentTimeMillis() / 1000))
      .outerId(displayAccount)
      .build();
    String url = null;
    try {
      url = this.payService.getWxEntrustPapService().mpSign(wxMpEntrust);
    } catch (WxPayException e) {
      e.printStackTrace();
    }
    // NOTE(review): logs null when the call above failed.
    logger.info(url);
  }

  /**
   * Mini-program sign-only contract.
   */
  @Test
  public void testMaSign() {
    String contractCode = "222220000022222";
    String displayAccount = Joiner.on("").join("陈*", "(", "10000001", ")");
    WxMaEntrustRequest wxMaEntrustRequest = WxMaEntrustRequest.newBuilder()
      .contractCode(contractCode)
      // NOTE(review): passes contractCode here while displayAccount is unused
      // for this field — looks like a copy-paste slip; confirm intent.
      .contractDisplayAccount(contractCode)
      .notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
      .outerId(displayAccount)
      .planId("141535")
      .requestSerial(2L)
      .timestamp(String.valueOf(System.currentTimeMillis() / 1000))
      .build();
    try {
      String url = this.payService.getWxEntrustPapService().maSign(wxMaEntrustRequest);
      logger.info(url);
    } catch (WxPayException e) {
      e.printStackTrace();
    }
  }

  /**
   * H5 sign-only contract.
   */
  @Test
  public void testH5Sign() {
    String contractCode = "222111122222";
    String displayAccount = Joiner.on("").join("陈*", "(", "100000000", ")");
    WxH5EntrustRequest wxH5EntrustRequest = WxH5EntrustRequest.newBuilder()
      .requestSerial(2L)
      .clientIp("127.0.0.1")
      .contractCode(contractCode)
      .contractDisplayAccount(displayAccount)
      .notifyUrl("http://domain.com/api/wxpay/sign/callback.do")
      .planId("141535")
      .returnAppid("1")
      .timestamp(String.valueOf(System.currentTimeMillis() / 1000))
      .version("1.0")
      .outerId(displayAccount)
      .build();
    try {
      WxH5EntrustResult wxH5EntrustResult = this.payService.getWxEntrustPapService().h5Sign(wxH5EntrustRequest);
      logger.info(wxH5EntrustResult.toString());
    } catch (WxPayException e) {
      e.printStackTrace();
    }
  }

  /**
   * Combined pay-and-sign: places an order and signs the withholding contract
   * in the same request.
   */
  @Test
  public void testPaySign() {
    String contractCode = "2222211110000222";
    String displayAccount = Joiner.on("").join("陈*", "(", "10000005", ")");
    String outTradeNo = "11100111101";
    WxPayEntrustRequest wxPayEntrustRequest = WxPayEntrustRequest.newBuilder()
      .attach("local")
      .body("产品名字")
      .contractAppId(this.payService.getConfig().getAppId())
      .contractCode(contractCode)
      .contractDisplayAccount(displayAccount)
      .contractMchId(this.payService.getConfig().getMchId())
      // signing callback
      .contractNotifyUrl("http://domain.com/api/wxpay/sign/callback.do")
      .detail("产品是好")
      .deviceInfo("oneplus 7 pro")
      //.goodsTag()
      //.limitPay()
      // payment callback
      .notifyUrl("http://domain.com/api/wxpay/pay/callback.do")
      .openId("oIvLdt8Q-_aKy4Vo6f4YI6gsIhMc") // openId
      .outTradeNo(outTradeNo)
      .planId("141535")
      //.productId()
      .requestSerial(3L)
      .spbillCreateIp("127.0.0.1")
      //.timeExpire()
      //.timeStart()
      .totalFee(1)
      .tradeType("MWEB")
      .contractOuterId(displayAccount)
      .build();
    try {
      WxPayEntrustResult wxPayEntrustResult = this.payService.getWxEntrustPapService().paySign(wxPayEntrustRequest);
      logger.info(wxPayEntrustResult.toString());
    } catch (WxPayException e) {
      e.printStackTrace();
    }
  }

  /**
   * One-off withholding against an existing contract.
   */
  @Test
  public void testWithhold() {
    String outTradeNo = "101010101";
    WxWithholdRequest withholdRequest = WxWithholdRequest.newBuilder()
      .attach("local")
      .body("产品名字")
      .contractId("202011065409471222") // contract ID returned by WeChat at signing time
      .detail("产品描述")
      .feeType("CNY")
      //.goodsTag()
      .notifyUrl("http://domain.com/api/wxpay/withhold/callback.do")
      .outTradeNo(outTradeNo)
      .spbillCreateIp("127.0.0.1")
      .totalFee(1)
      .tradeType("PAP")
      .build();
    try {
      WxWithholdResult wxWithholdResult = this.payService.getWxEntrustPapService().withhold(withholdRequest);
      logger.info(wxWithholdResult.toString());
    } catch (WxPayException e) {
      e.printStackTrace();
    }
  }

  /**
   * Pre-withholding notification (tells WeChat the estimated amount before the
   * actual deduction).
   */
  @Test
  public void testPreWithhold() {
    WxPreWithholdRequest.EstimateAmount estimateAmount = new WxPreWithholdRequest.EstimateAmount();
    estimateAmount.setAmount(1);
    estimateAmount.setCurrency("CNY");
    WxPreWithholdRequest wxPreWithholdRequest = WxPreWithholdRequest.newBuilder()
      .appId("wx73dssxxxxxx")
      .contractId("202010275173070001")
      .estimateAmount(estimateAmount)
      .mchId("1600010102")
      .build();
    try {
      String httpResponseModel = this.payService.getWxEntrustPapService().preWithhold(wxPreWithholdRequest);
      logger.info(httpResponseModel);
    } catch (WxPayException e) {
      e.printStackTrace();
    }
  }

  /**
   * Query a signed contract by plan ID + contract code.
   */
  @Test
  public void testQuerySign() {
    String outTradeNo = "1212121212";
    WxSignQueryRequest wxSignQueryRequest = WxSignQueryRequest.newBuilder()
      //.contractId("202010275173073211")
      .contractCode(outTradeNo)
      .planId(1432112)
      .version("1.0")
      .build();
    try {
      WxSignQueryResult wxSignQueryResult = this.payService.getWxEntrustPapService().querySign(wxSignQueryRequest);
      logger.info(wxSignQueryResult.toString());
    } catch (WxPayException e) {
      logger.info("异常码:" + e.getErrCode());
      logger.info("异常:" + e);
    }
  }

  /**
   * Terminate (cancel) an existing withholding contract.
   */
  @Test
  public void testTerminationContract() {
    WxTerminatedContractRequest wxTerminatedContractRequest = WxTerminatedContractRequest.newBuilder()
      .contractId("202010275173070231")
      .contractTerminationRemark("测试解约")
      .version("1.0")
      .build();
    try {
      WxTerminationContractResult wxTerminationContractResult = this.payService.getWxEntrustPapService().terminationContract(wxTerminatedContractRequest);
      logger.info(wxTerminationContractResult.toString());
    } catch (WxPayException e) {
      logger.error(e.getMessage());
    }
  }
}
apache-2.0
Clinical3PO/Platform
dev/clinical3PO/app/src/main/java/org/clinical3PO/common/security/CustomAuthenticationProvider.java
1572
package org.clinical3PO.common.security;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Collection;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.stereotype.Component;
import org.clinical3PO.common.security.model.User;
import org.clinical3PO.common.security.service.UserService;

/**
 * Authenticates a username/password pair against the user store.
 *
 * NOTE(review): the stored password appears to be compared in clear text —
 * passwords should be hashed (bcrypt/scrypt/argon2 via a Spring
 * PasswordEncoder) rather than stored and compared raw; confirm the storage
 * format before changing.
 */
@Component
public class CustomAuthenticationProvider implements AuthenticationProvider {

    @Autowired
    private UserService userService;

    /**
     * Looks up the user and verifies the submitted credentials.
     *
     * @param authentication token carrying the submitted username and password
     * @return a fully-populated {@link UsernamePasswordAuthenticationToken}
     * @throws BadCredentialsException when the user is unknown or the password
     *         does not match
     */
    @Override
    public Authentication authenticate(Authentication authentication) throws AuthenticationException {
        String username = authentication.getName();
        String password = (String) authentication.getCredentials();

        User user = userService.loadUserByUsername(username);
        if (user == null) {
            throw new BadCredentialsException("Username not found.");
        }
        // Guard against null credentials (the original NPE'd on a null
        // password) and use MessageDigest.isEqual for a constant-time
        // comparison so a timing side channel cannot leak how many leading
        // characters of the password were correct.
        if (password == null || user.getPassword() == null
                || !MessageDigest.isEqual(
                        password.getBytes(StandardCharsets.UTF_8),
                        user.getPassword().getBytes(StandardCharsets.UTF_8))) {
            throw new BadCredentialsException("Wrong password.");
        }

        Collection<? extends GrantedAuthority> authorities = user.getAuthorities();
        return new UsernamePasswordAuthenticationToken(user, password, authorities);
    }

    /**
     * Restricts this provider to username/password tokens instead of claiming
     * support for every {@link Authentication} implementation (the original
     * returned {@code true} unconditionally, which would route unrelated token
     * types here).
     */
    @Override
    public boolean supports(Class<?> arg0) {
        return UsernamePasswordAuthenticationToken.class.isAssignableFrom(arg0);
    }
}
apache-2.0
dzhiqin/MyWeChat
src/com/example/mywechat/utils/ActivityCollector.java
697
package com.example.mywechat.utils;

import java.util.ArrayList;
import java.util.List;

import android.app.Activity;

/**
 * Tracks every live Activity so the whole application can be shut down at
 * once. (The original comments were encoding-garbled mojibake; they have been
 * rewritten in English.)
 *
 * @author dzhiqin
 */
public class ActivityCollector {

    public static List<Activity> activities = new ArrayList<Activity>();

    /** Registers an activity (call from onCreate). */
    public static void addActivity(Activity activity) {
        activities.add(activity);
    }

    /** Unregisters an activity (call from onDestroy). */
    public static void removeActivity(Activity activity) {
        activities.remove(activity);
    }

    /**
     * Finishes every registered activity.
     */
    public static void finishAll() {
        // Iterate over a snapshot: Activity.finish() can trigger lifecycle
        // callbacks that call removeActivity(), which would throw a
        // ConcurrentModificationException while iterating the live list.
        for (Activity activity : new ArrayList<Activity>(activities)) {
            if (!activity.isFinishing()) {
                activity.finish();
            }
        }
    }
}
apache-2.0
cleidimarviana/Tabs-Material
app/src/main/java/com/seamusdawkins/tablayout/fragments/FirstFragment.java
1928
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2015 Cleidimar Viana
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.seamusdawkins.tablayout.fragments;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.seamusdawkins.tablayout.R;

/**
 * Fragment backing the first tab: inflates fragment_one and fills its label.
 */
public class FirstFragment extends Fragment {

    // Label inside the tab layout; populated in onCreateView.
    TextView tv;
    // NOTE(review): never assigned or read anywhere in this class — appears
    // unused; verify no reflective/layout use before removing.
    RelativeLayout rl;

    /**
     * Inflates the tab's layout and sets its static label text.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_one, container, false);
        tv = (TextView) rootView.findViewById(R.id.action);
        tv.setText(R.string.str_first);
        return rootView;
    }
}
apache-2.0
prateekbansal/apache-gora-0.4
gora-cassandra/src/main/java/org/apache/gora/cassandra/store/CassandraClient.java
17283
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gora.cassandra.store; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel; import me.prettyprint.cassandra.serializers.ByteBufferSerializer; import me.prettyprint.cassandra.serializers.IntegerSerializer; import me.prettyprint.cassandra.serializers.StringSerializer; import me.prettyprint.cassandra.service.CassandraHostConfigurator; import me.prettyprint.hector.api.Cluster; import me.prettyprint.hector.api.Keyspace; import me.prettyprint.hector.api.beans.OrderedRows; import me.prettyprint.hector.api.beans.OrderedSuperRows; import me.prettyprint.hector.api.beans.Row; import me.prettyprint.hector.api.beans.SuperRow; import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition; import me.prettyprint.hector.api.ddl.ComparatorType; import me.prettyprint.hector.api.ddl.KeyspaceDefinition; import me.prettyprint.hector.api.factory.HFactory; import me.prettyprint.hector.api.mutation.Mutator; import me.prettyprint.hector.api.query.QueryResult; import me.prettyprint.hector.api.query.RangeSlicesQuery; import 
me.prettyprint.hector.api.query.RangeSuperSlicesQuery; import me.prettyprint.hector.api.HConsistencyLevel; import me.prettyprint.hector.api.Serializer; import org.apache.avro.Schema; import org.apache.avro.Schema.Type; import org.apache.avro.generic.GenericArray; import org.apache.avro.util.Utf8; import org.apache.gora.cassandra.query.CassandraQuery; import org.apache.gora.cassandra.serializers.GenericArraySerializer; import org.apache.gora.cassandra.serializers.GoraSerializerTypeInferer; import org.apache.gora.cassandra.serializers.TypeUtils; import org.apache.gora.mapreduce.GoraRecordReader; import org.apache.gora.persistency.Persistent; import org.apache.gora.persistency.impl.PersistentBase; import org.apache.gora.persistency.State; import org.apache.gora.persistency.StatefulHashMap; import org.apache.gora.query.Query; import org.apache.gora.util.ByteUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class CassandraClient<K, T extends PersistentBase> { public static final Logger LOG = LoggerFactory.getLogger(CassandraClient.class); private Cluster cluster; private Keyspace keyspace; private Mutator<K> mutator; private Class<K> keyClass; private Class<T> persistentClass; private CassandraMapping cassandraMapping = null; private Serializer<K> keySerializer; public void initialize(Class<K> keyClass, Class<T> persistentClass) throws Exception { this.keyClass = keyClass; // get cassandra mapping with persistent class this.persistentClass = persistentClass; this.cassandraMapping = CassandraMappingManager.getManager().get(persistentClass); // LOG.info("persistentClass=" + persistentClass.getName() + " -> cassandraMapping=" + cassandraMapping); this.cluster = HFactory.getOrCreateCluster(this.cassandraMapping.getClusterName(), new CassandraHostConfigurator(this.cassandraMapping.getHostName())); // add keyspace to cluster checkKeyspace(); // Just create a Keyspace object on the client side, corresponding to an already existing keyspace with already 
created column families. this.keyspace = HFactory.createKeyspace(this.cassandraMapping.getKeyspaceName(), this.cluster); this.keySerializer = GoraSerializerTypeInferer.getSerializer(keyClass); this.mutator = HFactory.createMutator(this.keyspace, this.keySerializer); } /** * Check if keyspace already exists. */ public boolean keyspaceExists() { KeyspaceDefinition keyspaceDefinition = this.cluster.describeKeyspace(this.cassandraMapping.getKeyspaceName()); return (keyspaceDefinition != null); } /** * Check if keyspace already exists. If not, create it. * In this method, we also utilise Hector's {@ConfigurableConsistencyLevel} * logic. It is set by passing a ConfigurableConsistencyLevel object right * when the Keyspace is created. Currently consistency level is .ONE which * permits consistency to wait until one replica has responded. */ public void checkKeyspace() { // "describe keyspace <keyspaceName>;" query KeyspaceDefinition keyspaceDefinition = this.cluster.describeKeyspace(this.cassandraMapping.getKeyspaceName()); if (keyspaceDefinition == null) { List<ColumnFamilyDefinition> columnFamilyDefinitions = this.cassandraMapping.getColumnFamilyDefinitions(); // GORA-197 for (ColumnFamilyDefinition cfDef : columnFamilyDefinitions) { cfDef.setComparatorType(ComparatorType.BYTESTYPE); } keyspaceDefinition = HFactory.createKeyspaceDefinition(this.cassandraMapping.getKeyspaceName(), "org.apache.cassandra.locator.SimpleStrategy", 1, columnFamilyDefinitions); this.cluster.addKeyspace(keyspaceDefinition, true); // LOG.info("Keyspace '" + this.cassandraMapping.getKeyspaceName() + "' in cluster '" + this.cassandraMapping.getClusterName() + "' was created on host '" + this.cassandraMapping.getHostName() + "'"); // Create a customized Consistency Level ConfigurableConsistencyLevel configurableConsistencyLevel = new ConfigurableConsistencyLevel(); Map<String, HConsistencyLevel> clmap = new HashMap<String, HConsistencyLevel>(); // Define CL.ONE for ColumnFamily "ColumnFamily" 
clmap.put("ColumnFamily", HConsistencyLevel.ONE); // In this we use CL.ONE for read and writes. But you can use different CLs if needed. configurableConsistencyLevel.setReadCfConsistencyLevels(clmap); configurableConsistencyLevel.setWriteCfConsistencyLevels(clmap); // Then let the keyspace know HFactory.createKeyspace("Keyspace", this.cluster, configurableConsistencyLevel); keyspaceDefinition = null; } else { List<ColumnFamilyDefinition> cfDefs = keyspaceDefinition.getCfDefs(); if (cfDefs == null || cfDefs.size() == 0) { LOG.warn(keyspaceDefinition.getName() + " does not have any column family."); } else { for (ColumnFamilyDefinition cfDef : cfDefs) { ComparatorType comparatorType = cfDef.getComparatorType(); if (! comparatorType.equals(ComparatorType.BYTESTYPE)) { // GORA-197 LOG.warn("The comparator type of " + cfDef.getName() + " column family is " + comparatorType.getTypeName() + ", not BytesType. It may cause a fatal error on column validation later."); } else { // LOG.info("The comparator type of " + cfDef.getName() + " column family is " + comparatorType.getTypeName() + "."); } } } } } /** * Drop keyspace. */ public void dropKeyspace() { // "drop keyspace <keyspaceName>;" query this.cluster.dropKeyspace(this.cassandraMapping.getKeyspaceName()); } /** * Insert a field in a column. * @param key the row key * @param fieldName the field name * @param value the field value. */ public void addColumn(K key, String fieldName, Object value) { if (value == null) { return; } ByteBuffer byteBuffer = toByteBuffer(value); String columnFamily = this.cassandraMapping.getFamily(fieldName); String columnName = this.cassandraMapping.getColumn(fieldName); if (columnName == null) { LOG.warn("Column name is null for field=" + fieldName + " with value=" + value.toString()); return; } synchronized(mutator) { HectorUtils.insertColumn(mutator, key, columnFamily, columnName, byteBuffer); } } /** * Insert a member in a super column. This is used for map and record Avro types. 
* @param key the row key * @param fieldName the field name * @param columnName the column name (the member name, or the index of array) * @param value the member value */ @SuppressWarnings("unchecked") public void addSubColumn(K key, String fieldName, ByteBuffer columnName, Object value) { if (value == null) { return; } ByteBuffer byteBuffer = toByteBuffer(value); String columnFamily = this.cassandraMapping.getFamily(fieldName); String superColumnName = this.cassandraMapping.getColumn(fieldName); synchronized(mutator) { HectorUtils.insertSubColumn(mutator, key, columnFamily, superColumnName, columnName, byteBuffer); } } public void addSubColumn(K key, String fieldName, String columnName, Object value) { addSubColumn(key, fieldName, StringSerializer.get().toByteBuffer(columnName), value); } public void addSubColumn(K key, String fieldName, Integer columnName, Object value) { addSubColumn(key, fieldName, IntegerSerializer.get().toByteBuffer(columnName), value); } /** * Delete a member in a super column. This is used for map and record Avro types. 
* @param key the row key * @param fieldName the field name * @param columnName the column name (the member name, or the index of array) */ @SuppressWarnings("unchecked") public void deleteSubColumn(K key, String fieldName, ByteBuffer columnName) { String columnFamily = this.cassandraMapping.getFamily(fieldName); String superColumnName = this.cassandraMapping.getColumn(fieldName); synchronized(mutator) { HectorUtils.deleteSubColumn(mutator, key, columnFamily, superColumnName, columnName); } } public void deleteSubColumn(K key, String fieldName, String columnName) { deleteSubColumn(key, fieldName, StringSerializer.get().toByteBuffer(columnName)); } @SuppressWarnings("unchecked") public void addGenericArray(K key, String fieldName, GenericArray array) { if (isSuper( cassandraMapping.getFamily(fieldName) )) { int i= 0; for (Object itemValue: array) { // TODO: hack, do not store empty arrays if (itemValue instanceof GenericArray<?>) { if (((GenericArray)itemValue).size() == 0) { continue; } } else if (itemValue instanceof StatefulHashMap<?,?>) { if (((StatefulHashMap)itemValue).size() == 0) { continue; } } addSubColumn(key, fieldName, i++, itemValue); } } else { addColumn(key, fieldName, array); } } @SuppressWarnings("unchecked") public void addStatefulHashMap(K key, String fieldName, StatefulHashMap<Utf8,Object> map) { if (isSuper( cassandraMapping.getFamily(fieldName) )) { int i= 0; for (Utf8 mapKey: map.keySet()) { if (map.getState(mapKey) == State.DELETED) { deleteSubColumn(key, fieldName, mapKey.toString()); continue; } // TODO: hack, do not store empty arrays Object mapValue = map.get(mapKey); if (mapValue instanceof GenericArray<?>) { if (((GenericArray)mapValue).size() == 0) { continue; } } else if (mapValue instanceof StatefulHashMap<?,?>) { if (((StatefulHashMap)mapValue).size() == 0) { continue; } } addSubColumn(key, fieldName, mapKey.toString(), mapValue); } } else { addColumn(key, fieldName, map); } } /** * Serialize value to ByteBuffer. 
* @param value the member value * @return ByteBuffer object */ @SuppressWarnings("unchecked") public ByteBuffer toByteBuffer(Object value) { ByteBuffer byteBuffer = null; Serializer serializer = GoraSerializerTypeInferer.getSerializer(value); if (serializer == null) { LOG.info("Serializer not found for: " + value.toString()); } else { byteBuffer = serializer.toByteBuffer(value); } if (byteBuffer == null) { LOG.info("value class=" + value.getClass().getName() + " value=" + value + " -> null"); } return byteBuffer; } /** * Select a family column in the keyspace. * @param cassandraQuery a wrapper of the query * @param family the family name to be queried * @return a list of family rows */ public List<Row<K, ByteBuffer, ByteBuffer>> execute(CassandraQuery<K, T> cassandraQuery, String family) { String[] columnNames = cassandraQuery.getColumns(family); ByteBuffer[] columnNameByteBuffers = new ByteBuffer[columnNames.length]; for (int i = 0; i < columnNames.length; i++) { columnNameByteBuffers[i] = StringSerializer.get().toByteBuffer(columnNames[i]); } Query<K, T> query = cassandraQuery.getQuery(); int limit = (int) query.getLimit(); if (limit < 1) { limit = Integer.MAX_VALUE; } K startKey = query.getStartKey(); K endKey = query.getEndKey(); RangeSlicesQuery<K, ByteBuffer, ByteBuffer> rangeSlicesQuery = HFactory.createRangeSlicesQuery(this.keyspace, this.keySerializer, ByteBufferSerializer.get(), ByteBufferSerializer.get()); rangeSlicesQuery.setColumnFamily(family); rangeSlicesQuery.setKeys(startKey, endKey); rangeSlicesQuery.setRange(ByteBuffer.wrap(new byte[0]), ByteBuffer.wrap(new byte[0]), false, GoraRecordReader.BUFFER_LIMIT_READ_VALUE); rangeSlicesQuery.setRowCount(limit); rangeSlicesQuery.setColumnNames(columnNameByteBuffers); QueryResult<OrderedRows<K, ByteBuffer, ByteBuffer>> queryResult = rangeSlicesQuery.execute(); OrderedRows<K, ByteBuffer, ByteBuffer> orderedRows = queryResult.get(); return orderedRows.getList(); } /** * Select the families that contain at 
least one column mapped to a query field. * @param query indicates the columns to select * @return a map which keys are the family names and values the corresponding column names required to get all the query fields. */ public Map<String, List<String>> getFamilyMap(Query<K, T> query) { Map<String, List<String>> map = new HashMap<String, List<String>>(); for (String field: query.getFields()) { String family = this.cassandraMapping.getFamily(field); String column = this.cassandraMapping.getColumn(field); // check if the family value was already initialized List<String> list = map.get(family); if (list == null) { list = new ArrayList<String>(); map.put(family, list); } if (column != null) { list.add(column); } } return map; } /** * Select the field names according to the column names, which format if fully qualified: "family:column" * @param query * @return a map which keys are the fully qualified column names and values the query fields */ public Map<String, String> getReverseMap(Query<K, T> query) { Map<String, String> map = new HashMap<String, String>(); for (String field: query.getFields()) { String family = this.cassandraMapping.getFamily(field); String column = this.cassandraMapping.getColumn(field); map.put(family + ":" + column, field); } return map; } public boolean isSuper(String family) { return this.cassandraMapping.isSuper(family); } public List<SuperRow<K, String, ByteBuffer, ByteBuffer>> executeSuper(CassandraQuery<K, T> cassandraQuery, String family) { String[] columnNames = cassandraQuery.getColumns(family); Query<K, T> query = cassandraQuery.getQuery(); int limit = (int) query.getLimit(); if (limit < 1) { limit = Integer.MAX_VALUE; } K startKey = query.getStartKey(); K endKey = query.getEndKey(); RangeSuperSlicesQuery<K, String, ByteBuffer, ByteBuffer> rangeSuperSlicesQuery = HFactory.createRangeSuperSlicesQuery(this.keyspace, this.keySerializer, StringSerializer.get(), ByteBufferSerializer.get(), ByteBufferSerializer.get()); 
rangeSuperSlicesQuery.setColumnFamily(family); rangeSuperSlicesQuery.setKeys(startKey, endKey); rangeSuperSlicesQuery.setRange("", "", false, GoraRecordReader.BUFFER_LIMIT_READ_VALUE); rangeSuperSlicesQuery.setRowCount(limit); rangeSuperSlicesQuery.setColumnNames(columnNames); QueryResult<OrderedSuperRows<K, String, ByteBuffer, ByteBuffer>> queryResult = rangeSuperSlicesQuery.execute(); OrderedSuperRows<K, String, ByteBuffer, ByteBuffer> orderedRows = queryResult.get(); return orderedRows.getList(); } /** * Obtain Schema/Keyspace name * @return Keyspace */ public String getKeyspaceName() { return this.cassandraMapping.getKeyspaceName(); } }
apache-2.0
DavidHerzogTU-Berlin/cassandraToRun
src/java/org/apache/cassandra/io/sstable/SSTableScanner.java
8241
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.io.sstable;

import java.io.IOException;
import java.util.Iterator;

import com.google.common.util.concurrent.RateLimiter;
import com.google.common.collect.AbstractIterator;

import org.apache.cassandra.db.DataRange;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.RowIndexEntry;
import org.apache.cassandra.db.RowPosition;
import org.apache.cassandra.db.columniterator.IColumnIteratorFactory;
import org.apache.cassandra.db.columniterator.LazyColumnIterator;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.compaction.ICompactionScanner;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.io.util.RandomAccessReader;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * Sequentially scans the rows of one SSTable that fall inside a {@link DataRange},
 * yielding one {@link OnDiskAtomIterator} per row. The constructor positions the
 * data and index readers at the first matching row; {@code stopAt} caps the scan
 * at the data-file offset just past the last matching row.
 */
public class SSTableScanner implements ICompactionScanner
{
    // Data-file reader; its file pointer is shared with the row iterators produced below.
    protected final RandomAccessReader dfile;
    // Index-file reader, used to enumerate (key, position) entries ahead of the data file.
    protected final RandomAccessReader ifile;
    public final SSTableReader sstable;
    private final DataRange dataRange;
    // Data-file offset at which the scan ends (exclusive); see computeStopAt().
    private final long stopAt;

    // Lazily created on first hasNext()/next().
    protected Iterator<OnDiskAtomIterator> iterator;

    /**
     * @param sstable SSTable to scan; must not be null
     * @param dataRange range of data to fetch; must not be null
     * @param limiter background i/o RateLimiter; may be null
     */
    SSTableScanner(SSTableReader sstable, DataRange dataRange, RateLimiter limiter)
    {
        assert sstable != null;

        this.dfile = limiter == null ? sstable.openDataReader() : sstable.openDataReader(limiter);
        this.ifile = sstable.openIndexReader();
        this.sstable = sstable;
        this.dataRange = dataRange;
        this.stopAt = computeStopAt();
        seekToStart();
    }

    /**
     * Positions ifile at the index entry for the first key >= dataRange.startKey(),
     * and dfile at that row's data position. Uses the index summary for a coarse
     * starting point, then scans index entries linearly.
     */
    private void seekToStart()
    {
        if (dataRange.startKey().isMinimum(sstable.partitioner))
            return;

        long indexPosition = sstable.getIndexScanPosition(dataRange.startKey());
        // -1 means the key is before everything in the sstable. So just start from the beginning.
        if (indexPosition == -1)
            return;

        ifile.seek(indexPosition);
        try
        {
            while (!ifile.isEOF())
            {
                // Remember where this index entry starts so we can rewind to it once found.
                indexPosition = ifile.getFilePointer();
                DecoratedKey indexDecoratedKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                int comparison = indexDecoratedKey.compareTo(dataRange.startKey());
                if (comparison >= 0)
                {
                    // Found, just read the dataPosition and seek into index and data files
                    long dataPosition = ifile.readLong();
                    ifile.seek(indexPosition);
                    dfile.seek(dataPosition);
                    break;
                }
                else
                {
                    // Not there yet: skip the rest of this index entry and keep scanning.
                    RowIndexEntry.serializer.skip(ifile);
                }
            }
        }
        catch (IOException e)
        {
            sstable.markSuspect();
            throw new CorruptSSTableException(e, sstable.getFilename());
        }
    }

    /**
     * Computes the exclusive end offset in the data file. An unbounded stop key or
     * a wrap-around range means "scan to the end of the file"; otherwise it is the
     * position of the first row strictly after the range's right bound.
     */
    private long computeStopAt()
    {
        AbstractBounds<RowPosition> keyRange = dataRange.keyRange();
        if (dataRange.stopKey().isMinimum(sstable.partitioner) || (keyRange instanceof Range && ((Range)keyRange).isWrapAround()))
            return dfile.length();

        RowIndexEntry position = sstable.getPosition(keyRange.toRowBounds().right, SSTableReader.Operator.GT);
        return position == null ? dfile.length() : position.position;
    }

    public void close() throws IOException
    {
        FileUtils.close(dfile, ifile);
    }

    public long getLengthInBytes()
    {
        return dfile.length();
    }

    public long getCurrentPosition()
    {
        return dfile.getFilePointer();
    }

    public String getBackingFiles()
    {
        return sstable.toString();
    }

    public boolean hasNext()
    {
        if (iterator == null)
            iterator = createIterator();
        return iterator.hasNext();
    }

    public OnDiskAtomIterator next()
    {
        if (iterator == null)
            iterator = createIterator();
        return iterator.next();
    }

    public void remove()
    {
        throw new UnsupportedOperationException();
    }

    private Iterator<OnDiskAtomIterator> createIterator()
    {
        return new KeyScanningIterator();
    }

    /**
     * Walks the index file one entry ahead of the row being returned, so each row's
     * on-disk size can be derived from the next entry's position.
     */
    protected class KeyScanningIterator extends AbstractIterator<OnDiskAtomIterator>
    {
        private DecoratedKey nextKey;
        private RowIndexEntry nextEntry;
        private DecoratedKey currentKey;
        private RowIndexEntry currentEntry;

        protected OnDiskAtomIterator computeNext()
        {
            try
            {
                if (ifile.isEOF() && nextKey == null)
                    return endOfData();

                if (currentKey == null)
                {
                    // First call: read the current entry directly from the index.
                    currentKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                    currentEntry = RowIndexEntry.serializer.deserialize(ifile, sstable.descriptor.version);
                }
                else
                {
                    // Subsequent calls: promote the look-ahead entry.
                    currentKey = nextKey;
                    currentEntry = nextEntry;
                }

                assert currentEntry.position <= stopAt;
                if (currentEntry.position == stopAt)
                    return endOfData();

                // Pre-read the next index entry (look-ahead) to size the current row.
                if (ifile.isEOF())
                {
                    nextKey = null;
                    nextEntry = null;
                }
                else
                {
                    nextKey = sstable.partitioner.decorateKey(ByteBufferUtil.readWithShortLength(ifile));
                    nextEntry = RowIndexEntry.serializer.deserialize(ifile, sstable.descriptor.version);
                }

                assert !dfile.isEOF();

                if (dataRange.selectsFullRowFor(currentKey.key))
                {
                    // Whole row requested: hand the data file to an identity iterator,
                    // positioned just past the key (and legacy size/count header).
                    dfile.seek(currentEntry.position);
                    ByteBufferUtil.readWithShortLength(dfile); // key
                    if (sstable.descriptor.version.hasRowSizeAndColumnCount)
                        dfile.readLong();
                    long dataSize = (nextEntry == null ? dfile.length() : nextEntry.position) - dfile.getFilePointer();
                    return new SSTableIdentityIterator(sstable, dfile, currentKey, dataSize);
                }

                // Partial row: defer the column filter until the row is actually consumed.
                return new LazyColumnIterator(currentKey, new IColumnIteratorFactory()
                {
                    public OnDiskAtomIterator create()
                    {
                        return dataRange.columnFilter(currentKey.key).getSSTableColumnIterator(sstable, dfile, currentKey, currentEntry);
                    }
                });
            }
            catch (IOException e)
            {
                sstable.markSuspect();
                throw new CorruptSSTableException(e, sstable.getFilename());
            }
        }
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "(" +
               "dfile=" + dfile +
               " ifile=" + ifile +
               " sstable=" + sstable +
               ")";
    }
}
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-glacier/src/main/java/com/amazonaws/services/glacier/model/transform/PartListElementJsonUnmarshaller.java
2994
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glacier.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.glacier.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * PartListElement JSON Unmarshaller
 *
 * Generated code: walks the JSON token stream and populates a
 * {@link PartListElement} from the fields found one level below the
 * current nesting depth.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PartListElementJsonUnmarshaller implements Unmarshaller<PartListElement, JsonUnmarshallerContext> {

    public PartListElement unmarshall(JsonUnmarshallerContext context) throws Exception {
        PartListElement partListElement = new PartListElement();

        // Fields of this object live exactly one level below the depth at entry.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole element unmarshalls to a null object.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("RangeInBytes", targetDepth)) {
                    context.nextToken();
                    partListElement.setRangeInBytes(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("SHA256TreeHash", targetDepth)) {
                    context.nextToken();
                    partListElement.setSHA256TreeHash(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we close the structure we started in (back at or above
                // the original depth, under the same parent element).
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return partListElement;
    }

    // Lazily-created singleton; matches the pattern used by all generated
    // unmarshallers in this SDK (not synchronized by design).
    private static PartListElementJsonUnmarshaller instance;

    public static PartListElementJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new PartListElementJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
seborama/demo1-kafka
src/main/java/seborama/demo2/kafka/model/Order.java
1265
package seborama.demo2.kafka.model; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; @JsonInclude(JsonInclude.Include.NON_EMPTY) @JsonIgnoreProperties(ignoreUnknown = true) public class Order { private String id; private boolean fulfilled; private boolean dispatched; private boolean completed; public void setId(String id) { this.id = id; } public void setFulfilled(Boolean fulfilled) { this.fulfilled = fulfilled; } public void setDispatched(Boolean dispatched) { this.dispatched = dispatched; } public void setCompleted(Boolean completed) { this.completed = completed; } public String getId() { return id; } public Boolean getFulfilled() { return fulfilled; } public Boolean getDispatched() { return dispatched; } public Boolean getCompleted() { return completed; } @Override public String toString() { return "Order{" + "id='" + id + '\'' + ", fulfilled=" + fulfilled + ", dispatched=" + dispatched + ", completed=" + completed + '}'; } }
apache-2.0
Iamasoldier6/DesignPattern
BuilderPatternDemo/src/ChickenBurger.java
168
public class ChickenBurger extends Burger { @Override public float price() { return 50.5f; } @Override public String name() { return "Chicken Burger"; } }
apache-2.0
JNOSQL/artemis
artemis-column/src/test/java/org/jnosql/artemis/column/query/DefaultColumnQueryMapperBuilderTest.java
1793
/* * Copyright (c) 2017 Otávio Santana and others * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php. * * You may elect to redistribute this code under either of these licenses. * * Contributors: * * Otavio Santana */ package org.jnosql.artemis.column.query; import org.jnosql.artemis.CDIExtension; import org.jnosql.artemis.model.Person; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import javax.inject.Inject; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; @ExtendWith(CDIExtension.class) public class DefaultColumnQueryMapperBuilderTest { @Inject private ColumnQueryMapperBuilder mapperBuilder; @Test public void shouldReturnErrorWhenEntityClassIsNull() { assertThrows(NullPointerException.class, () -> mapperBuilder.selectFrom(null)); } @Test public void shouldReturnSelectFrom() { ColumnMapperFrom columnFrom = mapperBuilder.selectFrom(Person.class); assertNotNull(columnFrom); } @Test public void shouldReturnErrorWhenDeleteEntityClassIsNull() { assertThrows(NullPointerException.class, () -> mapperBuilder.deleteFrom(null)); } @Test public void shouldReturnDeleteFrom() { ColumnMapperDeleteFrom columnDeleteFrom = mapperBuilder.deleteFrom(Person.class); assertNotNull(columnDeleteFrom); } }
apache-2.0
asakusafw/asakusafw
directio-project/asakusa-directio-dmdl/src/main/java/com/asakusafw/dmdl/directio/text/AbstractTextStreamFormatGenerator.java
26894
/** * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.dmdl.directio.text; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.Function; import java.util.function.Predicate; import org.apache.hadoop.io.compress.CompressionCodec; import com.asakusafw.dmdl.directio.util.CharsetUtil; import com.asakusafw.dmdl.directio.util.ClassName; import com.asakusafw.dmdl.directio.util.Value; import com.asakusafw.dmdl.java.emitter.EmitContext; import com.asakusafw.dmdl.java.util.JavaName; import com.asakusafw.dmdl.model.BasicTypeKind; import com.asakusafw.dmdl.semantics.ModelDeclaration; import com.asakusafw.dmdl.semantics.PropertyDeclaration; import com.asakusafw.dmdl.semantics.type.BasicType; import com.asakusafw.dmdl.util.AttributeUtil; import com.asakusafw.runtime.io.text.TextFormat; import com.asakusafw.runtime.io.text.TextInput; import com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormat; import com.asakusafw.runtime.io.text.driver.FieldDefinition; import com.asakusafw.runtime.io.text.driver.RecordDefinition; import com.asakusafw.runtime.io.text.value.BooleanOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.ByteOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.DateOptionFieldAdapter; import 
com.asakusafw.runtime.io.text.value.DateTimeOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.DecimalOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.DoubleOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.FloatOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.IntOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.LongOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.ShortOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.StringOptionFieldAdapter; import com.asakusafw.runtime.io.text.value.ValueOptionFieldAdapter; import com.asakusafw.runtime.io.util.InputSplitter; import com.asakusafw.runtime.io.util.InputSplitters; import com.asakusafw.runtime.value.StringOption; import com.asakusafw.utils.java.model.syntax.ClassDeclaration; import com.asakusafw.utils.java.model.syntax.Expression; import com.asakusafw.utils.java.model.syntax.InfixOperator; import com.asakusafw.utils.java.model.syntax.MethodDeclaration; import com.asakusafw.utils.java.model.syntax.ModelFactory; import com.asakusafw.utils.java.model.syntax.SimpleName; import com.asakusafw.utils.java.model.syntax.Statement; import com.asakusafw.utils.java.model.syntax.Type; import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration; import com.asakusafw.utils.java.model.util.AttributeBuilder; import com.asakusafw.utils.java.model.util.ExpressionBuilder; import com.asakusafw.utils.java.model.util.JavadocBuilder; import com.asakusafw.utils.java.model.util.Models; import com.asakusafw.utils.java.model.util.TypeBuilder; /** * Generates {@link AbstractTextStreamFormat}. * @since 0.9.1 */ public abstract class AbstractTextStreamFormatGenerator { private static final Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> ADAPTER_TYPES; static { Map<BasicTypeKind, Class<? 
extends ValueOptionFieldAdapter<?>>> map = new EnumMap<>(BasicTypeKind.class); map.put(BasicTypeKind.BYTE, ByteOptionFieldAdapter.class); map.put(BasicTypeKind.SHORT, ShortOptionFieldAdapter.class); map.put(BasicTypeKind.INT, IntOptionFieldAdapter.class); map.put(BasicTypeKind.LONG, LongOptionFieldAdapter.class); map.put(BasicTypeKind.FLOAT, FloatOptionFieldAdapter.class); map.put(BasicTypeKind.DOUBLE, DoubleOptionFieldAdapter.class); map.put(BasicTypeKind.DECIMAL, DecimalOptionFieldAdapter.class); map.put(BasicTypeKind.TEXT, StringOptionFieldAdapter.class); map.put(BasicTypeKind.BOOLEAN, BooleanOptionFieldAdapter.class); map.put(BasicTypeKind.DATE, DateOptionFieldAdapter.class); map.put(BasicTypeKind.DATETIME, DateTimeOptionFieldAdapter.class); ADAPTER_TYPES = map; } /** * The current context. */ protected final EmitContext context; /** * The target model. */ protected final ModelDeclaration model; private final ModelFactory f; private final TextFormatSettings formatSettings; private final TextFieldSettings fieldDefaultSettings; /** * Creates a new instance. * @param context the current context * @param model the target model * @param formatSettings the text format settings * @param fieldDefaultSettings the field default settings */ public AbstractTextStreamFormatGenerator( EmitContext context, ModelDeclaration model, TextFormatSettings formatSettings, TextFieldSettings fieldDefaultSettings) { this.context = context; this.model = model; this.formatSettings = formatSettings; this.fieldDefaultSettings = fieldDefaultSettings; this.f = context.getModelFactory(); } /** * Emits an implementation of {@link AbstractTextStreamFormat} class as a Java compilation unit. 
* @param description the format description * @throws IOException if I/O error was occurred while emitting the compilation unit */ protected void emit(String description) throws IOException { ClassDeclaration decl = f.newClassDeclaration( new JavadocBuilder(f) .inline(Messages.getString("AbstractTextStreamFormatGenerator.javadocClassOverview"), //$NON-NLS-1$ d -> d.text(description), d -> d.linkType(context.resolve(model.getSymbol()))) .toJavadoc(), new AttributeBuilder(f) .Public() .toAttributes(), context.getTypeName(), f.newParameterizedType( context.resolve(AbstractTextStreamFormat.class), context.resolve(model.getSymbol())), Collections.emptyList(), createMembers()); context.emit(decl); } private List<? extends TypeBodyDeclaration> createMembers() { List<TypeBodyDeclaration> results = new ArrayList<>(); results.add(createGetSupportedType()); results.add(createCreateTextFormat()); results.addAll(createCreateRecordDefinition()); createGetInputSplitter().ifPresent(results::add); createGetCompressionCodecClass().ifPresent(results::add); createAfterInput().ifPresent(results::add); createBeforeOutput().ifPresent(results::add); return results; } private MethodDeclaration createGetSupportedType() { return f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Public() .toAttributes(), f.newParameterizedType( context.resolve(Class.class), context.resolve(model.getSymbol())), f.newSimpleName("getSupportedType"), //$NON-NLS-1$ Collections.emptyList(), Arrays.asList(new TypeBuilder(f, context.resolve(model.getSymbol())) .dotClass() .toReturnStatement())); } private MethodDeclaration createCreateTextFormat() { return f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Public() .toAttributes(), context.resolve(TextFormat.class), f.newSimpleName("createTextFormat"), //$NON-NLS-1$ Collections.emptyList(), createGetTextFormatInternal()); } /** * Returns a body of {@link 
AbstractTextStreamFormat#getTextFormat()}. * @return the body statements */ protected abstract List<Statement> createGetTextFormatInternal(); private List<MethodDeclaration> createCreateRecordDefinition() { SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$ List<Statement> statements = new ArrayList<>(); statements.add(new TypeBuilder(f, context.resolve(RecordDefinition.class)) .method("builder", f.newClassLiteral(context.resolve(model.getSymbol()))) //$NON-NLS-1$ .toLocalVariableDeclaration( f.newParameterizedType( context.resolve(RecordDefinition.Builder.class), context.resolve(model.getSymbol())), builder)); List<MethodDeclaration> fields = buildRecordDefinition(statements, builder); statements.add(new ExpressionBuilder(f, builder) .method("build") //$NON-NLS-1$ .toReturnStatement()); List<MethodDeclaration> results = new ArrayList<>(); results.add(f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Protected() .toAttributes(), f.newParameterizedType( context.resolve(RecordDefinition.class), context.resolve(model.getSymbol())), f.newSimpleName("createRecordDefinition"), //$NON-NLS-1$ Collections.emptyList(), statements)); results.addAll(fields); return results; } private List<MethodDeclaration> buildRecordDefinition(List<Statement> statements, SimpleName builder) { formatSettings.getHeaderType().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withHeaderType", resolve(v)) //$NON-NLS-1$ .toStatement())); formatSettings.getLessInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOnLessInput", resolve(v)) //$NON-NLS-1$ .toStatement())); formatSettings.getMoreInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOnMoreInput", resolve(v)) //$NON-NLS-1$ .toStatement())); fieldDefaultSettings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withTrimInput", 
resolve(v)) //$NON-NLS-1$ .toStatement())); fieldDefaultSettings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$ .toStatement())); fieldDefaultSettings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$ .toStatement())); fieldDefaultSettings.getUnmappableOutputAction().ifPresent(v -> statements.add( new ExpressionBuilder(f, builder) .method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$ .toStatement())); List<MethodDeclaration> fields = new ArrayList<>(); for (PropertyDeclaration property : model.getDeclaredProperties()) { if (TextFieldTrait.getKind(property) != TextFieldTrait.Kind.VALUE) { continue; } MethodDeclaration method = createGetFieldDefinition(property); fields.add(method); statements.add(new ExpressionBuilder(f, builder) .method("withField", //$NON-NLS-1$ new TypeBuilder(f, context.resolve(model.getSymbol())) .methodReference(context.getOptionGetterName(property)) .toExpression(), new ExpressionBuilder(f, f.newThis()) .method(method.getName()) .toExpression()) .toStatement()); } return fields; } private MethodDeclaration createGetFieldDefinition(PropertyDeclaration property) { SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$ List<Statement> statements = new ArrayList<>(); statements.add(new TypeBuilder(f, context.resolve(FieldDefinition.class)) .method("builder", //$NON-NLS-1$ resolve(TextFieldTrait.getName(property)), buildFieldAdapter(property)) .toLocalVariableDeclaration( f.newParameterizedType( context.resolve(FieldDefinition.Builder.class), context.getFieldType(property)), builder)); TextFieldSettings settings = TextFieldTrait.getSettings(property); settings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withTrimInput", resolve(v)) //$NON-NLS-1$ .toStatement())); 
settings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$ .toStatement())); settings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$ .toStatement())); settings.getUnmappableOutputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$ .toStatement())); settings.getQuoteStyle().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder) .method("withOutputOption", resolve(v)) //$NON-NLS-1$ .toStatement())); statements.add(new ExpressionBuilder(f, builder) .method("build") //$NON-NLS-1$ .toReturnStatement()); JavaName name = JavaName.of(property.getName()); name.addFirst("get"); //$NON-NLS-1$ name.addLast("field"); //$NON-NLS-1$ name.addLast("definition"); //$NON-NLS-1$ return f.newMethodDeclaration( new JavadocBuilder(f) .inline(Messages.getString("AbstractTextStreamFormatGenerator.javafocGetFieldDefinitionOverview"), //$NON-NLS-1$ d -> d.linkMethod( context.resolve(model.getSymbol()), context.getOptionGetterName(property))) .returns() .text(Messages.getString("AbstractTextStreamFormatGenerator.javadocGetFieldDefinitionReturn")) //$NON-NLS-1$ .toJavadoc(), new AttributeBuilder(f) .Protected() .toAttributes(), f.newParameterizedType( context.resolve(FieldDefinition.class), context.getFieldType(property)), f.newSimpleName(name.toMemberName()), Collections.emptyList(), statements); } private Expression buildFieldAdapter(PropertyDeclaration property) { TextFieldSettings settings = TextFieldTrait.getSettings(property); Value<ClassName> adapterClass = setting(settings, TextFieldSettings::getAdapterClass); if (adapterClass.isPresent()) { return new TypeBuilder(f, resolve(adapterClass.getEntity())) .constructorReference() .toExpression(); } BasicTypeKind kind = ((BasicType) property.getType()).getKind(); 
Class<? extends ValueOptionFieldAdapter<?>> basicAdapterClass = ADAPTER_TYPES.get(kind); assert basicAdapterClass != null; ExpressionBuilder builder = new TypeBuilder(f, context.resolve(basicAdapterClass)).method("builder"); //$NON-NLS-1$ setting(settings, TextFieldSettings::getNullFormat).ifPresent(v -> builder .method("withNullFormat", resolve(v))); //$NON-NLS-1$ switch (kind) { case BOOLEAN: setting(settings, TextFieldSettings::getTrueFormat).ifPresent(v -> builder .method("withTrueFormat", resolve(v))); //$NON-NLS-1$ setting(settings, TextFieldSettings::getFalseFormat).ifPresent(v -> builder .method("withFalseFormat", resolve(v))); //$NON-NLS-1$ break; case DATE: setting(settings, TextFieldSettings::getDateFormat).ifPresent(v -> builder .method("withDateFormat", resolve(v.toString()))); //$NON-NLS-1$ break; case DATETIME: setting(settings, TextFieldSettings::getDateTimeFormat).ifPresent(v -> builder .method("withDateTimeFormat", resolve(v.toString()))); //$NON-NLS-1$ setting(settings, TextFieldSettings::getTimeZone).ifPresent(v -> builder .method("withTimeZone", resolve(v.getId()))); //$NON-NLS-1$ break; case DECIMAL: setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder .method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$ setting(settings, TextFieldSettings::getDecimalOutputStyle).ifPresent(v -> builder .method("withOutputStyle", resolve(v))); //$NON-NLS-1$ break; case BYTE: case INT: case SHORT: case LONG: case FLOAT: case DOUBLE: setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder .method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$ break; case TEXT: // no special members break; default: throw new AssertionError(kind); } return builder.method("lazy").toExpression(); //$NON-NLS-1$ } private <T> Value<T> setting(TextFieldSettings settings, Function<TextFieldSettings, Value<T>> getter) { return getter.apply(settings).orDefault(getter.apply(fieldDefaultSettings)); } private 
Optional<MethodDeclaration> createGetInputSplitter() { if (isSplittable()) { return Optional.of(f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Protected() .toAttributes(), context.resolve(InputSplitter.class), f.newSimpleName("getInputSplitter"), //$NON-NLS-1$ Collections.emptyList(), Arrays.asList(new TypeBuilder(f, context.resolve(InputSplitters.class)) .method("byLineFeed") //$NON-NLS-1$ .toReturnStatement()))); } else { return Optional.empty(); } } private boolean isSplittable() { if (formatSettings.getCharset().isPresent()) { if (!CharsetUtil.isAsciiCompatible(formatSettings.getCharset().getEntity())) { return false; } } if (formatSettings.getCompressionType().isPresent()) { return false; } if (model.getDeclaredProperties().stream() .map(TextFieldTrait::getKind) .anyMatch(Predicate.isEqual(TextFieldTrait.Kind.LINE_NUMBER) .or(Predicate.isEqual(TextFieldTrait.Kind.RECORD_NUMBER)))) { return false; } return isSplittableInternal(); } /** * Returns whether or not the input is splittable. 
* @return {@code true} if it is splittable, otherwise {@code false} */ protected abstract boolean isSplittableInternal(); private Optional<MethodDeclaration> createGetCompressionCodecClass() { if (formatSettings.getCompressionType().isPresent()) { ClassName codec = formatSettings.getCompressionType().getEntity(); return Optional.of(f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Protected() .toAttributes(), new TypeBuilder(f, context.resolve(Class.class)) .parameterize(f.newWildcardExtends(context.resolve(CompressionCodec.class))) .toType(), f.newSimpleName("getCompressionCodecClass"), //$NON-NLS-1$ Collections.emptyList(), Arrays.asList(new TypeBuilder(f, resolve(codec)) .dotClass() .toReturnStatement()))); } else { return Optional.empty(); } } private Optional<MethodDeclaration> createAfterInput() { SimpleName object = f.newSimpleName("object"); //$NON-NLS-1$ SimpleName path = f.newSimpleName("path"); //$NON-NLS-1$ SimpleName input = f.newSimpleName("input"); //$NON-NLS-1$ List<Statement> statements = new ArrayList<>(); for (PropertyDeclaration property : model.getDeclaredProperties()) { switch (TextFieldTrait.getKind(property)) { case VALUE: break; // does nothing case IGNORE: statements.add(new ExpressionBuilder(f, object) .method(context.getOptionSetterName(property), Models.toNullLiteral(f)) .toStatement()); break; case FILE_NAME: statements.add(new ExpressionBuilder(f, object) .method(context.getOptionSetterName(property), path) .toStatement()); break; case LINE_NUMBER: statements.add(new ExpressionBuilder(f, object) .method(context.getValueSetterName(property), adjustLong(property, new ExpressionBuilder(f, input) .method("getLineNumber") //$NON-NLS-1$ .apply(InfixOperator.PLUS, Models.toLiteral(f, 1L)))) .toStatement()); break; case RECORD_NUMBER: statements.add(new ExpressionBuilder(f, object) .method(context.getValueSetterName(property), adjustLong(property, new ExpressionBuilder(f, input) 
.method("getRecordIndex") //$NON-NLS-1$ .apply(InfixOperator.PLUS, Models.toLiteral(f, 1L)))) .toStatement()); break; default: throw new AssertionError(TextFieldTrait.getKind(property)); } } if (statements.isEmpty()) { return Optional.empty(); } else { return Optional.of(f.newMethodDeclaration( null, new AttributeBuilder(f) .annotation(context.resolve(Override.class)) .Protected() .toAttributes(), context.resolve(void.class), f.newSimpleName("afterInput"), //$NON-NLS-1$ Arrays.asList( f.newFormalParameterDeclaration(context.resolve(model.getSymbol()), object), f.newFormalParameterDeclaration(context.resolve(StringOption.class), path), f.newFormalParameterDeclaration( f.newParameterizedType( context.resolve(TextInput.class), context.resolve(model.getSymbol())), input)), statements)); } } private Expression adjustLong(PropertyDeclaration property, ExpressionBuilder builder) { if (AttributeUtil.hasFieldType(property, BasicTypeKind.LONG)) { return builder.toExpression(); } else if (AttributeUtil.hasFieldType(property, BasicTypeKind.INT)) { return builder.castTo(context.resolve(int.class)).toExpression(); } else { throw new AssertionError(property); } } private Optional<MethodDeclaration> createBeforeOutput() { return Optional.empty(); } /** * Resolves a value. * @param value the value * @return the resolved expression */ protected Expression resolve(boolean value) { return Models.toLiteral(f, value); } /** * Resolves a value. * @param value the value * @return the resolved expression */ protected Expression resolve(char value) { return Models.toLiteral(f, value); } /** * Resolves a value. * @param value the value * @return the resolved expression */ protected Expression resolve(String value) { return Models.toLiteral(f, value); } /** * Resolves a value. 
* @param value the value * @return the resolved expression */ protected Expression resolve(Enum<?> value) { return new TypeBuilder(f, context.resolve(value.getDeclaringClass())) .field(value.name()) .toExpression(); } /** * Resolves a value. * @param type the value * @return the resolved expression */ protected Type resolve(ClassName type) { return context.resolve(Models.toName(f, type.toString())); } }
apache-2.0
consulo/consulo-relaxng
src/org/intellij/plugins/relaxNG/references/PrefixReferenceProvider.java
4459
/* * Copyright 2007 Sascha Weinreuter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.plugins.relaxNG.references; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.LocalQuickFixProvider; import com.intellij.codeInspection.XmlQuickFixFactory; import com.intellij.lang.xml.XMLLanguage; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiReference; import com.intellij.psi.PsiReferenceProvider; import com.intellij.psi.XmlElementFactory; import com.intellij.psi.impl.source.resolve.reference.impl.providers.BasicAttributeValueReference; import com.intellij.psi.impl.source.xml.SchemaPrefix; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.XmlAttribute; import com.intellij.psi.xml.XmlAttributeValue; import com.intellij.psi.xml.XmlTag; import com.intellij.util.ArrayUtil; import com.intellij.util.ProcessingContext; /* * Created by IntelliJ IDEA. 
* User: sweinreuter * Date: 24.07.2007 */ public class PrefixReferenceProvider extends PsiReferenceProvider { private static final Logger LOG = Logger.getInstance("#org.intellij.plugins.relaxNG.references.PrefixReferenceProvider"); @Override @NotNull public PsiReference[] getReferencesByElement(@NotNull PsiElement element, @NotNull ProcessingContext context) { final XmlAttributeValue value = (XmlAttributeValue) element; final String s = value.getValue(); final int i = s.indexOf(':'); if(i <= 0 || s.startsWith("xml:")) { return PsiReference.EMPTY_ARRAY; } return new PsiReference[]{ new PrefixReference(value, i) }; } private static class PrefixReference extends BasicAttributeValueReference implements EmptyResolveMessageProvider, LocalQuickFixProvider { public PrefixReference(XmlAttributeValue value, int length) { super(value, TextRange.from(1, length)); } @Override @Nullable public PsiElement resolve() { final String prefix = getCanonicalText(); XmlTag tag = PsiTreeUtil.getParentOfType(getElement(), XmlTag.class); while(tag != null) { if(tag.getLocalNamespaceDeclarations().containsKey(prefix)) { final XmlAttribute attribute = tag.getAttribute("xmlns:" + prefix, ""); final TextRange textRange = TextRange.from("xmlns:".length(), prefix.length()); return new SchemaPrefix(attribute, textRange, prefix); } tag = tag.getParentTag(); } return null; } @Override public boolean isReferenceTo(PsiElement element) { if(element instanceof SchemaPrefix && element.getContainingFile() == myElement.getContainingFile()) { final PsiElement e = resolve(); if(e instanceof SchemaPrefix) { final String s = ((SchemaPrefix) e).getName(); return s != null && s.equals(((SchemaPrefix) element).getName()); } } return super.isReferenceTo(element); } @Nullable @Override public LocalQuickFix[] getQuickFixes() { final PsiElement element = getElement(); final XmlElementFactory factory = XmlElementFactory.getInstance(element.getProject()); final String value = ((XmlAttributeValue) element).getValue(); 
final String[] name = value.split(":"); final XmlTag tag = factory.createTagFromText("<" + (name.length > 1 ? name[1] : value) + " />", XMLLanguage.INSTANCE); return new LocalQuickFix[]{XmlQuickFixFactory.getInstance().createNSDeclarationIntentionFix(tag, getCanonicalText(), null)}; } @Override @NotNull public Object[] getVariants() { return ArrayUtil.EMPTY_OBJECT_ARRAY; } @Override public boolean isSoft() { return false; } @Override @NotNull public String getUnresolvedMessagePattern() { return "Undefined namespace prefix ''{0}''"; } } }
apache-2.0
hmcl/Streams
streams/service/src/main/java/com/hortonworks/streamline/streams/service/TopologyTestRunResource.java
26797
/** * Copyright 2017 Hortonworks. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package com.hortonworks.streamline.streams.service; import com.codahale.metrics.annotation.Timed; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.hortonworks.streamline.common.exception.service.exception.request.BadRequestException; import com.hortonworks.streamline.common.exception.service.exception.request.EntityNotFoundException; import com.hortonworks.streamline.common.exception.service.exception.server.UnhandledServerException; import com.hortonworks.streamline.common.util.WSUtils; import com.hortonworks.streamline.streams.actions.topology.service.TopologyActionsService; import com.hortonworks.streamline.streams.catalog.Topology; import com.hortonworks.streamline.streams.catalog.TopologySink; import com.hortonworks.streamline.streams.catalog.TopologySource; import com.hortonworks.streamline.streams.catalog.TopologyTestRunCase; import com.hortonworks.streamline.streams.catalog.TopologyTestRunCaseSink; import com.hortonworks.streamline.streams.catalog.TopologyTestRunCaseSource; import com.hortonworks.streamline.streams.catalog.TopologyTestRunHistory; import com.hortonworks.streamline.streams.catalog.service.StreamCatalogService; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.BooleanUtils; import org.datanucleus.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.stream.Stream; import static java.util.stream.Collectors.toList; import static javax.ws.rs.core.Response.Status.CREATED; import static javax.ws.rs.core.Response.Status.OK; @Path("/v1/catalog") @Produces(MediaType.APPLICATION_JSON) public class TopologyTestRunResource { private static final Logger LOG = LoggerFactory.getLogger(TopologyTestRunResource.class); private static final Integer DEFAULT_LIST_ENTITIES_COUNT = 5; public static final Charset ENCODING_UTF_8 = Charset.forName("UTF-8"); private final StreamCatalogService catalogService; private final TopologyActionsService actionsService; private final ObjectMapper objectMapper; public TopologyTestRunResource(StreamCatalogService catalogService, TopologyActionsService actionsService) { this.catalogService = catalogService; this.actionsService = actionsService; this.objectMapper = new ObjectMapper(); } @POST @Path("/topologies/{topologyId}/actions/testrun") @Timed public Response testRunTopology (@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, String testRunInputJson) throws Exception { Topology result = catalogService.getTopology(topologyId); if (result != null) { TopologyTestRunHistory history = actionsService.testRunTopology(result, testRunInputJson); return WSUtils.respondEntity(history, OK); } throw EntityNotFoundException.byId(topologyId.toString()); } @GET 
@Path("/topologies/{topologyId}/testhistories") @Timed public Response getHistoriesOfTestRunTopology (@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @QueryParam("limit") Integer limit) throws Exception { Collection<TopologyTestRunHistory> histories = catalogService.listTopologyTestRunHistory(topologyId); if (histories == null) { throw EntityNotFoundException.byFilter("topology id " + topologyId); } List<TopologyTestRunHistory> filteredHistories = filterHistories(limit, histories); return WSUtils.respondEntities(filteredHistories, OK); } @GET @Path("/topologies/{topologyId}/versions/{versionId}/testhistories") @Timed public Response getHistoriesOfTestRunTopology (@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("versionId") Long versionId, @QueryParam("limit") Integer limit) throws Exception { Collection<TopologyTestRunHistory> histories = catalogService.listTopologyTestRunHistory(topologyId, versionId); if (histories == null) { throw EntityNotFoundException.byFilter("topology id " + topologyId); } List<TopologyTestRunHistory> filteredHistories = filterHistories(limit, histories); return WSUtils.respondEntities(filteredHistories, OK); } @GET @Path("/topologies/{topologyId}/testhistories/{historyId}") @Timed public Response getHistoryOfTestRunTopology (@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("historyId") Long historyId, @QueryParam("simplify") Boolean simplify) throws Exception { TopologyTestRunHistory history = catalogService.getTopologyTestRunHistory(historyId); if (history == null) { throw EntityNotFoundException.byId(String.valueOf(historyId)); } if (!history.getTopologyId().equals(topologyId)) { throw BadRequestException.message("Test history " + historyId + " is not belong to topology " + topologyId); } if (BooleanUtils.isTrue(simplify)) { return WSUtils.respondEntity(new SimplifiedTopologyTestRunHistory(history), OK); } else { return WSUtils.respondEntity(history, OK); } 
} @GET @Path("/topologies/{topologyId}/testhistories/{historyId}/events") public Response getEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("historyId") Long historyId) throws Exception { return getEventsOfTestRunTopologyHistory(topologyId, historyId, null); } @GET @Path("/topologies/{topologyId}/testhistories/{historyId}/events/{componentName}") public Response getEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("historyId") Long historyId, @PathParam("componentName") String componentName) throws Exception { return getEventsOfTestRunTopologyHistory(topologyId, historyId, componentName); } @GET @Path("/topologies/{topologyId}/testhistories/{historyId}/events/download") @Produces(MediaType.APPLICATION_OCTET_STREAM) public Response downloadEventsOfTestRunTopologyHistory(@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("historyId") Long historyId) throws Exception { File eventLogFile = getEventLogFile(topologyId, historyId); String content = FileUtils.readFileToString(eventLogFile, ENCODING_UTF_8); InputStream is = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)); String fileName = String.format("events-topology-%d-history-%d.log", topologyId, historyId); return Response.status(OK) .entity(is) .header("Content-Disposition", "attachment; filename=\"" + fileName + "\"") .build(); } private Response getEventsOfTestRunTopologyHistory(Long topologyId, Long historyId, String componentName) throws IOException { File eventLogFile = getEventLogFile(topologyId, historyId); List<String> lines = FileUtils.readLines(eventLogFile, ENCODING_UTF_8); Stream<Map<String, Object>> eventsStream = lines.stream().map(line -> { try { return objectMapper.readValue(line, new TypeReference<Map<String, Object>>() {}); } catch (IOException e) { throw new RuntimeException(e); } }); if (!StringUtils.isEmpty(componentName)) { 
eventsStream = eventsStream.filter(event -> { String eventComponentName = (String) event.get("componentName"); return eventComponentName != null && eventComponentName.equals(componentName); }); } return WSUtils.respondEntities(eventsStream.collect(toList()), OK); } private File getEventLogFile(Long topologyId, Long historyId) { TopologyTestRunHistory history = catalogService.getTopologyTestRunHistory(historyId); if (history == null) { throw EntityNotFoundException.byId(String.valueOf(historyId)); } if (!history.getTopologyId().equals(topologyId)) { throw BadRequestException.message("Test history " + historyId + " is not belong to topology " + topologyId); } String eventLogFilePath = history.getEventLogFilePath(); File eventLogFile = new File(eventLogFilePath); if (!eventLogFile.exists() || eventLogFile.isDirectory() || !eventLogFile.canRead()) { throw BadRequestException.message("Event log file of history " + historyId + " does not exist or is not readable."); } return eventLogFile; } private List<TopologyTestRunHistory> filterHistories(Integer limit, Collection<TopologyTestRunHistory> histories) { if (limit == null) { limit = DEFAULT_LIST_ENTITIES_COUNT; } return histories.stream() // reverse order .sorted((h1, h2) -> (int) (h2.getId() - h1.getId())) .limit(limit) .collect(toList()); } @POST @Path("/topologies/{topologyId}/testcases") public Response addTestRunCase(@PathParam("topologyId") Long topologyId, TopologyTestRunCase testRunCase) { testRunCase.setTopologyId(topologyId); Long currentVersionId = catalogService.getCurrentVersionId(topologyId); testRunCase.setVersionId(currentVersionId); TopologyTestRunCase addedCase = catalogService.addTopologyTestRunCase(testRunCase); return WSUtils.respondEntity(addedCase, CREATED); } @POST @Path("/topologies/{topologyId}/versions/{versionId}/testcases") public Response addTestRunCase(@PathParam("topologyId") Long topologyId, @PathParam("versionId") Long versionId, TopologyTestRunCase testRunCase) { 
testRunCase.setTopologyId(topologyId); testRunCase.setVersionId(versionId); TopologyTestRunCase addedCase = catalogService.addTopologyTestRunCase(testRunCase); return WSUtils.respondEntity(addedCase, CREATED); } @PUT @Path("/topologies/{topologyId}/testcases/{testCaseId}") public Response addOrUpdateTestRunCase(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, TopologyTestRunCase testRunCase) { testRunCase.setTopologyId(topologyId); testRunCase.setId(testCaseId); TopologyTestRunCase updatedCase = catalogService.addOrUpdateTopologyTestRunCase(topologyId, testRunCase); return WSUtils.respondEntity(updatedCase, OK); } @GET @Path("/topologies/{topologyId}/testcases/{testCaseId}") public Response getTestRunCase(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId) { TopologyTestRunCase testcase = catalogService.getTopologyTestRunCase(topologyId, testCaseId); if (testcase == null) { throw EntityNotFoundException.byId(Long.toString(testCaseId)); } return WSUtils.respondEntity(testcase, OK); } @GET @Path("/topologies/{topologyId}/testcases") @Timed public Response listTestRunCases(@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @QueryParam("limit") Integer limit) throws Exception { Long currentVersionId = catalogService.getCurrentVersionId(topologyId); Collection<TopologyTestRunCase> cases = catalogService.listTopologyTestRunCase(topologyId, currentVersionId); if (cases == null) { throw EntityNotFoundException.byFilter("topology id " + topologyId); } List<TopologyTestRunCase> filteredCases = filterTestRunCases(limit, cases); return WSUtils.respondEntities(filteredCases, OK); } @GET @Path("/topologies/{topologyId}/versions/{versionId}/testcases") @Timed public Response listTestRunCases(@Context UriInfo urlInfo, @PathParam("topologyId") Long topologyId, @PathParam("versionId") Long versionId, @QueryParam("limit") Integer limit) throws Exception { Collection<TopologyTestRunCase> cases = 
catalogService.listTopologyTestRunCase(topologyId, versionId); if (cases == null) { throw EntityNotFoundException.byFilter("topology id " + topologyId); } List<TopologyTestRunCase> filteredCases = filterTestRunCases(limit, cases); return WSUtils.respondEntities(filteredCases, OK); } @DELETE @Path("/topologies/{topologyId}/testcases/{testCaseId}") public Response removeTestRunCase(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId) { TopologyTestRunCase testRunCase = catalogService.removeTestRunCase(topologyId, testCaseId); if (testRunCase != null) { return WSUtils.respondEntity(testRunCase, OK); } throw EntityNotFoundException.byId(testCaseId.toString()); } private List<TopologyTestRunCase> filterTestRunCases(Integer limit, Collection<TopologyTestRunCase> cases) { if (limit == null) { limit = DEFAULT_LIST_ENTITIES_COUNT; } return cases.stream() // reverse order .sorted((h1, h2) -> (int) (h2.getId() - h1.getId())) .limit(limit) .collect(toList()); } @POST @Path("/topologies/{topologyId}/testcases/{testCaseId}/sources") public Response addTestRunCaseSource(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, TopologyTestRunCaseSource testRunCaseSource) { TopologySource topologySource = getAssociatedTopologySource(topologyId, testCaseId, testRunCaseSource.getSourceId()); testRunCaseSource.setVersionId(topologySource.getVersionId()); TopologyTestRunCaseSource addedCaseSource = catalogService.addTopologyTestRunCaseSource(testRunCaseSource); return WSUtils.respondEntity(addedCaseSource, CREATED); } @PUT @Path("/topologies/{topologyId}/testcases/{testCaseId}/sources/{id}") public Response addOrUpdateTestRunCaseSource(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, @PathParam("id") Long id, TopologyTestRunCaseSource testRunCaseSource) { testRunCaseSource.setId(id); testRunCaseSource.setTestCaseId(testCaseId); TopologySource topologySource = 
getAssociatedTopologySource(topologyId, testCaseId, testRunCaseSource.getSourceId()); testRunCaseSource.setVersionId(topologySource.getVersionId()); TopologyTestRunCaseSource updatedCase = catalogService.addOrUpdateTopologyTestRunCaseSource(testRunCaseSource.getId(), testRunCaseSource); return WSUtils.respondEntity(updatedCase, OK); } private TopologySource getAssociatedTopologySource(Long topologyId, Long testCaseId, Long topologySourceId) { TopologyTestRunCase testCase = catalogService.getTopologyTestRunCase(topologyId, testCaseId); if (testCase == null) { throw EntityNotFoundException.byId("Topology test case with topology id " + topologyId + " and test case id " + testCaseId); } TopologySource topologySource = catalogService.getTopologySource(topologyId, topologySourceId, testCase.getVersionId()); if (topologySource == null) { throw EntityNotFoundException.byId("Topology source with topology id " + topologyId + " and version id " + testCase.getVersionId()); } else if (!testCase.getVersionId().equals(topologySource.getVersionId())) { throw new IllegalStateException("Test case and topology source point to the different version id: " + "version id of test case: " + testCase.getVersionId() + " / " + "version id of topology source: " + topologySource.getVersionId()); } return topologySource; } @GET @Path("/topologies/{topologyId}/testcases/{testcaseId}/sources/{id}") public Response getTestRunCaseSource(@PathParam("topologyId") Long topologyId, @PathParam("testcaseId") Long testcaseId, @PathParam("id") Long id) { TopologyTestRunCaseSource testCaseSource = catalogService.getTopologyTestRunCaseSource(testcaseId, id); if (testCaseSource == null) { throw EntityNotFoundException.byId(Long.toString(id)); } return WSUtils.respondEntity(testCaseSource, OK); } @GET @Path("/topologies/{topologyId}/testcases/{testCaseId}/sources/topologysource/{sourceId}") public Response getTestRunCaseSourceByTopologySource(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") 
Long testCaseId, @PathParam("sourceId") Long sourceId) { TopologyTestRunCaseSource testCaseSource = catalogService.getTopologyTestRunCaseSourceBySourceId(testCaseId, sourceId); if (testCaseSource == null) { throw EntityNotFoundException.byId("test case id: " + testCaseId + " , topology source id: " + sourceId); } return WSUtils.respondEntity(testCaseSource, OK); } @GET @Path("/topologies/{topologyId}/testcases/{testCaseId}/sources") public Response listTestRunCaseSource(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId) { Collection<TopologyTestRunCaseSource> sources = catalogService.listTopologyTestRunCaseSource(topologyId, testCaseId); if (sources == null) { throw EntityNotFoundException.byFilter("topologyId: " + topologyId + " / testCaseId: " + testCaseId); } return WSUtils.respondEntities(sources, OK); } @POST @Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks") public Response addTestRunCaseSink(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, TopologyTestRunCaseSink testRunCaseSink) { TopologySink topologySink = getAssociatedTopologySink(topologyId, testCaseId, testRunCaseSink.getSinkId()); testRunCaseSink.setVersionId(topologySink.getVersionId()); TopologyTestRunCaseSink addedCaseSink = catalogService.addTopologyTestRunCaseSink(testRunCaseSink); return WSUtils.respondEntity(addedCaseSink, CREATED); } @PUT @Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks/{id}") public Response addOrUpdateTestRunCaseSink(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, @PathParam("id") Long id, TopologyTestRunCaseSink testRunCaseSink) { testRunCaseSink.setId(id); testRunCaseSink.setTestCaseId(testCaseId); TopologySink topologySink = getAssociatedTopologySink(topologyId, testCaseId, testRunCaseSink.getSinkId()); testRunCaseSink.setVersionId(topologySink.getVersionId()); TopologyTestRunCaseSink updatedCase = 
catalogService.addOrUpdateTopologyTestRunCaseSink(testRunCaseSink.getId(), testRunCaseSink); return WSUtils.respondEntity(updatedCase, OK); } private TopologySink getAssociatedTopologySink(Long topologyId, Long testCaseId, Long topologySinkId) { TopologyTestRunCase testCase = catalogService.getTopologyTestRunCase(topologyId, testCaseId); if (testCase == null) { throw EntityNotFoundException.byId("Topology test case with topology id " + topologyId + " and test case id " + testCaseId); } TopologySink topologySink = catalogService.getTopologySink(topologyId, topologySinkId, testCase.getVersionId()); if (topologySink == null) { throw EntityNotFoundException.byId("Topology sink with topology id " + topologyId + " and version id " + testCase.getVersionId()); } else if (!testCase.getVersionId().equals(topologySink.getVersionId())) { throw new IllegalStateException("Test case and topology sink point to the different version id: " + "version id of test case: " + testCase.getVersionId() + " / " + "version id of topology sink: " + topologySink.getVersionId()); } return topologySink; } @GET @Path("/topologies/{topologyId}/testcases/{testcaseId}/sinks/{id}") public Response getTestRunCaseSink(@PathParam("topologyId") Long topologyId, @PathParam("testcaseId") Long testcaseId, @PathParam("id") Long id) { TopologyTestRunCaseSink testCaseSink = catalogService.getTopologyTestRunCaseSink(testcaseId, id); if (testCaseSink == null) { throw EntityNotFoundException.byId(Long.toString(id)); } return WSUtils.respondEntity(testCaseSink, OK); } @GET @Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks/topologysink/{sinkId}") public Response getTestRunCaseSinkByTopologySink(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId, @PathParam("sinkId") Long sinkId) { TopologyTestRunCaseSink testCaseSink = catalogService.getTopologyTestRunCaseSinkBySinkId(testCaseId, sinkId); if (testCaseSink == null) { throw EntityNotFoundException.byId("test case id: " + 
testCaseId + " , topology source id: " + sinkId); } return WSUtils.respondEntity(testCaseSink, OK); } @GET @Path("/topologies/{topologyId}/testcases/{testCaseId}/sinks") public Response listTestRunCaseSink(@PathParam("topologyId") Long topologyId, @PathParam("testCaseId") Long testCaseId) { Collection<TopologyTestRunCaseSink> sources = catalogService.listTopologyTestRunCaseSink(topologyId, testCaseId); if (sources == null) { throw EntityNotFoundException.byFilter("topologyId: " + topologyId + " / testCaseId: " + testCaseId); } return WSUtils.respondEntities(sources, OK); } private static class SimplifiedTopologyTestRunHistory { private Long id; private Long topologyId; private Long versionId; private Boolean finished = false; private Boolean success = false; private Boolean matched = false; private Long startTime; private Long finishTime; private Long timestamp; SimplifiedTopologyTestRunHistory(TopologyTestRunHistory history) { id = history.getId(); topologyId = history.getTopologyId(); versionId = history.getVersionId(); finished = history.getFinished(); success = history.getSuccess(); matched = history.getMatched(); startTime = history.getStartTime(); finishTime = history.getFinishTime(); timestamp = history.getTimestamp(); } public Long getId() { return id; } public Long getTopologyId() { return topologyId; } public Long getVersionId() { return versionId; } public Boolean getFinished() { return finished; } public Boolean getSuccess() { return success; } public Boolean getMatched() { return matched; } public Long getStartTime() { return startTime; } public Long getFinishTime() { return finishTime; } public Long getTimestamp() { return timestamp; } } }
apache-2.0
google-code/android-scripting
jruby/src/src/org/jruby/ext/ffi/jna/Invocation.java
796
package org.jruby.ext.ffi.jna;

import java.util.ArrayList;
import java.util.List;

import org.jruby.runtime.ThreadContext;

/**
 * An invocation session.
 * This provides post-invoke cleanup: actions registered during the call via
 * {@link #addPostInvoke(Runnable)} are executed when {@link #finish()} is called.
 */
final class Invocation {
    /** Thread context this invocation was created with. */
    private final ThreadContext context;

    /**
     * Cleanup actions to run after the invocation completes.
     * Lazily created on first registration; null when no actions are registered.
     * Declared as the List interface rather than the concrete ArrayList type.
     */
    private List<Runnable> postInvokeList;

    Invocation(ThreadContext context) {
        this.context = context;
    }

    /**
     * Runs all registered post-invoke actions in registration order.
     * A no-op when nothing was registered.
     */
    void finish() {
        if (postInvokeList == null) {
            return;
        }
        for (Runnable r : postInvokeList) {
            r.run();
        }
    }

    /**
     * Registers an action to run when {@link #finish()} is called.
     *
     * @param postInvoke the cleanup action to register
     */
    void addPostInvoke(Runnable postInvoke) {
        if (postInvokeList == null) {
            postInvokeList = new ArrayList<Runnable>();
        }
        postInvokeList.add(postInvoke);
    }

    /**
     * @return the thread context passed at construction time
     */
    ThreadContext getThreadContext() {
        return context;
    }
}
apache-2.0
wayshall/onetwo
core/modules/security/src/main/java/org/onetwo/ext/security/utils/LoginUserDetails.java
1289
package org.onetwo.ext.security.utils; import java.util.Collection; import org.onetwo.common.web.userdetails.UserDetail; import org.onetwo.common.web.userdetails.UserRoot; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.userdetails.User; @SuppressWarnings("serial") public class LoginUserDetails extends User implements UserDetail, /*SsoTokenable,*/ UserRoot { final private long userId; // private String token; private String nickname; private String avatar; public LoginUserDetails(long userId, String username, String password, Collection<? extends GrantedAuthority> authorities) { super(username, password, authorities); this.userId = userId; } public long getUserId() { return userId; } @Override public String getUserName() { return getUsername(); } @Override public boolean isSystemRootUser() { return userId==ROOT_USER_ID; } public String getNickname() { return nickname; } public void setNickname(String nickname) { this.nickname = nickname; } public String getAvatar() { return avatar; } public void setAvatar(String avatar) { this.avatar = avatar; } /*public String getToken() { return token; } public void setToken(String token) { this.token = token; }*/ }
apache-2.0
shiftconnects/android-auth-manager
sample/src/main/java/com/shiftconnects/android/auth/example/util/GsonConverter.java
3430
/* * Copyright (C) 2015 P100 OG, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.shiftconnects.android.auth.example.util; import com.google.gson.Gson; import com.google.gson.JsonParseException; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.lang.reflect.Type; import retrofit.converter.ConversionException; import retrofit.converter.Converter; import retrofit.mime.MimeUtil; import retrofit.mime.TypedInput; import retrofit.mime.TypedOutput; /** * A {@link Converter} which uses GSON for serialization and deserialization of entities. * * @author Jake Wharton (jw@squareup.com) */ public class GsonConverter implements Converter { private final Gson gson; private String charset; /** * Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and * decoding from JSON (when no charset is specified by a header) will use UTF-8. */ public GsonConverter(Gson gson) { this(gson, "UTF-8"); } /** * Create an instance using the supplied {@link Gson} object for conversion. Encoding to JSON and * decoding from JSON (when no charset is specified by a header) will use the specified charset. 
*/ public GsonConverter(Gson gson, String charset) { this.gson = gson; this.charset = charset; } @Override public Object fromBody(TypedInput body, Type type) throws ConversionException { String charset = this.charset; if (body.mimeType() != null) { charset = MimeUtil.parseCharset(body.mimeType(), charset); } InputStreamReader isr = null; try { isr = new InputStreamReader(body.in(), charset); return gson.fromJson(isr, type); } catch (IOException e) { throw new ConversionException(e); } catch (JsonParseException e) { throw new ConversionException(e); } finally { if (isr != null) { try { isr.close(); } catch (IOException ignored) { } } } } @Override public TypedOutput toBody(Object object) { try { return new JsonTypedOutput(gson.toJson(object).getBytes(charset), charset); } catch (UnsupportedEncodingException e) { throw new AssertionError(e); } } private static class JsonTypedOutput implements TypedOutput { private final byte[] jsonBytes; private final String mimeType; JsonTypedOutput(byte[] jsonBytes, String encode) { this.jsonBytes = jsonBytes; this.mimeType = "application/json; charset=" + encode; } @Override public String fileName() { return null; } @Override public String mimeType() { return mimeType; } @Override public long length() { return jsonBytes.length; } @Override public void writeTo(OutputStream out) throws IOException { out.write(jsonBytes); } } }
apache-2.0
treasure-data/digdag
digdag-tests/src/test/java/acceptance/ValidateProjectIT.java
2609
package acceptance; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import utils.CommandStatus; import utils.TemporaryDigdagServer; import java.nio.file.Path; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static utils.TestUtils.copyResource; import static utils.TestUtils.main; // // This file doesn't contain normal case. // It defined in another test. // public class ValidateProjectIT { @Rule public TemporaryFolder folder = new TemporaryFolder(); @Rule public TemporaryDigdagServer server = TemporaryDigdagServer.builder() .build(); private Path config; private Path projectDir; @Before public void setUp() throws Exception { projectDir = folder.getRoot().toPath().resolve("foobar"); config = folder.newFile().toPath(); } @Test public void uploadInvalidTaskProject() throws Exception { // Create new project CommandStatus initStatus = main("init", "-c", config.toString(), projectDir.toString()); assertThat(initStatus.code(), is(0)); copyResource("acceptance/error_task/invalid_at_group.dig", projectDir.resolve("invalid_at_group.dig")); // Push the project CommandStatus pushStatus = main( "push", "--project", projectDir.toString(), "foobar", "-c", config.toString(), "-e", server.endpoint()); assertThat(pushStatus.code(), is(1)); assertThat(pushStatus.errUtf8(), containsString("A task can't have more than one operator")); } @Test public void uploadInvalidScheduleProject() throws Exception { // Create new project CommandStatus initStatus = main("init", "-c", config.toString(), projectDir.toString()); assertThat(initStatus.code(), is(0)); copyResource("acceptance/schedule/invalid_schedule.dig", projectDir.resolve("invalid_schedule.dig")); // Push the project CommandStatus pushStatus = main( "push", "--project", projectDir.toString(), "foobar", "-c", config.toString(), "-e", server.endpoint()); 
assertThat(pushStatus.code(), is(1)); assertThat(pushStatus.errUtf8(), containsString("scheduler requires mm:ss format")); } }
apache-2.0
scholzj/barnabas
api/src/main/java/io/strimzi/api/kafka/model/connect/build/Output.java
2326
/*
 * Copyright Strimzi authors.
 * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
 */
package io.strimzi.api.kafka.model.connect.build;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.strimzi.api.kafka.model.UnknownPropertyPreserving;
import io.strimzi.crdgenerator.annotations.Description;
import lombok.EqualsAndHashCode;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Abstract baseclass for different representations of connect build outputs, discriminated by {@link #getType() type}.
 */
// Jackson picks the concrete subclass from the existing "type" JSON property:
// "docker" -> DockerOutput, "imagestream" -> ImageStreamOutput.
@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        include = JsonTypeInfo.As.EXISTING_PROPERTY,
        property = "type"
)
@JsonSubTypes(
        {
                @JsonSubTypes.Type(value = DockerOutput.class, name = Output.TYPE_DOCKER),
                @JsonSubTypes.Type(value = ImageStreamOutput.class, name = Output.TYPE_IMAGESTREAM)
        }
)
@JsonInclude(JsonInclude.Include.NON_NULL)
@EqualsAndHashCode
public abstract class Output implements UnknownPropertyPreserving, Serializable {
    private static final long serialVersionUID = 1L;

    // Discriminator values for the two supported output kinds (see @JsonSubTypes above)
    public static final String TYPE_DOCKER = "docker";
    public static final String TYPE_IMAGESTREAM = "imagestream";

    private String image;
    // Catch-all for JSON properties not declared on this class; preserved on round-trip
    // via the UnknownPropertyPreserving contract
    private Map<String, Object> additionalProperties = new HashMap<>(0);

    @Description("Output type. " +
            "Must be either `docker` for pushing the newly build image to Docker compatible registry or `imagestream` for pushing the image to OpenShift ImageStream. " +
            "Required.")
    public abstract String getType();

    @Description("The name of the image which will be built. " +
            "Required")
    @JsonProperty(required = true)
    public String getImage() {
        return image;
    }

    public void setImage(String image) {
        this.image = image;
    }

    @Override
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }

    @Override
    public void setAdditionalProperty(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
}
apache-2.0
TranscendComputing/TopStackRDS
src/com/transcend/rds/worker/ModifyDBParameterGroupActionWorker.java
11169
/**
 *
 */
package com.transcend.rds.worker;

import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

import org.hibernate.Session;
import org.slf4j.Logger;
import org.springframework.transaction.annotation.Transactional;

import com.msi.tough.cf.json.DatabagParameter;
import com.msi.tough.core.Appctx;
import com.msi.tough.core.HibernateUtil;
import com.msi.tough.core.JsonUtil;
import com.msi.tough.model.AccountBean;
import com.msi.tough.model.rds.RdsDbinstance;
import com.msi.tough.model.rds.RdsDbparameterGroup;
import com.msi.tough.model.rds.RdsParameter;
import com.msi.tough.query.ErrorResponse;
import com.msi.tough.query.QueryFaults;
import com.msi.tough.query.ServiceRequestContext;
import com.msi.tough.rds.ValidationManager;
import com.msi.tough.rds.json.RDSConfigDatabagItem;
import com.msi.tough.rds.json.RDSDatabag;
import com.msi.tough.rds.json.RDSParameterGroupDatabagItem;
import com.msi.tough.utils.AccountUtil;
import com.msi.tough.utils.ChefUtil;
import com.msi.tough.utils.ConfigurationUtil;
import com.msi.tough.utils.Constants;
import com.msi.tough.utils.RDSQueryFaults;
import com.msi.tough.utils.rds.InstanceEntity;
import com.msi.tough.utils.rds.ParameterGroupEntity;
import com.msi.tough.utils.rds.RDSUtilities;
import com.msi.tough.workflow.core.AbstractWorker;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionRequestMessage;
import com.transcend.rds.message.ModifyDBParameterGroupActionMessage.ModifyDBParameterGroupActionResultMessage;
import com.transcend.rds.message.RDSMessage.Parameter;

/**
 * @author tdhite
 */
public class ModifyDBParameterGroupActionWorker extends
        AbstractWorker<ModifyDBParameterGroupActionRequestMessage, ModifyDBParameterGroupActionResultMessage> {
    private final static Logger logger = Appctx
            .getLogger(ModifyDBParameterGroupActionWorker.class.getName());

    /**
     * We need a local copy of this doWork to provide the transactional
     * annotation. Transaction management is handled by the annotation, which
     * can only be on a concrete class.
     *
     * @param req
     * @return
     * @throws Exception
     */
    @Transactional
    public ModifyDBParameterGroupActionResultMessage doWork(
            ModifyDBParameterGroupActionRequestMessage req) throws Exception {
        logger.debug("Performing work for ModifyDBParameterGroupAction.");
        return super.doWork(req, getSession());
    }

    /**
     * modifyDBParameterGroup ************************************************
     * This Operation modifies the parameters associated with the named
     * DBParameterGroup. It essentially adds/updates parameters associated with
     * a DBParameterGroup If parameter exists then update if parameter doesn't
     * exist then insert Request: DBParameterGroupName(R) List of Parameter
     * records(R) Parameters: List of up to 20 parameter records Response:
     * DBParameterGroup Exceptions: DBParameterGroupNotFound
     * InvalidDBParameterGroupState Processing 1. Confirm that ParamaterGroup
     * exists and is in the appropriate state 2. Update the Parameter records by
     * inserting or updating new parameter 3. Return response
     */
    @Override
    protected ModifyDBParameterGroupActionResultMessage doWork0(ModifyDBParameterGroupActionRequestMessage req,
            ServiceRequestContext context) throws Exception {
        logger.debug("ModifyDBParameterGroup action is called.");
        // Manual session/transaction handling; committed near the end,
        // rolled back in the catch blocks, closed in finally.
        final Session sess = HibernateUtil.newSession();
        final AccountBean ac = context.getAccountBean();
        final ModifyDBParameterGroupActionResultMessage.Builder resp =
                ModifyDBParameterGroupActionResultMessage.newBuilder();
        try {
            sess.beginTransaction();
            final long userId = ac.getId();
            // Validate and normalize the group name (max length 255)
            final String grpName = ValidationManager.validateIdentifier(
                    req.getDbParameterGroupName(), 255, true);
            final List<Parameter> pList = req.getParametersList();
            final int pListLen = pList.size();
            logger.info("ModifyDBParameterGroup: " + " UserID = " + userId
                    + " ParameterGroupName = " + grpName
                    + " Total Number of Listed Parameters = " + pListLen);
            // The built-in default group is read-only
            if (grpName.equals("default.mysql5.5")) {
                throw RDSQueryFaults
                        .InvalidClientTokenId("You do not have privilege to modify default DBParameterGroup.");
            }

            // check that DBParameterGroup exists
            final RdsDbparameterGroup pGrpRec = ParameterGroupEntity
                    .getParameterGroup(sess, grpName, ac.getId());
            if (pGrpRec == null) {
                throw RDSQueryFaults.DBParameterGroupNotFound();
            }

            final Collection<RdsDbinstance> dbInstances = InstanceEntity
                    .selectDBInstancesByParameterGroup(sess, grpName, -1, ac);
            // make sure that all DBInstances using this DBParameterGroup are in
            // available state
            for (final RdsDbinstance dbinstance : dbInstances) {
                if (!dbinstance.getDbinstanceStatus().equals(
                        RDSUtilities.STATUS_AVAILABLE)) {
                    throw RDSQueryFaults
                            .InvalidDBParameterGroupState("Currently there are DBInstance(s) that use this DBParameterGroup and it"
                                    + " is not in available state.");
                }
            }

            // reset the parameters in the DB
            // Collects parameters that must wait for an instance reboot
            List<RdsParameter> forRebootPending = new LinkedList<RdsParameter>();
            // NOTE(review): 'paramGrpFamily' and 'sac' appear unused in the rest of
            // this method — confirm before removing.
            final String paramGrpFamily = pGrpRec.getDbparameterGroupFamily();
            final AccountBean sac = AccountUtil.readAccount(sess, 1L);
            for (final Parameter p : pList) {
                final RdsParameter target = ParameterGroupEntity.getParameter(
                        sess, grpName, p.getParameterName(), userId);
                if (target == null) {
                    throw RDSQueryFaults.InvalidParameterValue(p
                            .getParameterName() + " parameter does not exist.");
                }
                logger.debug("Current target parameter: " + target.toString());
                if (!target.getIsModifiable()) {
                    throw RDSQueryFaults.InvalidParameterValue(p
                            .getParameterName()
                            + " is not modifiable parameter.");
                }
                // TODO validate p.getParameterValue along with
                // p.getParameterName to ensure the value is allowed
                else if (p.getApplyMethod().equals(
                        RDSUtilities.PARM_APPMETHOD_IMMEDIATE)) {
                    // "immediate" is only valid for dynamic parameters
                    if (target.getApplyType().equals(
                            RDSUtilities.PARM_APPTYPE_STATIC)) {
                        throw QueryFaults
                                .InvalidParameterCombination(target
                                        .getParameterName()
                                        + " is not dynamic. You can only"
                                        + " use \"pending-reboot\" as valid ApplyMethod for this parameter.");
                    }
                    target.setParameterValue(p.getParameterValue());
                    target.setSource(Constants.USER);
                    sess.save(target);
                } else if (p.getApplyMethod().equals(
                        RDSUtilities.PARM_APPMETHOD_PENDING)) {
                    // Deferred until reboot: stage a detached copy
                    final RdsParameter temp = new RdsParameter();
                    temp.setParameterName(p.getParameterName());
                    temp.setApplyMethod(p.getApplyMethod());
                    temp.setParameterValue(p.getParameterValue());
                    forRebootPending.add(temp);
                }
            }

            // Delete and regenerate the Databag
            logger.debug("There are " + dbInstances.size()
                    + " databags to modify.");
            for (final RdsDbinstance instance : dbInstances) {
                logger.debug("Currently updating the databag for DBInstance "
                        + instance.getDbinstanceId());
                final String databagName = "rds-" + ac.getId() + "-"
                        + instance.getDbinstanceId();
                logger.debug("Deleting the databag " + databagName);
                ChefUtil.deleteDatabagItem(databagName, "config");
                // Per-availability-zone endpoints used inside the databag
                final String postWaitUrl = (String) ConfigurationUtil
                        .getConfiguration(Arrays.asList(new String[] {
                                "TRANSCEND_URL", instance.getAvailabilityZone() }));
                final String servletUrl = (String) ConfigurationUtil
                        .getConfiguration(Arrays.asList(new String[] {
                                "SERVLET_URL", instance.getAvailabilityZone() }));
                final RDSConfigDatabagItem configDataBagItem = new RDSConfigDatabagItem(
                        "config", instance.getAllocatedStorage().toString(),
                        instance.getMasterUsername(),
                        instance.getMasterUserPassword(),
                        instance.getAutoMinorVersionUpgrade(),
                        instance.getEngine(), instance.getEngineVersion(),
                        instance.getDbName(), instance
                                .getBackupRetentionPeriod().toString(),
                        instance.getPreferredBackupWindow(),
                        instance.getPreferredMaintenanceWindow(), instance
                                .getPort().toString(), postWaitUrl, servletUrl,
                        instance.getDbinstanceId(),
                        "rds." + ac.getId() + "." + instance.getDbinstanceId(),
                        ac.getId(), instance.getDbinstanceClass(), "false");
                final RDSParameterGroupDatabagItem parameterGroupDatabagItem = new RDSParameterGroupDatabagItem(
                        "parameters", pGrpRec);
                // read_only and port are instance-specific: override whatever
                // came from the group record
                parameterGroupDatabagItem.getParameters().remove("read_only");
                parameterGroupDatabagItem.getParameters().put(
                        "read_only",
                        DatabagParameter.factory("boolean",
                                "" + instance.getRead_only(), true, "dynamic"));
                parameterGroupDatabagItem.getParameters().remove("port");
                parameterGroupDatabagItem.getParameters().put(
                        "port",
                        DatabagParameter.factory("integer",
                                "" + instance.getPort(), false, "static"));
                final RDSDatabag bag = new RDSDatabag(configDataBagItem,
                        parameterGroupDatabagItem);
                logger.debug("Databag: "
                        + JsonUtil.toJsonPrettyPrintString(bag));
                logger.debug("Regenerating the databag " + databagName);
                ChefUtil.createDatabagItem(databagName, "config", bag.toJson());
            }

            if (forRebootPending != null && forRebootPending.size() > 0) {
                // forRebootPending is now a list of static parameters and
                // dynamic parameters with pending-reboot ApplyMethod
                forRebootPending = ParameterGroupEntity
                        .modifyParamGroupWithPartialList(sess, pGrpRec,
                                forRebootPending, userId);
                // code below may need to be rewritten for better performance;
                // Hibernate may be useful to improve the snippet below
                for (final RdsDbinstance instance : dbInstances) {
                    final List<RdsParameter> alreadyPending = instance
                            .getPendingRebootParameters();
                    if (alreadyPending == null || alreadyPending.size() == 0) {
                        instance.setPendingRebootParameters(forRebootPending);
                        // instance.setDbinstanceStatus(RDSUtilities.STATUS_MODIFYING);
                        sess.save(instance);
                    } else {
                        // Merge: update values for names already pending,
                        // append the rest
                        for (final RdsParameter newParam : forRebootPending) {
                            boolean found = false;
                            int i = 0;
                            while (!found && i < alreadyPending.size()) {
                                if (alreadyPending.get(i).getParameterName()
                                        .equals(newParam.getParameterName())) {
                                    alreadyPending.get(i).setParameterValue(
                                            newParam.getParameterValue());
                                    found = true;
                                }
                                ++i;
                            }
                            if (!found) {
                                alreadyPending.add(newParam);
                            }
                        }
                    }
                }
            }

            // build response document - returns DBParameterGroupName
            resp.setDbParameterGroupName(grpName);
            logger.debug("Committing all the changes...");
            sess.getTransaction().commit();
        } catch (final ErrorResponse rde) {
            // Known service fault: roll back and propagate unchanged
            sess.getTransaction().rollback();
            throw rde;
        } catch (final Exception e) {
            e.printStackTrace();
            sess.getTransaction().rollback();
            // NOTE(review): message says "CreateInstance" although this is the
            // ModifyDBParameterGroup worker — looks copy-pasted; confirm.
            final String msg = "CreateInstance: Class: " + e.getClass()
                    + "Msg:" + e.getMessage();
            logger.error(msg);
            throw RDSQueryFaults.InternalFailure();
        } finally {
            sess.close();
        }
        return resp.buildPartial();
    }
}
apache-2.0
Evil-Co-Legacy/CyborgEmulator
extension/chip8/src/main/java/org/evilco/emulator/extension/chip8/Chip8Extension.java
1398
/* * Copyright (C) 2014 Johannes Donath <johannesd@evil-co.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.evilco.emulator.extension.chip8; import org.evilco.emulator.ui_old.extension.AbstractEmulatorExtension; import org.evilco.emulator.ui_old.extension.InterfaceExtensionManager; /** * @author Johannes Donath <johannesd@evil-co.com> * @copyright Copyright (C) 2014 Evil-Co <http://www.evil-co.com> */ public class Chip8Extension extends AbstractEmulatorExtension { /** * {@inheritDoc} */ @Override public String getIdentifier () { return "org.evilco.emulator.extension.chip8"; } /** * {@inheritDoc} */ @Override public void onEnable (InterfaceExtensionManager extensionManager) { super.onEnable (extensionManager); extensionManager.registerExtension (this, "c8", Chip8Emulator.class); } }
apache-2.0
firefoxmmx2/Android_ServiceTest
gen/com/example/android_servicetest/BuildConfig.java
173
/** Automatically generated file. DO NOT MODIFY */
package com.example.android_servicetest;

public final class BuildConfig {
    public static final boolean DEBUG = true;
}
apache-2.0
daergoth/MatrixC
src/main/java/com/fordprog/matrix/interpreter/type/Type.java
115
package com.fordprog.matrix.interpreter.type;

/**
 * The value categories distinguished by the interpreter.
 */
public enum Type {
  RATIONAL,
  MATRIX,
  FUNCTION,
  VOID
}
apache-2.0
peter-mount/filesystem
filesystem-core/src/main/java/onl/area51/filesystem/io/Flat.java
1156
/*
 * Copyright 2016 peter.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package onl.area51.filesystem.io;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import org.kohsuke.MetaInfServices;

/**
 * A flat FileSystem which locally matches it's structure.
 * <p>
 * The filesystem path is used verbatim as the local path.
 */
@MetaInfServices(FileSystemIO.class)
public class Flat
        extends LocalFileSystemIO
{

    public Flat(Path basePath, Map<String, ?> env)
    {
        super(basePath, env);
    }

    /** Returns the path characters unchanged as the local path string. */
    @Override
    protected String getPath(char[] path)
            throws IOException
    {
        return new String(path);
    }
}
apache-2.0
fkzrh/signal-collect-sna
src/main/java/com/signalcollect/sna/constants/SNAClassNames.java
1155
/*
 * @author Flavio Keller
 *
 * Copyright 2014 University of Zurich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.signalcollect.sna.constants;

/**
 * Enumeration for the different classes that can occur
 * when running a SNA method algorithm.
 *
 * Each constant carries a human-readable class name, exposed via
 * {@link #getClassName()}.
 */
public enum SNAClassNames {

    DEGREE("Degree"),
    PAGERANK("PageRank"),
    CLOSENESS("Closeness"),
    BETWEENNESS("Betweenness"),
    PATH("Path"),
    LOCALCLUSTERCOEFFICIENT("LocalClusterCoefficient"),
    TRIADCENSUS("Triad Census"),
    LABELPROPAGATION("Label Propagation");

    /** Display name associated with the constant. */
    private final String className;

    SNAClassNames(String name) {
        this.className = name;
    }

    /**
     * Returns the human-readable name of this SNA class.
     * (Added accessor — the field was previously assigned but unreadable.)
     */
    public String getClassName() {
        return className;
    }
}
apache-2.0
HaStr/kieker
kieker-common/test/kieker/test/common/junit/record/flow/trace/concurrency/monitor/TestMonitorNotifyEvent.java
3493
/*************************************************************************** * Copyright 2015 Kieker Project (http://kieker-monitoring.net) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.test.common.junit.record.flow.trace.concurrency.monitor; import java.nio.ByteBuffer; import org.junit.Assert; import org.junit.Test; import kieker.common.record.flow.trace.concurrency.monitor.MonitorNotifyEvent; import kieker.common.util.registry.IRegistry; import kieker.common.util.registry.Registry; import kieker.test.common.junit.AbstractKiekerTest; /** * @author Jan Waller * * @since 1.8 */ public class TestMonitorNotifyEvent extends AbstractKiekerTest { private static final long TSTAMP = 987998L; private static final long TRACE_ID = 23444L; private static final int ORDER_INDEX = 234; private static final int LOCK_ID = 13; /** * Default constructor. */ public TestMonitorNotifyEvent() { // empty default constructor } /** * Tests the constructor and toArray(..) methods of {@link MonitorNotifyEvent}. * * Assert that a record instance event1 equals an instance event2 created by serializing event1 to an array event1Array * and using event1Array to construct event2. This ignores a set loggingTimestamp! 
*/ @Test public void testSerializeDeserializeEquals() { final MonitorNotifyEvent event1 = new MonitorNotifyEvent(TSTAMP, TRACE_ID, ORDER_INDEX, LOCK_ID); Assert.assertEquals("Unexpected timestamp", TSTAMP, event1.getTimestamp()); Assert.assertEquals("Unexpected trace ID", TRACE_ID, event1.getTraceId()); Assert.assertEquals("Unexpected order index", ORDER_INDEX, event1.getOrderIndex()); Assert.assertEquals("Unexpected lock id", LOCK_ID, event1.getLockId()); final Object[] event1Array = event1.toArray(); final MonitorNotifyEvent event2 = new MonitorNotifyEvent(event1Array); Assert.assertEquals(event1, event2); Assert.assertEquals(0, event1.compareTo(event2)); } /** * Tests the constructor and writeBytes(..) methods of {@link MonitorNotifyEvent}. */ @Test public void testSerializeDeserializeBinaryEquals() { final MonitorNotifyEvent event1 = new MonitorNotifyEvent(TSTAMP, TRACE_ID, ORDER_INDEX, LOCK_ID); Assert.assertEquals("Unexpected timestamp", TSTAMP, event1.getTimestamp()); Assert.assertEquals("Unexpected trace ID", TRACE_ID, event1.getTraceId()); Assert.assertEquals("Unexpected order index", ORDER_INDEX, event1.getOrderIndex()); Assert.assertEquals("Unexpected lock id", LOCK_ID, event1.getLockId()); final IRegistry<String> stringRegistry = new Registry<String>(); final ByteBuffer buffer = ByteBuffer.allocate(event1.getSize()); event1.writeBytes(buffer, stringRegistry); buffer.flip(); final MonitorNotifyEvent event2 = new MonitorNotifyEvent(buffer, stringRegistry); Assert.assertEquals(event1, event2); Assert.assertEquals(0, event1.compareTo(event2)); } }
apache-2.0
enioka/jqm
jqm-all/jqm-client/jqm-api-client-jdbc/src/test/java/com/enioka/jqm/api/test/BasicTest.java
7479
/**
 * Copyright © 2013 enioka. All rights reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.enioka.jqm.api.test;

import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;

import com.enioka.jqm.api.JqmClientFactory;
import com.enioka.jqm.api.Query;
import com.enioka.jqm.api.Query.Sort;
import com.enioka.jqm.api.State;
import com.enioka.jqm.jdbc.Db;
import com.enioka.jqm.jdbc.DbConn;
import com.enioka.jqm.model.Instruction;
import com.enioka.jqm.model.JobDef;
import com.enioka.jqm.model.JobDef.PathType;
import com.enioka.jqm.model.JobInstance;
import com.enioka.jqm.model.Queue;

/**
 * Simple tests for checking query syntax (no data)
 */
public class BasicTest
{
    private static Logger jqmlogger = Logger.getLogger(BasicTest.class);

    // Two consecutive client calls must both succeed (client is reusable).
    @Test
    public void testChain()
    {
        // No exception allowed!
        JqmClientFactory.getClient().getQueues();
        jqmlogger.info("q1");
        JqmClientFactory.getClient().getQueues();
        jqmlogger.info("q2");
    }

    // Query with a mix of instance/job-def filters must produce valid SQL.
    @Test
    public void testQuery()
    {
        Query q = new Query("toto", null);
        q.setInstanceApplication("marsu");
        q.setInstanceKeyword2("pouet");
        q.setInstanceModule("module");
        q.setParentId(12);
        q.setJobInstanceId(132);
        q.setQueryLiveInstances(true);
        q.setJobDefKeyword2("pouet2");

        JqmClientFactory.getClient().getJobs(q);
    }

    // Same as testQuery, plus every date-range filter.
    @Test
    public void testQueryDate()
    {
        Query q = new Query("toto", null);
        q.setInstanceApplication("marsu");
        q.setInstanceKeyword2("pouet");
        q.setInstanceModule("module");
        q.setParentId(12);
        q.setJobInstanceId(132);
        q.setQueryLiveInstances(true);
        q.setEnqueuedBefore(Calendar.getInstance());
        q.setEndedAfter(Calendar.getInstance());
        q.setBeganRunningAfter(Calendar.getInstance());
        q.setBeganRunningBefore(Calendar.getInstance());
        q.setEnqueuedAfter(Calendar.getInstance());
        q.setEnqueuedBefore(Calendar.getInstance());
        q.setJobDefKeyword2("pouet2");

        JqmClientFactory.getClient().getJobs(q);
    }

    // Single status filter.
    @Test
    public void testQueryStatusOne()
    {
        Query q = new Query("toto", null);
        q.setQueryLiveInstances(true);
        q.setInstanceApplication("marsu");
        q.addStatusFilter(State.CRASHED);

        JqmClientFactory.getClient().getJobs(q);
    }

    // Two status filters combined.
    @Test
    public void testQueryStatusTwo()
    {
        Query q = new Query("toto", null);
        q.setQueryLiveInstances(true);
        q.setInstanceApplication("marsu");
        q.addStatusFilter(State.CRASHED);
        q.addStatusFilter(State.HOLDED);

        JqmClientFactory.getClient().getJobs(q);
    }

    // Fluent builder form of the query API.
    @Test
    public void testFluentQuery()
    {
        Query q = new Query("toto", null);
        q.setQueryLiveInstances(true);
        q.setInstanceApplication("marsu");
        q.addStatusFilter(State.CRASHED);
        q.addStatusFilter(State.HOLDED);

        JqmClientFactory.getClient().getJobs(Query.create().addStatusFilter(State.RUNNING).setApplicationName("MARSU"));
    }

    // LIKE-style wildcard in the application name.
    @Test
    public void testQueryPercent()
    {
        JqmClientFactory.getClient().getJobs(Query.create().setApplicationName("%TEST"));
    }

    // Empty-string application name must not break SQL generation.
    @Test
    public void testQueryNull()
    {
        JqmClientFactory.getClient().getJobs(new Query("", null));
    }

    // Queue filtering by name and by id.
    @Test
    public void testQueueNameId()
    {
        Query.create().setQueueName("test").run();
        Query.create().setQueueId(12).run();
    }

    // Pagination combined with queue filters.
    @Test
    public void testPaginationWithFilter()
    {
        Query.create().setQueueName("test").setPageSize(10).run();
        Query.create().setQueueId(12).setPageSize(10).run();
    }

    // Filter on the submitting user.
    @Test
    public void testUsername()
    {
        Query.create().setUser("test").setPageSize(10).run();
    }

    // Every sort column, ascending and descending, against history.
    @Test
    public void testSortHistory()
    {
        Query.create().setUser("test").setPageSize(10).addSortAsc(Sort.APPLICATIONNAME).addSortDesc(Sort.DATEATTRIBUTION)
                .addSortAsc(Sort.DATEEND).addSortDesc(Sort.DATEENQUEUE).addSortAsc(Sort.ID).addSortDesc(Sort.QUEUENAME)
                .addSortAsc(Sort.STATUS).addSortDesc(Sort.USERNAME).addSortAsc(Sort.PARENTID).run();
    }

    // Sorting against live job instances (no DATEEND there).
    @Test
    public void testSortJi()
    {
        Query.create().setQueryHistoryInstances(false).setQueryLiveInstances(true).setUser("test").addSortAsc(Sort.APPLICATIONNAME)
                .addSortDesc(Sort.DATEATTRIBUTION).addSortDesc(Sort.DATEENQUEUE).addSortAsc(Sort.ID).addSortDesc(Sort.QUEUENAME)
                .addSortAsc(Sort.STATUS).addSortDesc(Sort.USERNAME).addSortAsc(Sort.PARENTID).run();
    }

    // Live-only query (history disabled).
    @Test
    public void testOnlyQueue()
    {
        Query.create().setQueryLiveInstances(true).setQueryHistoryInstances(false).setUser("test").run();
    }

    // Regression test for issue 159: id filter + pagination on live instances.
    @Test
    public void testBug159()
    {
        Query.create().setJobInstanceId(1234).setQueryLiveInstances(true).setQueryHistoryInstances(false).setPageSize(15).setFirstRow(0)
                .run();
    }

    // Regression test for issue 292: sort + queue name over both history and live.
    @Test
    public void testBug292()
    {
        Query.create().addSortDesc(Query.Sort.ID).setQueueName("QBATCH").setQueryHistoryInstances(true).setQueryLiveInstances(true).run();
    }

    // Regression test for issue 305: pagination over live instances must return
    // exactly one row when two matching instances exist. Uses a real DB connection.
    @Test
    public void testBug305()
    {
        Properties p = new Properties();
        p.putAll(Db.loadProperties());
        Db db = new Db(p);
        DbConn cnx = null;
        try
        {
            cnx = db.getConn();

            // Fixture: one queue, one job definition, two running instances.
            int qId = Queue.create(cnx, "q1", "q1 description", true);
            int jobDefdId = JobDef.create(cnx, "test description", "class", null, "jar", qId, 1, "appName", null, null, null, null, null,
                    false, null, PathType.FS);
            JobInstance.enqueue(cnx, com.enioka.jqm.model.State.RUNNING, qId, jobDefdId, null, null, null, null, null, null, null, null,
                    null, false, false, null, 1, Instruction.RUN, new HashMap<String, String>());
            JobInstance.enqueue(cnx, com.enioka.jqm.model.State.RUNNING, qId, jobDefdId, null, null, null, null, null, null, null, null,
                    null, false, false, null, 1, Instruction.RUN, new HashMap<String, String>());
            cnx.commit();

            // Client wired directly onto the same Db object.
            Properties p2 = new Properties();
            p2.put("com.enioka.jqm.jdbc.contextobject", db);
            List<com.enioka.jqm.api.JobInstance> res = JqmClientFactory.getClient("test", p2, false)
                    .getJobs(Query.create().setQueryHistoryInstances(false).setQueryLiveInstances(true).addSortDesc(Query.Sort.ID)
                            .setPageSize(1).setApplicationName("appName"));

            // Page size 1 must cap the result at a single row.
            Assert.assertEquals(1, res.size());
        }
        finally
        {
            if (cnx != null)
            {
                // NOTE(review): closeQuietly is invoked on cnx with cnx as its own
                // argument — looks odd; confirm against the DbConn API.
                cnx.closeQuietly(cnx);
            }
        }
    }
}
apache-2.0
mrniko/redisson
redisson/src/main/java/org/redisson/iterator/RedissonBaseMapIterator.java
1297
/**
 * Copyright (c) 2013-2020 Nikita Koksharov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.redisson.iterator;

import java.util.AbstractMap;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Base iterator over map entries that exposes each raw {@code (key, value)}
 * pair as an {@link Entry} whose {@link Entry#setValue(Object)} writes the
 * new value back through {@link #put(Entry, Object)}.
 *
 * @author Nikita Koksharov
 *
 * @param <V> value type
 */
public abstract class RedissonBaseMapIterator<V> extends BaseIterator<V, Entry<Object, Object>> {

    /**
     * Wraps {@code entry} in a write-through view. The cast to {@code V} is
     * unchecked because the concrete entry type is decided by subclasses.
     *
     * @param entry the raw entry read from the backing store
     * @return the entry wrapped so that {@code setValue} delegates to {@link #put}
     */
    @SuppressWarnings("unchecked")
    protected V getValue(Map.Entry<Object, Object> entry) {
        // Was a raw SimpleEntry; parameterize to avoid the raw-type warning.
        return (V) new AbstractMap.SimpleEntry<Object, Object>(entry.getKey(), entry.getValue()) {
            @Override
            public Object setValue(Object value) {
                // Route mutation back to the subclass so the change is persisted.
                return put(entry, value);
            }
        };
    }

    /**
     * Persists {@code value} for the key of {@code entry}.
     *
     * @param entry the entry being updated
     * @param value the new value
     * @return the previous value, as defined by the concrete implementation
     */
    protected abstract Object put(Entry<Object, Object> entry, Object value);

}
apache-2.0
alancnet/artifactory
web/rest-ui/src/main/java/org/artifactory/ui/rest/resource/home/HomeResource.java
1122
package org.artifactory.ui.rest.resource.home;

import org.artifactory.api.security.AuthorizationService;
import org.artifactory.ui.rest.resource.BaseResource;
import org.artifactory.ui.rest.service.general.GeneralServiceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import javax.annotation.security.RolesAllowed;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

/**
 * REST resource serving the UI home-page data at {@code /home}.
 * Accessible to both admin and regular users; instantiated per request
 * (prototype scope), so the injected factory state is never shared.
 *
 * @author Chen keinan
 */
@Path("home")
@RolesAllowed({AuthorizationService.ROLE_ADMIN, AuthorizationService.ROLE_USER})
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class HomeResource extends BaseResource {

    // Field-injected factory providing the home-page service implementation.
    @Autowired
    GeneralServiceFactory generalFactory;

    /**
     * Returns the home-page payload as JSON by delegating to the home-page
     * service via {@code runService} (inherited from {@code BaseResource}).
     *
     * @return the service response wrapped as a JAX-RS {@link Response}
     * @throws Exception propagated from the underlying service execution
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getHomeData() throws Exception {
        return runService(generalFactory.getHomePage());
    }
}
apache-2.0
cloudkick/cloudkick-android
src/com/cloudkick/LoginActivity.java
9030
/*
 * Licensed to Cloudkick, Inc ('Cloudkick') under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * Cloudkick licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudkick;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;

import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;

import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.Toast;

/**
 * Login screen: collects username/password, lists the user's Cloudkick
 * accounts, creates a read-only API consumer key for the chosen account and
 * stores it in the default SharedPreferences ("editKey"/"editSecret").
 * Finishes with RESULT_OK and a "login" extra on success.
 */
public class LoginActivity extends Activity {
    private static final int SETTINGS_ACTIVITY_ID = 0;
    // NOTE(review): never assigned in this file — kept for interface
    // compatibility in case other code reads it; candidate for removal.
    RelativeLayout loginView = null;
    // Credentials captured on the UI thread (onPreExecute) and read by the
    // background tasks below.
    private String user = null;
    private String pass = null;
    private ProgressDialog progress = null;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.login);
        setTitle("Cloudkick for Android");
        findViewById(R.id.button_login).setOnClickListener(new LoginClickListener());
        findViewById(R.id.button_signup).setOnClickListener(new SignupClickListener());
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == SETTINGS_ACTIVITY_ID) {
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
            // If the user backed out of Settings without entering credentials,
            // just close; otherwise report a successful login to the caller.
            if (prefs.getString("editKey", "").equals("") && prefs.getString("editSecret", "").equals("")) {
                finish();
            } else {
                Intent result = new Intent();
                result.putExtra("login", true);
                setResult(Activity.RESULT_OK, result);
                finish();
            }
        }
    }

    /** Starts the account-listing flow when the login button is pressed. */
    private class LoginClickListener implements View.OnClickListener {
        public void onClick(View v) {
            new AccountLister().execute();
        }
    }

    /** Opens the Cloudkick pricing page in the browser. */
    private class SignupClickListener implements View.OnClickListener {
        public void onClick(View v) {
            startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.cloudkick.com/pricing/")));
        }
    }

    /**
     * Background task: POSTs the credentials to list_accounts and returns the
     * response body line-by-line (one account name per line on success).
     */
    private class AccountLister extends AsyncTask<Void, Void, ArrayList<String>> {
        private Integer statusCode = null;

        @Override
        protected void onPreExecute() {
            user = ((EditText) findViewById(R.id.input_email)).getText().toString();
            pass = ((EditText) findViewById(R.id.input_password)).getText().toString();
            progress = ProgressDialog.show(LoginActivity.this, "", "Logging In...", true);
        }

        @Override
        protected ArrayList<String> doInBackground(Void... voids) {
            ArrayList<String> accounts = new ArrayList<String>();
            try {
                HttpClient client = new DefaultHttpClient();
                HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/list_accounts/");
                ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
                values.add(new BasicNameValuePair("user", user));
                values.add(new BasicNameValuePair("password", pass));
                post.setEntity(new UrlEncodedFormEntity(values));
                HttpResponse response = client.execute(post);
                statusCode = response.getStatusLine().getStatusCode();
                InputStream is = response.getEntity().getContent();
                BufferedReader rd = new BufferedReader(new InputStreamReader(is));
                String line;
                while ((line = rd.readLine()) != null) {
                    accounts.add(line);
                    Log.i("LoginActivity", line);
                }
            } catch (Exception e) {
                e.printStackTrace();
                // 0 is a sentinel meaning "request failed before/while reading".
                statusCode = 0;
            }
            return accounts;
        }

        @Override
        protected void onPostExecute(ArrayList<String> accounts) {
            // Unbox defensively: switching on a null Integer would NPE.
            int code = (statusCode == null) ? 0 : statusCode;
            switch (code) {
            case 200:
                if (accounts.size() == 1) {
                    // Single account: go straight to key creation; progress
                    // dialog is dismissed by KeyRetriever.onPostExecute.
                    new KeyRetriever().execute(accounts.get(0));
                } else {
                    String[] tmpAccountArray = new String[accounts.size()];
                    final String[] accountArray = accounts.toArray(tmpAccountArray);
                    AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
                    builder.setTitle("Select an Account");
                    builder.setItems(accountArray, new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int item) {
                            new KeyRetriever().execute(accountArray[item]);
                        }
                    });
                    AlertDialog selectAccount = builder.create();
                    selectAccount.show();
                }
                break;
            case 400:
                progress.dismiss();
                // Guard against an empty body before inspecting the message.
                if (!accounts.isEmpty() && accounts.get(0).equals("You have enabled multi factor authentication for this account. To access the API key list, please visit the website.")) {
                    AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
                    builder.setTitle("MFA is Enabled");
                    String mfaMessage = ("You appear to have multi-factor authentication enabled on your account. "
                            + "You will need to manually create an API key with read permissions in the "
                            + "web interface, then enter it directly in the settings panel.");
                    builder.setMessage(mfaMessage);
                    builder.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            Intent settingsActivity = new Intent(getBaseContext(), Preferences.class);
                            startActivityForResult(settingsActivity, SETTINGS_ACTIVITY_ID);
                        }
                    });
                    AlertDialog mfaDialog = builder.create();
                    mfaDialog.show();
                } else {
                    Toast.makeText(LoginActivity.this, "Invalid Username or Password", Toast.LENGTH_LONG).show();
                }
                break;
            default:
                progress.dismiss();
                Toast.makeText(LoginActivity.this, "An Error Occurred Retrieving Your Accounts", Toast.LENGTH_LONG).show();
            }
        }
    }

    /**
     * Background task: creates a read-only API consumer for the selected
     * account and stores the returned key/secret pair in preferences.
     */
    private class KeyRetriever extends AsyncTask<String, Void, String[]> {
        private Integer statusCode = null;

        @Override
        protected String[] doInBackground(String... accts) {
            Log.i("LoginActivity", "Selected Account: " + accts[0]);
            String[] creds = new String[2];
            try {
                HttpClient client = new DefaultHttpClient();
                HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/create_consumer/");
                ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
                values.add(new BasicNameValuePair("user", user));
                values.add(new BasicNameValuePair("password", pass));
                values.add(new BasicNameValuePair("account", accts[0]));
                values.add(new BasicNameValuePair("system", "Cloudkick for Android"));
                values.add(new BasicNameValuePair("perm_read", "True"));
                values.add(new BasicNameValuePair("perm_write", "False"));
                values.add(new BasicNameValuePair("perm_execute", "False"));
                post.setEntity(new UrlEncodedFormEntity(values));
                HttpResponse response = client.execute(post);
                statusCode = response.getStatusLine().getStatusCode();
                Log.i("LoginActivity", "Return Code: " + statusCode);
                InputStream is = response.getEntity().getContent();
                BufferedReader rd = new BufferedReader(new InputStreamReader(is));
                String line;
                // First line is the key, second the secret; stop early if the
                // server sent fewer lines.
                for (int i = 0; i < 2; i++) {
                    line = rd.readLine();
                    if (line == null) {
                        return creds;
                    }
                    creds[i] = line;
                }
            } catch (Exception e) {
                // Log instead of swallowing silently, matching AccountLister.
                e.printStackTrace();
                statusCode = 0;
            }
            return creds;
        }

        @Override
        protected void onPostExecute(String[] creds) {
            progress.dismiss();
            if (statusCode == null || statusCode != 200) {
                // Show short error messages - this is a dirty hack
                if (creds[0] != null && creds[0].startsWith("User with role")) {
                    Toast.makeText(LoginActivity.this, creds[0], Toast.LENGTH_LONG).show();
                } else {
                    Toast.makeText(LoginActivity.this, "An Error Occurred on Login", Toast.LENGTH_LONG).show();
                }
                // BUGFIX: previously the "User with role" branch fell through,
                // storing the error text as the API key and finishing with
                // RESULT_OK. Always bail out on a non-200 response.
                return;
            }
            SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
            SharedPreferences.Editor editor = prefs.edit();
            editor.putString("editKey", creds[0]);
            editor.putString("editSecret", creds[1]);
            editor.commit();
            Intent result = new Intent();
            result.putExtra("login", true);
            setResult(Activity.RESULT_OK, result);
            LoginActivity.this.finish();
        }
    }
}
apache-2.0
deternan/Weather-line-bot
libai/src/test/java/ai/api/test/ProtocolProdTest.java
1887
package ai.api.test; /*********************************************************************************************************************** * * API.AI Java SDK - client-side libraries for API.AI * ================================================= * * Copyright (C) 2014 by Speaktoit, Inc. (https://www.speaktoit.com) * https://www.api.ai * *********************************************************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * ***********************************************************************************************************************/ public class ProtocolProdTest extends ProtocolTestBase { // Testing keys protected static final String ACCESS_TOKEN = "3485a96fb27744db83e78b8c4bc9e7b7"; protected String getAccessToken() { return ACCESS_TOKEN; } @Override protected String getSecondAccessToken() { return "968235e8e4954cf0bb0dc07736725ecd"; } protected String getRuAccessToken(){ return "07806228a357411d83064309a279c7fd"; } protected String getBrAccessToken(){ // TODO return ""; } protected String getPtBrAccessToken(){ return "42db6ad6a51c47088318a8104833b66c"; } @Override protected String getJaAccessToken() { // TODO return ""; } }
apache-2.0
bobmcwhirter/drools
drools-core/src/main/java/org/drools/persistence/PersistenceManager.java
232
package org.drools.persistence;

import javax.transaction.xa.XAResource;

/**
 * Contract for a component that persists and restores engine state and
 * participates in XA (two-phase-commit) transactions.
 */
public interface PersistenceManager {

    /**
     * Returns the {@link XAResource} through which this manager enlists in a
     * distributed transaction.
     */
    XAResource getXAResource();

    /**
     * Returns the current transaction handle.
     * NOTE(review): {@code Transaction} is resolved from this package —
     * presumably org.drools.persistence.Transaction; confirm against siblings.
     */
    Transaction getTransaction();

    /** Persists the managed state to the backing store. */
    void save();

    /** Restores the managed state from the backing store. */
    void load();
}
apache-2.0
CliffYuan/netty
src/test/java/org/jboss/netty/handler/codec/serialization/OioOioSocketCompatibleObjectStreamEchoTest.java
1368
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.jboss.netty.handler.codec.serialization;

import java.util.concurrent.Executor;

import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;

/**
 * Runs the compatible-object-stream echo test with an OIO (blocking,
 * thread-per-connection) transport on both the client and the server side.
 * All test logic lives in the abstract base; this class only selects the
 * channel factories.
 */
public class OioOioSocketCompatibleObjectStreamEchoTest extends AbstractSocketCompatibleObjectStreamEchoTest {

    @Override
    protected ChannelFactory newClientSocketChannelFactory(Executor executor) {
        return new OioClientSocketChannelFactory(executor);
    }

    @Override
    protected ChannelFactory newServerSocketChannelFactory(Executor executor) {
        // The same executor serves as both boss and worker pool.
        return new OioServerSocketChannelFactory(executor, executor);
    }
}
apache-2.0
MarkManYUN/coolweather
app/src/main/java/com/coolweather/android/WeatherActivity.java
9815
package com.coolweather.android;

import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.ScrollingView;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;

import java.io.IOException;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;

/**
 * Weather detail screen: shows cached weather immediately when available,
 * otherwise fetches by weather id; supports pull-to-refresh, a drawer for
 * city selection, and a Bing daily background image.
 */
public class WeatherActivity extends AppCompatActivity {

    private ScrollView weatherLayout;
    private TextView titleCity;
    private TextView titleUpdateTime;
    private TextView degreeText;
    private TextView weatherInfoText;
    private LinearLayout forecastLayout;
    private TextView aqiText;
    private TextView pm25Text;
    private TextView comfortText;
    private TextView carWashText;
    private TextView sportText;
    private ImageView bingPicImg;
    public SwipeRefreshLayout swipeRefreshLayout;
    // Currently displayed city's weather id; also used by pull-to-refresh.
    private String mWeatherId;
    public DrawerLayout drawerLayout;
    private Button navButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Draw behind a transparent status bar on Lollipop and above.
        if (Build.VERSION.SDK_INT >= 21) {
            View decorView = getWindow().getDecorView();
            decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                    | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
            getWindow().setStatusBarColor(Color.TRANSPARENT);
        }
        setContentView(R.layout.activity_weather);
        // 初始条件 (bind views)
        weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
        titleCity = (TextView) findViewById(R.id.title_city);
        titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
        degreeText = (TextView) findViewById(R.id.degree_text);
        weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
        forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
        aqiText = (TextView) findViewById(R.id.aqi_text);
        pm25Text = (TextView) findViewById(R.id.pm25_text);
        comfortText = (TextView) findViewById(R.id.comfort_text);
        carWashText = (TextView) findViewById(R.id.car_wash_text);
        sportText = (TextView) findViewById(R.id.sport_text);
        bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
        drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        navButton = (Button) findViewById(R.id.nav_button);
        swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);
        swipeRefreshLayout.setColorSchemeResources(R.color.colorTopic);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            // 有缓存时直接解析天气数据 (cached: parse and show immediately)
            Weather weather = Utility.handleWeatherResponse(weatherString);
            mWeatherId = weather.basic.weatherId;
            showWeatherInfo(weather);
        } else {
            // 无缓存时去服务器查询天气 (no cache: fetch from server)
            // BUGFIX: previously read the "weather_id" extra twice into two
            // variables; read it once and reuse mWeatherId.
            mWeatherId = getIntent().getStringExtra("weather_id");
            weatherLayout.setVisibility(View.INVISIBLE);
            requestWeather(mWeatherId);
        }
        navButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                requestWeather(mWeatherId);
            }
        });
        String bingPic = prefs.getString("bing_pic", null);
        if (bingPic != null) {
            Glide.with(this).load(bingPic).into(bingPicImg);
        } else {
            loadBingPic();
        }
    }

    /**
     * 根据天气ID请求城市天气信息
     * Fetches weather for the given id, caches the raw JSON on success and
     * refreshes the UI; always stops the refresh spinner when done.
     */
    public void requestWeather(final String weatherId) {
        String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId
                + "&key=04ae9fa43fb341b596f719aa6d6babda";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String responseText = response.body().string();
                final Weather weather = Utility.handleWeatherResponse(responseText);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (weather != null && "ok".equals(weather.status)) {
                            SharedPreferences.Editor editor = PreferenceManager
                                    .getDefaultSharedPreferences(WeatherActivity.this).edit();
                            editor.putString("weather", responseText);
                            editor.apply();
                            Toast.makeText(WeatherActivity.this, "成功更新最新天气", Toast.LENGTH_SHORT).show();
                            showWeatherInfo(weather);
                        } else {
                            Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        }
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }
        });
        // Also refresh the background picture on every weather request.
        loadBingPic();
    }

    /** Fetches the Bing daily picture URL, caches it and applies it. */
    private void loadBingPic() {
        String requestBingPic = "http://guolin.tech/api/bing_pic";
        HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic = response.body().string();
                SharedPreferences.Editor editor = PreferenceManager
                        .getDefaultSharedPreferences(WeatherActivity.this).edit();
                editor.putString("bing_pic", bingPic);
                editor.apply();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
                    }
                });
            }
        });
    }

    /**
     * 处理并展示Weather实体类中的数据
     * Renders all sections of the weather entity and, when the payload status
     * is "ok", starts the background auto-update service.
     */
    private void showWeatherInfo(Weather weather) {
        String cityName = weather.basic.cityName;
        String updateTime = "更新时间: " + weather.basic.update.updateTime.split(" ")[1];
        String degree = weather.now.temperature + "ºC";
        String weatherInfo = weather.now.more.info;
        titleCity.setText(cityName);
        titleUpdateTime.setText(updateTime);
        degreeText.setText(degree);
        weatherInfoText.setText(weatherInfo);
        forecastLayout.removeAllViews();
        for (Forecast forecast : weather.forecastList) {
            View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
            TextView dateText = (TextView) view.findViewById(R.id.date_text);
            TextView infoText = (TextView) view.findViewById(R.id.info_text);
            TextView maxText = (TextView) view.findViewById(R.id.max_text);
            TextView minText = (TextView) view.findViewById(R.id.min_text);
            dateText.setText(forecast.date);
            infoText.setText(forecast.more.info);
            maxText.setText(forecast.temperature.max);
            minText.setText(forecast.temperature.min);
            forecastLayout.addView(view);
        }
        if (weather.aqi != null) {
            aqiText.setText(weather.aqi.city.aqi);
            pm25Text.setText(weather.aqi.city.pm25);
        }
        String comfort = "舒适度:" + weather.suggestion.comfort.info;
        String carWash = "洗车指数:" + weather.suggestion.carWash.info;
        String sport = "运动指数:" + weather.suggestion.sport.info;
        comfortText.setText(comfort);
        carWashText.setText(carWash);
        sportText.setText(sport);
        weatherLayout.setVisibility(View.VISIBLE);
        // BUGFIX: the original null-checked `weather` here, AFTER it had
        // already been dereferenced above — the check was dead. Only the
        // status check is meaningful at this point.
        if ("ok".equals(weather.status)) {
            Intent intent = new Intent(this, AutoUpdateService.class);
            startService(intent);
        } else {
            Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
        }
    }
}
apache-2.0
skycdm/HuntFun
app/src/main/java/com/example/cdm/huntfun/activity/ImagePagerActivity.java
2872
package com.example.cdm.huntfun.activity;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.widget.TextView;

import com.example.cdm.huntfun.R;
import com.example.cdm.huntfun.photoView.ImageDetailFragment;
import com.example.cdm.huntfun.widget.HackyViewPager;

import java.util.List;

/**
 * 图片查看器 (full-screen image pager)
 * Displays a swipeable gallery of image URLs received via intent extras,
 * with a "current / total" indicator and position restore on rotation.
 */
public class ImagePagerActivity extends FragmentActivity {

    private static final String STATE_POSITION = "STATE_POSITION";
    public static final String EXTRA_IMAGE_INDEX = "image_index";
    public static final String EXTRA_IMAGE_URLS = "image_urls";

    private HackyViewPager mPager;
    private int pagerPosition;
    private TextView indicator;

    // public static Drawable DEFAULTDRAWABLE;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.umessage_image_detail_pager);
        // DEFAULTDRAWABLE=this.getResources().getDrawable(R.drawable.umessage_load_default);
        pagerPosition = getIntent().getIntExtra(EXTRA_IMAGE_INDEX, 0);
        List<String> urls = getIntent().getStringArrayListExtra(EXTRA_IMAGE_URLS);
        mPager = (HackyViewPager) findViewById(R.id.pager);
        ImagePagerAdapter mAdapter = new ImagePagerAdapter(getSupportFragmentManager(), urls);
        mPager.setAdapter(mAdapter);
        indicator = (TextView) findViewById(R.id.indicator);
        CharSequence text = getString(R.string.xq_viewpager_indicator, 1, mPager.getAdapter().getCount());
        indicator.setText(text);
        // 更新下标 (keep the "n / total" indicator in sync with the pager)
        mPager.addOnPageChangeListener(new OnPageChangeListener() {
            @Override
            public void onPageScrollStateChanged(int arg0) {
            }

            @Override
            public void onPageScrolled(int arg0, float arg1, int arg2) {
            }

            @Override
            public void onPageSelected(int arg0) {
                CharSequence text = getString(R.string.xq_viewpager_indicator, arg0 + 1,
                        mPager.getAdapter().getCount());
                indicator.setText(text);
            }
        });
        // Restore the last viewed page after a configuration change.
        if (savedInstanceState != null) {
            pagerPosition = savedInstanceState.getInt(STATE_POSITION);
        }
        mPager.setCurrentItem(pagerPosition);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // BUGFIX: the original omitted the mandatory super call, which breaks
        // state saving for the rest of the activity/fragment hierarchy.
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_POSITION, mPager.getCurrentItem());
    }

    /** Adapter creating one {@link ImageDetailFragment} per image URL. */
    private class ImagePagerAdapter extends FragmentStatePagerAdapter {

        private final List<String> fileList;

        public ImagePagerAdapter(FragmentManager fm, List<String> fileList) {
            super(fm);
            this.fileList = fileList;
        }

        @Override
        public int getCount() {
            return fileList == null ? 0 : fileList.size();
        }

        @Override
        public Fragment getItem(int position) {
            String url = fileList.get(position);
            return ImageDetailFragment.newInstance(url);
        }
    }
}
apache-2.0
m-m-m/util
version/src/main/java/net/sf/mmm/util/version/impl/VersionIdentifierFormatterPhaseValue.java
1372
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.version.impl;

import net.sf.mmm.util.version.api.DevelopmentPhase;
import net.sf.mmm.util.version.api.VersionIdentifier;

/**
 * This is the implementation of {@link net.sf.mmm.util.lang.api.Formatter} for the {@link DevelopmentPhase#getValue()
 * value} of the {@link VersionIdentifier#getPhase() phase}.
 *
 * @author Joerg Hohwiller (hohwille at users.sourceforge.net)
 * @since 3.0.0
 */
public class VersionIdentifierFormatterPhaseValue extends AbstractVersionIdentifierFormatterString {

  /**
   * The constructor.
   *
   * @param prefix is the static prefix to append before the {@link VersionIdentifier#getPhase() phase}. Will be omitted
   *        if {@link VersionIdentifier#getPhase() phase} is {@code null}.
   * @param maximumLength is the maximum number of letters for the {@link VersionIdentifier#getPhase() phase}. The
   *        default is {@link Integer#MAX_VALUE}.
   */
  public VersionIdentifierFormatterPhaseValue(String prefix, int maximumLength) {

    super(prefix, maximumLength);
  }

  @Override
  protected String getString(VersionIdentifier value) {

    // Collapse the original if/return pair into a single null-safe expression.
    DevelopmentPhase phase = value.getPhase();
    return (phase == null) ? null : phase.getValue();
  }
}
apache-2.0
Zakemi/Escape
src/main/java/Escape/Escape.java
5644
package Escape;

import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.LogManager;

import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;

import Escape.Controller.Controller;
import Escape.Model.Arena;
import Escape.Service.Service;
import Escape.View.Rank;
import Escape.View.View;

/**
 * The main class of the program.
 */
public class Escape extends JFrame {

    static {
        // Load the bundled logging configuration, if present. try-with-resources
        // closes the stream (the original code leaked it on every startup).
        try (InputStream in = Escape.class.getResourceAsStream("/logging.properties")) {
            if (in != null) {
                LogManager.getLogManager().readConfiguration(in);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * The <code>serialVersionUID</code> of the class.
     */
    private static final long serialVersionUID = -3689415169655758824L;

    /**
     * The main JPanel of the <code>frame</code>.
     */
    private JPanel contentPane;

    /**
     * The main <code>Arena</code> object of the program.
     */
    private Arena arena;

    /**
     * Part of the Game tab, the main <code>View</code> object.
     */
    private View view;

    /**
     * The main <code>Controller</code> object of the program.
     */
    private Controller control;

    /**
     * Part of the Rank tab, the main <code>Rank</code> object.
     */
    private Rank rank;

    /**
     * The name of the player.
     * Default is "Guest".
     */
    private String username = "Guest";

    /**
     * The password for the database.
     */
    private String DAOpassword = "pwd";

    /**
     * Main method of the program.
     * Creates the main JFrame object and asks the user to set <code>DAOpassword</code>
     * and <code>username</code> before start the game.
     *
     * @param args command-line parameters
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    Escape frame = new Escape();
                    frame.setVisible(true);
                    // JOptionPane.showInputDialog returns null when the user cancels,
                    // so the null check must run BEFORE equals(). The original code
                    // dereferenced first (and checked username for null only after
                    // equals("")), which threw an NPE on cancel.
                    do {
                        frame.DAOpassword = JOptionPane.showInputDialog(frame, "Enter password for database!");
                    } while (frame.DAOpassword == null || frame.DAOpassword.equals("pwd"));
                    do {
                        frame.username = JOptionPane.showInputDialog(frame, "Enter your in-game name!");
                    } while (frame.username == null || frame.username.equals(""));
                    frame.rank.setDAOpassword(frame.DAOpassword);
                    frame.rank.refreshRank();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Constructor for the main JFrame object.
     * Sets the <code>frame</code> and initialize the <code>arena</code>, <code>view</code>,
     * <code>control</code>, <code>rank</code> variables, add tabs.
     * Calls the <code>createMenuBar</code> for add menu.
     */
    public Escape() {
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setTitle("Escape");
        setBounds(300, 0, 0, 0);
        pack();
        // Size the window so the 600x630 client area is preserved regardless of
        // the platform's window decorations.
        Insets insets = getInsets();
        setSize(new Dimension(insets.left + insets.right + 600, insets.top + insets.bottom + 630));
        contentPane = new JPanel();
        contentPane.setBackground(Color.WHITE);
        arena = new Arena(6, 600);
        view = new View(arena);
        control = new Controller(arena, view);
        view.setControl(control);
        rank = new Rank();
        setContentPane(contentPane);
        contentPane.setLayout(new BorderLayout());
        createMenuBar();
        JTabbedPane tabbedPane = new JTabbedPane();
        tabbedPane.addTab("Game", view);
        tabbedPane.addTab("Rank", rank);
        // Keep keyboard focus on the game view rather than the tab strip.
        tabbedPane.setFocusable(false);
        contentPane.add(tabbedPane);
        setLocationRelativeTo(view.getPlayer());
    }

    /**
     * Creates the Menu and add to the main JFrame.
     * Creates the "New Game", "Save Game" and "Exit" items and
     * add ActionListener for control actions.
     */
    private void createMenuBar() {
        JMenuBar menubar = new JMenuBar();
        JMenu file = new JMenu("File");
        file.setMnemonic(KeyEvent.VK_F);

        JMenuItem newGameMenuItem = new JMenuItem("New Game");
        // Fixed: all three items previously shared the copy-pasted mnemonic VK_E,
        // which made the duplicated shortcuts ambiguous.
        newGameMenuItem.setMnemonic(KeyEvent.VK_N);
        newGameMenuItem.setToolTipText("Start a new game");
        newGameMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                Service.newGame(arena, control, view);
            }
        });

        JMenuItem saveGameMenuItem = new JMenuItem("Save Game");
        saveGameMenuItem.setMnemonic(KeyEvent.VK_S);
        saveGameMenuItem.setToolTipText("Save the actual score and start a new game!");
        saveGameMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                // NOTE(review): debug output left in place intentionally; consider
                // replacing with a logger call.
                System.out.println(control.getPlayerScore() + control.getEnemyScore());
                Service.saveGame(control, username, DAOpassword);
                Service.newGame(arena, control, view);
            }
        });

        JMenuItem exitMenuItem = new JMenuItem("Exit");
        exitMenuItem.setMnemonic(KeyEvent.VK_E);
        exitMenuItem.setToolTipText("Exit application");
        exitMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                System.exit(0);
            }
        });

        file.add(newGameMenuItem);
        file.add(saveGameMenuItem);
        file.add(exitMenuItem);
        menubar.add(file);
        setJMenuBar(menubar);
    }
}
apache-2.0
kevinearls/camel
components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java
4672
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.influxdb;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.influxdb.InfluxDB;

/**
 * The influxdb component allows you to interact with <a href="https://influxdata.com/time-series-platform/influxdb/">InfluxDB</a>, a time series database.
 * <p>
 * Producer-only endpoint: messages may be sent to InfluxDB but not consumed from it
 * (see {@link #createConsumer(Processor)}). The setter javadocs below are harvested by
 * Camel's tooling into the generated component documentation.
 */
@UriEndpoint(firstVersion = "2.18.0", scheme = "influxdb", title = "InfluxDB", syntax = "influxdb:connectionBean", label = "database", producerOnly = true)
public class InfluxDbEndpoint extends DefaultEndpoint {

    // Live connection; resolved from the Camel registry in doStart(), not set via URI.
    private InfluxDB influxDB;

    // URI path part: the name of the registry bean holding the InfluxDB connection.
    @UriPath
    @Metadata(required = "true")
    private String connectionBean;
    @UriParam
    private String databaseName;
    @UriParam(defaultValue = "default")
    private String retentionPolicy = "default";
    @UriParam(defaultValue = "false")
    private boolean batch;
    @UriParam(defaultValue = InfluxDbOperations.INSERT)
    private String operation = InfluxDbOperations.INSERT;
    @UriParam
    private String query;

    public InfluxDbEndpoint(String uri, InfluxDbComponent component) {
        super(uri, component);
    }

    @Override
    public Producer createProducer() throws Exception {
        return new InfluxDbProducer(this);
    }

    /**
     * Always throws: this endpoint is producer-only (producerOnly = true above).
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
    }

    @Override
    protected void doStart() throws Exception {
        // Resolve the InfluxDB connection from the registry by bean name;
        // mandatoryLookup fails fast if no such bean exists.
        influxDB = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, InfluxDB.class);
        log.debug("Resolved the connection with the name {} as {}", connectionBean, influxDB);
        super.doStart();
    }

    @Override
    protected void doStop() throws Exception {
        // Nothing endpoint-specific to tear down; the InfluxDB connection is
        // registry-owned, so it is not closed here.
        super.doStop();
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    public InfluxDB getInfluxDB() {
        return influxDB;
    }

    /**
     * The Influx DB to use
     */
    public void setInfluxDB(InfluxDB influxDB) {
        this.influxDB = influxDB;
    }

    public String getDatabaseName() {
        return databaseName;
    }

    /**
     * The name of the database where the time series will be stored
     */
    public void setDatabaseName(String databaseName) {
        this.databaseName = databaseName;
    }

    public String getRetentionPolicy() {
        return retentionPolicy;
    }

    /**
     * The string that defines the retention policy to the data created by the endpoint
     */
    public void setRetentionPolicy(String retentionPolicy) {
        this.retentionPolicy = retentionPolicy;
    }

    public String getConnectionBean() {
        return connectionBean;
    }

    /**
     * Connection to the influx database, of class InfluxDB.class
     */
    public void setConnectionBean(String connectionBean) {
        this.connectionBean = connectionBean;
    }

    public boolean isBatch() {
        return batch;
    }

    /**
     * Define if this operation is a batch operation or not
     */
    public void setBatch(boolean batch) {
        this.batch = batch;
    }

    public String getOperation() {
        return operation;
    }

    /**
     * Define if this operation is an insert or a query
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    public String getQuery() {
        return query;
    }

    /**
     * Define the query in case of operation query
     */
    public void setQuery(String query) {
        this.query = query;
    }
}
apache-2.0
Liyueyang/NewXmPluginSDK
common_ui/src/main/java/com/xiaomi/smarthome/common/ui/dialog/MLAlertController.java
39780
/* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.xiaomi.smarthome.common.ui.dialog; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; import java.lang.ref.WeakReference; import android.content.Context; import android.content.DialogInterface; import android.database.Cursor; import android.graphics.drawable.Drawable; import android.os.Handler; import android.os.Message; import android.text.TextUtils; import android.util.AttributeSet; import android.view.Gravity; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.WindowManager; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.CheckedTextView; import android.widget.CursorAdapter; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.ScrollView; import android.widget.SimpleCursorAdapter; import android.widget.TextView; import com.xiaomi.common.R; public class MLAlertController { private static final int BIT_BUTTON_POSITIVE = 1; private static final int BIT_BUTTON_NEGATIVE = 2; private static final int BIT_BUTTON_NEUTRAL = 4; private final Context mContext; private final 
DialogInterface mDialogInterface; private final Window mWindow; private CharSequence mTitle; private CharSequence mMessage; private ListView mListView; private View mView; private int mViewSpacingLeft; private int mViewSpacingTop; private int mViewSpacingRight; private int mViewSpacingBottom; private boolean mViewSpacingSpecified = false; private Button mButtonPositive; private CharSequence mButtonPositiveText; private Message mButtonPositiveMessage; private Button mButtonNegative; private CharSequence mButtonNegativeText; private Message mButtonNegativeMessage; private Button mButtonNeutral; private CharSequence mButtonNeutralText; private Message mButtonNeutralMessage; private ScrollView mScrollView; private int mIconId = -1; private Drawable mIcon; private ImageView mIconView; private TextView mTitleView; private TextView mMessageView; private View mCustomTitleView; private boolean mForceInverseBackground; private ListAdapter mAdapter; private int mCheckedItem = -1; private int mAlertDialogLayout; private int mListLayout; private int mListLayoutWithTitle; private int mMultiChoiceItemLayout; private int mSingleChoiceItemLayout; private int mListItemLayout; // add by afei for progressDialog Top and normal is Bottom private int mGravity; private Handler mHandler; private boolean mTransplantBg = false; private boolean mAutoDismiss = true; // 对话框在点击按钮之后是否自动消失 private boolean mCustomBgTransplant = false; View.OnClickListener mButtonHandler = new View.OnClickListener() { public void onClick(View v) { Message m = null; if (v == mButtonPositive && mButtonPositiveMessage != null) { m = Message.obtain(mButtonPositiveMessage); } else if (v == mButtonNegative && mButtonNegativeMessage != null) { m = Message.obtain(mButtonNegativeMessage); } else if (v == mButtonNeutral && mButtonNeutralMessage != null) { m = Message.obtain(mButtonNeutralMessage); } if (m != null) { m.sendToTarget(); } if (mAutoDismiss) { // Post a message so we dismiss after the above handlers are // 
executed mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface) .sendToTarget(); } } }; private static final class ButtonHandler extends Handler { // Button clicks have Message.what as the BUTTON{1,2,3} constant private static final int MSG_DISMISS_DIALOG = 1; private WeakReference<DialogInterface> mDialog; public ButtonHandler(DialogInterface dialog) { mDialog = new WeakReference<DialogInterface>(dialog); } @Override public void handleMessage(Message msg) { switch (msg.what) { case DialogInterface.BUTTON_POSITIVE: case DialogInterface.BUTTON_NEGATIVE: case DialogInterface.BUTTON_NEUTRAL: ((DialogInterface.OnClickListener) msg.obj).onClick(mDialog.get(), msg.what); break; case MSG_DISMISS_DIALOG: ((DialogInterface) msg.obj).dismiss(); } } } public void sendDismissMessage() { mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface).sendToTarget(); } public MLAlertController(Context context, DialogInterface di, Window window) { this(context, di, window, Gravity.BOTTOM); } public MLAlertController(Context context, DialogInterface di, Window window, int gravity) { mContext = context; mDialogInterface = di; mWindow = window; mHandler = new ButtonHandler(di); mAlertDialogLayout = R.layout.ml_alert_dialog; mListLayout = R.layout.ml_select_dialog; mListLayoutWithTitle = R.layout.ml_select_dialog_center; mMultiChoiceItemLayout = R.layout.ml_select_dialog_multichoice; mSingleChoiceItemLayout = R.layout.ml_select_dialog_singlechoice; mListItemLayout = R.layout.ml_select_dialog_item; mGravity = gravity; } static boolean canTextInput(View v) { if (v.onCheckIsTextEditor()) { return true; } if (!(v instanceof ViewGroup)) { return false; } ViewGroup vg = (ViewGroup) v; int i = vg.getChildCount(); while (i > 0) { i--; v = vg.getChildAt(i); if (canTextInput(v)) { return true; } } return false; } public void installContent() { /* We use a custom title so never request a window title */ mWindow.requestFeature(Window.FEATURE_NO_TITLE); 
mWindow.setGravity(mGravity); if (mView == null || !canTextInput(mView)) { mWindow.setFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM, WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM); } mWindow.setContentView(mAlertDialogLayout); setupView(); } public void setTitle(CharSequence title) { mTitle = title; if (mTitleView != null) { mTitleView.setText(title); } } /** * @see android.app.AlertDialog.Builder#setCustomTitle(View) */ public void setCustomTitle(View customTitleView) { mCustomTitleView = customTitleView; } public void setAudoDismiss(boolean autoDismiss) { mAutoDismiss = autoDismiss; } public void setMessage(CharSequence message) { mMessage = message; if (mMessageView != null) { mMessageView.setText(message); } } /** * Set the view to display in the dialog. */ public void setView(View view) { mView = view; mViewSpacingSpecified = false; } public void setCustomTransplant(boolean b) { mCustomBgTransplant = b; } /** * Set the view to display in the dialog along with the spacing around that * view */ public void setView(View view, int viewSpacingLeft, int viewSpacingTop, int viewSpacingRight, int viewSpacingBottom) { mView = view; mViewSpacingSpecified = true; mViewSpacingLeft = viewSpacingLeft; mViewSpacingTop = viewSpacingTop; mViewSpacingRight = viewSpacingRight; mViewSpacingBottom = viewSpacingBottom; } /** * Sets a click listener or a message to be sent when the button is clicked. * You only need to pass one of {@code listener} or {@code msg}. * * @param whichButton Which button, can be one of * {@link DialogInterface#BUTTON_POSITIVE}, * {@link DialogInterface#BUTTON_NEGATIVE}, or * {@link DialogInterface#BUTTON_NEUTRAL} * @param text The text to display in positive button. * @param listener The * {@link DialogInterface.OnClickListener} to * use. * @param msg The {@link Message} to be sent when clicked. 
*/ public void setButton(int whichButton, CharSequence text, DialogInterface.OnClickListener listener, Message msg) { if (msg == null && listener != null) { msg = mHandler.obtainMessage(whichButton, listener); } switch (whichButton) { case DialogInterface.BUTTON_POSITIVE: mButtonPositiveText = text; mButtonPositiveMessage = msg; break; case DialogInterface.BUTTON_NEGATIVE: mButtonNegativeText = text; mButtonNegativeMessage = msg; break; case DialogInterface.BUTTON_NEUTRAL: mButtonNeutralText = text; mButtonNeutralMessage = msg; break; default: throw new IllegalArgumentException("Button does not exist"); } } /** * Set resId to 0 if you don't want an icon. * * @param resId the resourceId of the drawable to use as the icon or 0 if * you don't want an icon. */ public void setIcon(int resId) { mIconId = resId; if (mIconView != null) { if (resId > 0) { mIconView.setImageResource(mIconId); } else if (resId == 0) { mIconView.setVisibility(View.GONE); } } } public void setIcon(Drawable icon) { mIcon = icon; if ((mIconView != null) && (mIcon != null)) { mIconView.setImageDrawable(icon); } } public void setInverseBackgroundForced(boolean forceInverseBackground) { mForceInverseBackground = forceInverseBackground; } public ListView getListView() { return mListView; } public View getView() { return mView; } public Button getButton(int whichButton) { switch (whichButton) { case DialogInterface.BUTTON_POSITIVE: return mButtonPositive; case DialogInterface.BUTTON_NEGATIVE: return mButtonNegative; case DialogInterface.BUTTON_NEUTRAL: return mButtonNeutral; default: return null; } } @SuppressWarnings({ "UnusedDeclaration" }) public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_MENU && mListView != null && mListView.getVisibility() == View.VISIBLE) { this.mDialogInterface.dismiss(); } return mScrollView != null && mScrollView.executeKeyEvent(event); } @SuppressWarnings({ "UnusedDeclaration" }) public boolean onKeyUp(int keyCode, KeyEvent event) { 
return mScrollView != null && mScrollView.executeKeyEvent(event); } private void setupView() { LinearLayout contentPanel = (LinearLayout) mWindow.findViewById(R.id.contentPanel); setupContent(contentPanel); boolean hasButtons = setupButtons(); LinearLayout topPanel = (LinearLayout) mWindow.findViewById(R.id.topPanel); boolean hasTitle = setupTitle(topPanel); View buttonPanel = mWindow.findViewById(R.id.buttonPanel); if (!hasButtons) { buttonPanel.setVisibility(View.GONE); } FrameLayout customPanel = (FrameLayout) mWindow.findViewById(R.id.customPanel); if (mView != null) { // 自定义dialog透明背景 // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(mContext.getResources().getColor(android.R.color.transparent)); FrameLayout custom = (FrameLayout) mWindow.findViewById(R.id.custom); custom.addView(mView); if (mViewSpacingSpecified) { custom.setPadding(mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight, mViewSpacingBottom); if (mCustomBgTransplant) mTransplantBg = true; } if (mListView != null) { ((LinearLayout.LayoutParams) customPanel.getLayoutParams()).weight = 0; } } else { customPanel.setVisibility(View.GONE); } if (mTransplantBg) { mWindow.findViewById(R.id.parentPanel).setBackgroundColor( mContext.getResources().getColor(android.R.color.transparent)); } else { // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(0xffffffff); } if (mListView != null) { // Listview有分割线divider,因此header和listview需要显示分割线 mWindow.findViewById(R.id.title_divider_line).setVisibility(View.VISIBLE); mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.VISIBLE); } else { mWindow.findViewById(R.id.title_divider_line).setVisibility(View.GONE); mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.GONE); } /** * Add margin top for the button panel if we have not any panel */ if (topPanel.getVisibility() == View.GONE && contentPanel.getVisibility() == View.GONE && customPanel.getVisibility() == View.GONE && hasButtons) { 
buttonPanel.setPadding(buttonPanel.getPaddingLeft(), buttonPanel.getPaddingBottom(), buttonPanel.getPaddingRight(), buttonPanel.getPaddingBottom()); } /* * Only display the divider if we have a title and a custom view or a * message. */ if (hasTitle) { // View divider = null; // if (mMessage != null || mView != null || mListView != null) { // divider = mWindow.findViewById(R.id.titleDivider); // } else { // divider = mWindow.findViewById(R.id.titleDividerTop); // } // // if (divider != null) { // divider.setVisibility(View.VISIBLE); // } } setBackground(topPanel, contentPanel, customPanel, hasButtons, hasTitle, buttonPanel); if (TextUtils.isEmpty(mTitle) && TextUtils.isEmpty(mMessage)) { mWindow.findViewById(R.id.empty_view).setVisibility(View.GONE); } } private boolean setupTitle(LinearLayout topPanel) { boolean hasTitle = true; if (mCustomTitleView != null) { // Add the custom title view directly to the topPanel layout LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams( LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT); topPanel.addView(mCustomTitleView, 0, lp); // Hide the title template View titleTemplate = mWindow.findViewById(R.id.title_template); titleTemplate.setVisibility(View.GONE); } else { final boolean hasTextTitle = !TextUtils.isEmpty(mTitle); mIconView = (ImageView) mWindow.findViewById(R.id.icon); if (hasTextTitle) { /* Display the title if a title is supplied, else hide it */ mTitleView = (TextView) mWindow.findViewById(R.id.alertTitle); mTitleView.setText(mTitle); /* * Do this last so that if the user has supplied any icons we * use them instead of the default ones. If the user has * specified 0 then make it disappear. */ if (mIconId > 0) { mIconView.setImageResource(mIconId); } else if (mIcon != null) { mIconView.setImageDrawable(mIcon); } else if (mIconId == 0) { /* * Apply the padding from the icon to ensure the title is * aligned correctly. 
*/ mTitleView.setPadding(mIconView.getPaddingLeft(), mIconView.getPaddingTop(), mIconView.getPaddingRight(), mIconView.getPaddingBottom()); mIconView.setVisibility(View.GONE); } } else { // Hide the title template View titleTemplate = mWindow.findViewById(R.id.title_template); titleTemplate.setVisibility(View.GONE); mIconView.setVisibility(View.GONE); topPanel.setVisibility(View.GONE); hasTitle = false; } } return hasTitle; } private void setupContent(LinearLayout contentPanel) { mScrollView = (ScrollView) mWindow.findViewById(R.id.scrollView); mScrollView.setFocusable(false); // Special case for users that only want to display a String mMessageView = (TextView) mWindow.findViewById(R.id.message); if (mMessageView == null) { return; } if (mMessage != null) { mMessageView.setText(mMessage); } else { mMessageView.setVisibility(View.GONE); mScrollView.removeView(mMessageView); if (mListView != null) { contentPanel.removeView(mWindow.findViewById(R.id.scrollView)); contentPanel.addView(mListView, new LinearLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT)); contentPanel.setLayoutParams(new LinearLayout.LayoutParams(MATCH_PARENT, 0, 1.0f)); } else { contentPanel.setVisibility(View.GONE); } } } private boolean setupButtons() { int whichButtons = 0; mButtonPositive = (Button) mWindow.findViewById(R.id.button1); mButtonPositive.setOnClickListener(mButtonHandler); if (TextUtils.isEmpty(mButtonPositiveText)) { mButtonPositive.setVisibility(View.GONE); } else { mButtonPositive.setText(mButtonPositiveText); mButtonPositive.setVisibility(View.VISIBLE); whichButtons = whichButtons | BIT_BUTTON_POSITIVE; } mButtonNegative = (Button) mWindow.findViewById(R.id.button2); mButtonNegative.setOnClickListener(mButtonHandler); if (TextUtils.isEmpty(mButtonNegativeText)) { mButtonNegative.setVisibility(View.GONE); } else { mButtonNegative.setText(mButtonNegativeText); mButtonNegative.setVisibility(View.VISIBLE); whichButtons = whichButtons | BIT_BUTTON_NEGATIVE; } mButtonNeutral = (Button) 
mWindow.findViewById(R.id.button3); mButtonNeutral.setOnClickListener(mButtonHandler); if (TextUtils.isEmpty(mButtonNeutralText)) { mButtonNeutral.setVisibility(View.GONE); } else { mButtonNeutral.setText(mButtonNeutralText); mButtonNeutral.setVisibility(View.VISIBLE); whichButtons = whichButtons | BIT_BUTTON_NEUTRAL; } if (shouldCenterSingleButton(whichButtons)) { if (whichButtons == BIT_BUTTON_POSITIVE) { centerButton(mButtonPositive); } else if (whichButtons == BIT_BUTTON_NEGATIVE) { centerButton(mButtonNegative); } else if (whichButtons == BIT_BUTTON_NEUTRAL) { centerButton(mButtonNeutral); } } return whichButtons != 0; } private static boolean shouldCenterSingleButton(int whichButton) { return whichButton == BIT_BUTTON_POSITIVE || whichButton == BIT_BUTTON_NEGATIVE || whichButton == BIT_BUTTON_NEUTRAL; } private void centerButton(TextView button) { LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) button.getLayoutParams(); params.gravity = Gravity.CENTER_HORIZONTAL; params.weight = 0.5f; button.setLayoutParams(params); button.setBackgroundResource(R.drawable.common_button); } private void setBackground(LinearLayout topPanel, LinearLayout contentPanel, View customPanel, boolean hasButtons, boolean hasTitle, View buttonPanel) { if (mTransplantBg) { /* Get all the different background required */ int fullDark = mContext.getResources().getColor(android.R.color.transparent); int topDark = mContext.getResources().getColor(android.R.color.transparent); int centerDark = mContext.getResources().getColor(android.R.color.transparent); int bottomDark = mContext.getResources().getColor(android.R.color.transparent); int fullBright = mContext.getResources().getColor(android.R.color.transparent); int topBright = mContext.getResources().getColor(android.R.color.transparent); int centerBright = mContext.getResources().getColor(android.R.color.transparent); int bottomBright = mContext.getResources().getColor(android.R.color.transparent); int bottomMedium = 
mContext.getResources().getColor(android.R.color.transparent); /* * We now set the background of all of the sections of the alert. * First collect together each section that is being displayed along * with whether it is on a light or dark background, then run * through them setting their backgrounds. This is complicated * because we need to correctly use the full, top, middle, and * bottom graphics depending on how many views they are and where * they appear. */ View[] views = new View[4]; boolean[] light = new boolean[4]; View lastView = null; boolean lastLight = false; int pos = 0; if (hasTitle) { views[pos] = topPanel; light[pos] = false; pos++; } /* * The contentPanel displays either a custom text message or a * ListView. If it's text we should use the dark background for * ListView we should use the light background. If neither are there * the contentPanel will be hidden so set it as null. */ views[pos] = (contentPanel.getVisibility() == View.GONE) ? null : contentPanel; light[pos] = mListView != null; pos++; if (customPanel != null) { views[pos] = customPanel; light[pos] = mForceInverseBackground; pos++; } if (hasButtons) { views[pos] = buttonPanel; light[pos] = true; } boolean setView = false; for (pos = 0; pos < views.length; pos++) { View v = views[pos]; if (v == null) { continue; } if (lastView != null) { if (!setView) { lastView.setBackgroundResource(lastLight ? topBright : topDark); } else { lastView.setBackgroundResource(lastLight ? centerBright : centerDark); } setView = true; } lastView = v; lastLight = light[pos]; } if (lastView != null) { if (setView) { /* * ListViews will use the Bright background but buttons use * the Medium background. */ lastView.setBackgroundResource( lastLight ? (hasButtons ? bottomMedium : bottomBright) : bottomDark); } else { lastView.setBackgroundResource(lastLight ? 
fullBright : fullDark); } } } if ((mListView != null) && (mAdapter != null)) { mListView.setAdapter(mAdapter); if (mCheckedItem > -1) { mListView.setItemChecked(mCheckedItem, true); mListView.setSelection(mCheckedItem); } } } public static class RecycleListView extends ListView { boolean mRecycleOnMeasure = true; public RecycleListView(Context context) { super(context); } public RecycleListView(Context context, AttributeSet attrs) { super(context, attrs); } public RecycleListView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } protected boolean recycleOnMeasure() { return mRecycleOnMeasure; } } public static class AlertParams { public final Context mContext; public final LayoutInflater mInflater; public int mIconId = 0; public Drawable mIcon; public CharSequence mTitle; public View mCustomTitleView; public CharSequence mMessage; public CharSequence mPositiveButtonText; public DialogInterface.OnClickListener mPositiveButtonListener; public CharSequence mNegativeButtonText; public DialogInterface.OnClickListener mNegativeButtonListener; public CharSequence mNeutralButtonText; public DialogInterface.OnClickListener mNeutralButtonListener; public boolean mCancelable; public DialogInterface.OnCancelListener mOnCancelListener; public DialogInterface.OnKeyListener mOnKeyListener; public CharSequence[] mItems; public ListAdapter mAdapter; public DialogInterface.OnClickListener mOnClickListener; public View mView; public int mViewSpacingLeft; public int mViewSpacingTop; public int mViewSpacingRight; public int mViewSpacingBottom; public boolean mViewSpacingSpecified = false; public boolean[] mCheckedItems; public boolean mIsMultiChoice; public boolean mIsSingleChoice; public int mCheckedItem = -1; public DialogInterface.OnMultiChoiceClickListener mOnCheckboxClickListener; public Cursor mCursor; public String mLabelColumn; public String mIsCheckedColumn; public boolean mForceInverseBackground; public AdapterView.OnItemSelectedListener 
mOnItemSelectedListener; public OnPrepareListViewListener mOnPrepareListViewListener; public boolean mRecycleOnMeasure = true; public boolean mAutoDismiss = true; public MLAlertDialog.DismissCallBack mDismissCallBack; public CharSequence mCustomTitle; public boolean mCustomBgTransplant = false; /** * Interface definition for a callback to be invoked before the ListView * will be bound to an adapter. */ public interface OnPrepareListViewListener { /** * Called before the ListView is bound to an adapter. * * @param listView The ListView that will be shown in the dialog. */ void onPrepareListView(ListView listView); } public AlertParams(Context context) { mContext = context; mCancelable = true; mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); } public void apply(MLAlertController dialog) { if (mCustomTitleView != null) { dialog.setCustomTitle(mCustomTitleView); } else { if (mTitle != null) { dialog.setTitle(mTitle); } if (mIcon != null) { dialog.setIcon(mIcon); } if (mIconId >= 0) { dialog.setIcon(mIconId); } } if (mMessage != null) { dialog.setMessage(mMessage); } if (mPositiveButtonText != null) { dialog.setButton(DialogInterface.BUTTON_POSITIVE, mPositiveButtonText, mPositiveButtonListener, null); } if (mNegativeButtonText != null) { dialog.setButton(DialogInterface.BUTTON_NEGATIVE, mNegativeButtonText, mNegativeButtonListener, null); } if (mNeutralButtonText != null) { dialog.setButton(DialogInterface.BUTTON_NEUTRAL, mNeutralButtonText, mNeutralButtonListener, null); } if (mForceInverseBackground) { dialog.setInverseBackgroundForced(true); } // For a list, the client can either supply an array of items or an // adapter or a cursor dialog.mTransplantBg = false; if ((mItems != null) || (mCursor != null) || (mAdapter != null)) { if (dialog.mGravity == Gravity.CENTER) { createCenterListView(dialog); } else { createListView(dialog); } } if (mView != null) { if (mViewSpacingSpecified) { dialog.setView(mView, mViewSpacingLeft, 
mViewSpacingTop, mViewSpacingRight, mViewSpacingBottom); } else { dialog.setView(mView); } } dialog.setAudoDismiss(mAutoDismiss); dialog.setCustomTransplant(mCustomBgTransplant); } private void createCenterListView(final MLAlertController dialog) { final LinearLayout customView = (LinearLayout) mInflater.inflate(dialog.mListLayoutWithTitle, null); final RecycleListView listView = (RecycleListView) customView .findViewById(R.id.select_dialog_listview); ListAdapter adapter; int layout = R.layout.ml_center_item; if (mCursor == null) { adapter = (mAdapter != null) ? mAdapter : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems); } else { adapter = new SimpleCursorAdapter(mContext, layout, mCursor, new String[] { mLabelColumn }, new int[] { R.id.text1 }); } if (mCustomTitle != null) { ((TextView) (customView.findViewById(R.id.title))).setText(mCustomTitle); } if (mOnPrepareListViewListener != null) { mOnPrepareListViewListener.onPrepareListView(listView); } /* * Don't directly set the adapter on the ListView as we might want * to add a footer to the ListView later. 
*/ dialog.mAdapter = adapter; listView.setAdapter(adapter); dialog.mCheckedItem = mCheckedItem; if (mOnClickListener != null) { listView.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position, long id) { mOnClickListener.onClick(dialog.mDialogInterface, position); if (!mIsSingleChoice) { dialog.mDialogInterface.dismiss(); } } }); } else if (mOnCheckboxClickListener != null) { listView.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position, long id) { if (mCheckedItems != null) { mCheckedItems[position] = listView.isItemChecked(position); } mOnCheckboxClickListener.onClick( dialog.mDialogInterface, position, listView.isItemChecked(position)); } }); } // Attach a given OnItemSelectedListener to the ListView if (mOnItemSelectedListener != null) { listView.setOnItemSelectedListener(mOnItemSelectedListener); } if (mOnItemSelectedListener != null) { listView.setOnItemSelectedListener(mOnItemSelectedListener); } if (mIsSingleChoice) { listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); } else if (mIsMultiChoice) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); } listView.mRecycleOnMeasure = mRecycleOnMeasure; dialog.mView = customView; dialog.mTransplantBg = true; dialog.setCustomTransplant(mCustomBgTransplant); } private void createListView(final MLAlertController dialog) { final RecycleListView listView = (RecycleListView) mInflater.inflate(dialog.mListLayout, null); ListAdapter adapter; if (mIsMultiChoice) { if (mCursor == null) { adapter = new ArrayAdapter<CharSequence>( mContext, dialog.mMultiChoiceItemLayout, R.id.text1, mItems) { @Override public View getView(int position, View convertView, ViewGroup parent) { View view = super.getView(position, convertView, parent); if (mCheckedItems != null) { boolean isItemChecked = mCheckedItems[position]; if (isItemChecked) { listView.setItemChecked(position, true); } } return view; } }; } else { 
adapter = new CursorAdapter(mContext, mCursor, false) { private final int mLabelIndex; private final int mIsCheckedIndex; { final Cursor cursor = getCursor(); mLabelIndex = cursor.getColumnIndexOrThrow(mLabelColumn); mIsCheckedIndex = cursor.getColumnIndexOrThrow(mIsCheckedColumn); } @Override public void bindView(View view, Context context, Cursor cursor) { CheckedTextView text = (CheckedTextView) view.findViewById(R.id.text1); text.setText(cursor.getString(mLabelIndex)); listView.setItemChecked(cursor.getPosition(), cursor.getInt(mIsCheckedIndex) == 1); } @Override public View newView(Context context, Cursor cursor, ViewGroup parent) { return mInflater.inflate(dialog.mMultiChoiceItemLayout, parent, false); } }; } } else { int layout = mIsSingleChoice ? dialog.mSingleChoiceItemLayout : dialog.mListItemLayout; if (mCursor == null) { adapter = (mAdapter != null) ? mAdapter : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems); } else { adapter = new SimpleCursorAdapter(mContext, layout, mCursor, new String[] { mLabelColumn }, new int[] { R.id.text1 }); } } if (mOnPrepareListViewListener != null) { mOnPrepareListViewListener.onPrepareListView(listView); } /* * Don't directly set the adapter on the ListView as we might want * to add a footer to the ListView later. 
*/ dialog.mAdapter = adapter; dialog.mCheckedItem = mCheckedItem; if (mOnClickListener != null) { listView.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position, long id) { mOnClickListener.onClick(dialog.mDialogInterface, position); if (!mIsSingleChoice) { dialog.mDialogInterface.dismiss(); } } }); } else if (mOnCheckboxClickListener != null) { listView.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(AdapterView parent, View v, int position, long id) { if (mCheckedItems != null) { mCheckedItems[position] = listView.isItemChecked(position); } mOnCheckboxClickListener.onClick( dialog.mDialogInterface, position, listView.isItemChecked(position)); } }); } // Attach a given OnItemSelectedListener to the ListView if (mOnItemSelectedListener != null) { listView.setOnItemSelectedListener(mOnItemSelectedListener); } if (mIsSingleChoice) { listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); } else if (mIsMultiChoice) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); } listView.mRecycleOnMeasure = mRecycleOnMeasure; dialog.mListView = listView; dialog.setCustomTransplant(mCustomBgTransplant); } } }
apache-2.0
reddcoin-project/reddcoinj-pow
core/src/test/java/com/google/reddcoin/net/discovery/SeedPeersTest.java
1795
/** * Copyright 2011 Micheal Swiggs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.reddcoin.net.discovery; import com.google.reddcoin.params.MainNetParams; import org.junit.Test; import java.net.InetSocketAddress; import java.util.concurrent.TimeUnit; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.Assert.assertThat; public class SeedPeersTest { @Test public void getPeer_one() throws Exception{ SeedPeers seedPeers = new SeedPeers(MainNetParams.get()); assertThat(seedPeers.getPeer(), notNullValue()); } @Test public void getPeer_all() throws Exception{ SeedPeers seedPeers = new SeedPeers(MainNetParams.get()); for(int i = 0; i < SeedPeers.seedAddrs.length; ++i){ assertThat("Failed on index: "+i, seedPeers.getPeer(), notNullValue()); } assertThat(seedPeers.getPeer(), equalTo(null)); } @Test public void getPeers_length() throws Exception{ SeedPeers seedPeers = new SeedPeers(MainNetParams.get()); InetSocketAddress[] addresses = seedPeers.getPeers(0, TimeUnit.SECONDS); assertThat(addresses.length, equalTo(SeedPeers.seedAddrs.length)); } }
apache-2.0
bxf12315/drools
drools-core/src/main/java/org/drools/core/reteoo/EntryPointNode.java
20815
/* * Copyright 2007 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.core.reteoo; import org.drools.core.base.ClassObjectType; import org.drools.core.common.InternalFactHandle; import org.drools.core.common.InternalWorkingMemory; import org.drools.core.common.InternalWorkingMemoryEntryPoint; import org.drools.core.common.PropagationContextFactory; import org.drools.core.common.RuleBasePartitionId; import org.drools.core.util.Iterator; import org.drools.core.util.ObjectHashSet.ObjectEntry; import org.drools.core.reteoo.LeftInputAdapterNode.LiaNodeMemory; import org.drools.core.reteoo.ObjectTypeNode.ObjectTypeNodeMemory; import org.drools.core.reteoo.builder.BuildContext; import org.drools.core.rule.EntryPointId; import org.drools.core.spi.ObjectType; import org.drools.core.spi.PropagationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * A node that is an entry point into the Rete network. * * As we move the design to support network partitions and concurrent processing * of parts of the network, we also need to support multiple, independent entry * points and this class represents that. * * It replaces the function of the Rete Node class in previous designs. 
* * @see ObjectTypeNode */ public class EntryPointNode extends ObjectSource implements Externalizable, ObjectSink { // ------------------------------------------------------------ // Instance members // ------------------------------------------------------------ private static final long serialVersionUID = 510l; protected static transient Logger log = LoggerFactory.getLogger(EntryPointNode.class); /** * The entry point ID for this node */ private EntryPointId entryPoint; /** * The object type nodes under this node */ private Map<ObjectType, ObjectTypeNode> objectTypeNodes; private ObjectTypeNode queryNode; private ObjectTypeNode activationNode; // ------------------------------------------------------------ // Constructors // ------------------------------------------------------------ public EntryPointNode() { } public EntryPointNode(final int id, final ObjectSource objectSource, final BuildContext context) { this( id, context.getPartitionId(), context.getKnowledgeBase().getConfiguration().isMultithreadEvaluation(), objectSource, context.getCurrentEntryPoint() ); // irrelevant for this node, since it overrides sink management } public EntryPointNode(final int id, final RuleBasePartitionId partitionId, final boolean partitionsEnabled, final ObjectSource objectSource, final EntryPointId entryPoint) { super( id, partitionId, partitionsEnabled, objectSource, 999 ); // irrelevant for this node, since it overrides sink management this.entryPoint = entryPoint; this.objectTypeNodes = new ConcurrentHashMap<ObjectType, ObjectTypeNode>(); } // ------------------------------------------------------------ // Instance methods // ------------------------------------------------------------ @SuppressWarnings("unchecked") public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { super.readExternal( in ); entryPoint = (EntryPointId) in.readObject(); objectTypeNodes = (Map<ObjectType, ObjectTypeNode>) in.readObject(); } public void 
writeExternal(ObjectOutput out) throws IOException { super.writeExternal( out ); out.writeObject( entryPoint ); out.writeObject( objectTypeNodes ); } public short getType() { return NodeTypeEnums.EntryPointNode; } /** * @return the entryPoint */ public EntryPointId getEntryPoint() { return entryPoint; } void setEntryPoint(EntryPointId entryPoint) { this.entryPoint = entryPoint; } public void assertQuery(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( queryNode == null ) { this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType ); } if ( queryNode != null ) { // There may be no queries defined this.queryNode.assertObject( factHandle, context, workingMemory ); } } public void retractQuery(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( queryNode == null ) { this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType ); } if ( queryNode != null ) { // There may be no queries defined this.queryNode.retractObject( factHandle, context, workingMemory ); } } public void modifyQuery(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( queryNode == null ) { this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType ); } if ( queryNode != null ) { ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this ); factHandle.clearLeftTuples(); factHandle.clearRightTuples(); // There may be no queries defined this.queryNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory ); modifyPreviousTuples.retractTuples( context, workingMemory ); } } public ObjectTypeNode getQueryNode() { if ( queryNode == null ) { this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType ); } return this.queryNode; } public 
void assertActivation(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( activationNode == null ) { this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType ); } if ( activationNode != null ) { // There may be no queries defined this.activationNode.assertObject( factHandle, context, workingMemory ); } } public void retractActivation(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( activationNode == null ) { this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType ); } if ( activationNode != null ) { // There may be no queries defined this.activationNode.retractObject( factHandle, context, workingMemory ); } } public void modifyActivation(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { if ( activationNode == null ) { this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType ); } if ( activationNode != null ) { ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this ); factHandle.clearLeftTuples(); factHandle.clearRightTuples(); // There may be no queries defined this.activationNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory ); modifyPreviousTuples.retractTuples( context, workingMemory ); } } public void assertObject(final InternalFactHandle handle, final PropagationContext context, final ObjectTypeConf objectTypeConf, final InternalWorkingMemory workingMemory) { if ( log.isTraceEnabled() ) { log.trace( "Insert {}", handle.toString() ); } ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes(); for ( int i = 0, length = cachedNodes.length; i < length; i++ ) { cachedNodes[i].assertObject( handle, context, workingMemory ); } } public void modifyObject(final InternalFactHandle handle, 
final PropagationContext pctx, final ObjectTypeConf objectTypeConf, final InternalWorkingMemory wm) { if ( log.isTraceEnabled() ) { log.trace( "Update {}", handle.toString() ); } ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes(); // make a reference to the previous tuples, then null then on the handle ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(handle.getFirstLeftTuple(), handle.getFirstRightTuple(), this ); handle.clearLeftTuples(); handle.clearRightTuples(); for ( int i = 0, length = cachedNodes.length; i < length; i++ ) { cachedNodes[i].modifyObject( handle, modifyPreviousTuples, pctx, wm ); // remove any right tuples that matches the current OTN before continue the modify on the next OTN cache entry if (i < cachedNodes.length - 1) { RightTuple rightTuple = modifyPreviousTuples.peekRightTuple(); while ( rightTuple != null && (( BetaNode ) rightTuple.getRightTupleSink()).getObjectTypeNode() == cachedNodes[i] ) { modifyPreviousTuples.removeRightTuple(); doRightDelete(pctx, wm, rightTuple); rightTuple = modifyPreviousTuples.peekRightTuple(); } LeftTuple leftTuple; ObjectTypeNode otn; while ( true ) { leftTuple = modifyPreviousTuples.peekLeftTuple(); otn = null; if (leftTuple != null) { LeftTupleSink leftTupleSink = leftTuple.getLeftTupleSink(); if (leftTupleSink instanceof LeftTupleSource) { otn = ((LeftTupleSource)leftTupleSink).getLeftTupleSource().getObjectTypeNode(); } else if (leftTupleSink instanceof RuleTerminalNode) { otn = ((RuleTerminalNode)leftTupleSink).getObjectTypeNode(); } } if ( otn == null || otn == cachedNodes[i+1] ) break; modifyPreviousTuples.removeLeftTuple(); doDeleteObject(pctx, wm, leftTuple); } } } modifyPreviousTuples.retractTuples( pctx, wm ); } public void doDeleteObject(PropagationContext pctx, InternalWorkingMemory wm, LeftTuple leftTuple) { LeftInputAdapterNode liaNode = (LeftInputAdapterNode) leftTuple.getLeftTupleSink().getLeftTupleSource(); LiaNodeMemory lm = ( LiaNodeMemory ) 
wm.getNodeMemory( liaNode ); LeftInputAdapterNode.doDeleteObject( leftTuple, pctx, lm.getSegmentMemory(), wm, liaNode, true, lm ); } public void doRightDelete(PropagationContext pctx, InternalWorkingMemory wm, RightTuple rightTuple) { rightTuple.setPropagationContext( pctx ); rightTuple.getRightTupleSink().retractRightTuple( rightTuple, pctx, wm ); } public void modifyObject(InternalFactHandle factHandle, ModifyPreviousTuples modifyPreviousTuples, PropagationContext context, InternalWorkingMemory workingMemory) { // this method was silently failing, so I am now throwing an exception to make // sure no one calls it by mistake throw new UnsupportedOperationException( "This method should NEVER EVER be called" ); } /** * This is the entry point into the network for all asserted Facts. Iterates a cache * of matching <code>ObjectTypdeNode</code>s asserting the Fact. If the cache does not * exist it first iterates and builds the cache. * * @param factHandle * The FactHandle of the fact to assert * @param context * The <code>PropagationContext</code> of the <code>WorkingMemory</code> action * @param workingMemory * The working memory session. */ public void assertObject(final InternalFactHandle factHandle, final PropagationContext context, final InternalWorkingMemory workingMemory) { // this method was silently failing, so I am now throwing an exception to make // sure no one calls it by mistake throw new UnsupportedOperationException( "This method should NEVER EVER be called" ); } /** * Retract a fact object from this <code>RuleBase</code> and the specified * <code>WorkingMemory</code>. * * @param handle * The handle of the fact to retract. * @param workingMemory * The working memory session. 
*/ public void retractObject(final InternalFactHandle handle, final PropagationContext context, final ObjectTypeConf objectTypeConf, final InternalWorkingMemory workingMemory) { if ( log.isTraceEnabled() ) { log.trace( "Delete {}", handle.toString() ); } ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes(); if ( cachedNodes == null ) { // it is possible that there are no ObjectTypeNodes for an object being retracted return; } for ( int i = 0; i < cachedNodes.length; i++ ) { cachedNodes[i].retractObject( handle, context, workingMemory ); } } /** * Adds the <code>ObjectSink</code> so that it may receive * <code>Objects</code> propagated from this <code>ObjectSource</code>. * * @param objectSink * The <code>ObjectSink</code> to receive propagated * <code>Objects</code>. Rete only accepts <code>ObjectTypeNode</code>s * as parameters to this method, though. */ public void addObjectSink(final ObjectSink objectSink) { final ObjectTypeNode node = (ObjectTypeNode) objectSink; this.objectTypeNodes.put( node.getObjectType(), node ); } public void removeObjectSink(final ObjectSink objectSink) { final ObjectTypeNode node = (ObjectTypeNode) objectSink; this.objectTypeNodes.remove( node.getObjectType() ); } public void attach( BuildContext context ) { this.source.addObjectSink( this ); if (context == null ) { return; } if ( context.getKnowledgeBase().getConfiguration().isPhreakEnabled() ) { for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) { workingMemory.updateEntryPointsCache(); } return; } for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) { workingMemory.updateEntryPointsCache(); PropagationContextFactory pctxFactory = workingMemory.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory(); final PropagationContext propagationContext = pctxFactory.createPropagationContext(workingMemory.getNextPropagationIdCounter(), PropagationContext.RULE_ADDITION, null, null, null); 
this.source.updateSink( this, propagationContext, workingMemory ); } } protected void doRemove(final RuleRemovalContext context, final ReteooBuilder builder, final InternalWorkingMemory[] workingMemories) { } public Map<ObjectType, ObjectTypeNode> getObjectTypeNodes() { return this.objectTypeNodes; } public int hashCode() { return this.entryPoint.hashCode(); } public boolean equals(final Object object) { if ( object == this ) { return true; } if ( object == null || !(object instanceof EntryPointNode) ) { return false; } final EntryPointNode other = (EntryPointNode) object; return this.entryPoint.equals( other.entryPoint ); } public void updateSink(final ObjectSink sink, final PropagationContext context, final InternalWorkingMemory workingMemory) { // @todo // JBRULES-612: the cache MUST be invalidated when a new node type is added to the network, so iterate and reset all caches. final ObjectTypeNode node = (ObjectTypeNode) sink; final ObjectType newObjectType = node.getObjectType(); InternalWorkingMemoryEntryPoint wmEntryPoint = (InternalWorkingMemoryEntryPoint) workingMemory.getWorkingMemoryEntryPoint( this.entryPoint.getEntryPointId() ); for ( ObjectTypeConf objectTypeConf : wmEntryPoint.getObjectTypeConfigurationRegistry().values() ) { if ( newObjectType.isAssignableFrom( objectTypeConf.getConcreteObjectTypeNode().getObjectType() ) ) { objectTypeConf.resetCache(); ObjectTypeNode sourceNode = objectTypeConf.getConcreteObjectTypeNode(); Iterator it = ((ObjectTypeNodeMemory) workingMemory.getNodeMemory( sourceNode )).memory.iterator(); for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) { sink.assertObject( (InternalFactHandle) entry.getValue(), context, workingMemory ); } } } } public boolean isObjectMemoryEnabled() { return false; } public void setObjectMemoryEnabled(boolean objectMemoryEnabled) { throw new UnsupportedOperationException( "Entry Point Node has no Object memory" ); } public String toString() { return 
"[EntryPointNode(" + this.id + ") " + this.entryPoint + " ]"; } public void byPassModifyToBetaNode(InternalFactHandle factHandle, ModifyPreviousTuples modifyPreviousTuples, PropagationContext context, InternalWorkingMemory workingMemory) { throw new UnsupportedOperationException(); } @Override public long calculateDeclaredMask(List<String> settableProperties) { throw new UnsupportedOperationException(); } }
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/ListVPCAssociationAuthorizationsRequest.java
11109
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.route53.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * A complex type that contains information about that can be associated with your hosted zone. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53-2013-04-01/ListVPCAssociationAuthorizations" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListVPCAssociationAuthorizationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. * </p> */ private String hostedZoneId; /** * <p> * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and include * the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another * <code>ListVPCAssociationAuthorizations</code> request. * </p> */ private String nextToken; /** * <p> * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. 
If * you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page. * </p> */ private String maxResults; /** * <p> * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. * </p> * * @param hostedZoneId * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. */ public void setHostedZoneId(String hostedZoneId) { this.hostedZoneId = hostedZoneId; } /** * <p> * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. * </p> * * @return The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. */ public String getHostedZoneId() { return this.hostedZoneId; } /** * <p> * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. * </p> * * @param hostedZoneId * The ID of the hosted zone for which you want a list of VPCs that can be associated with the hosted zone. * @return Returns a reference to this object so that method calls can be chained together. */ public ListVPCAssociationAuthorizationsRequest withHostedZoneId(String hostedZoneId) { setHostedZoneId(hostedZoneId); return this; } /** * <p> * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and include * the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another * <code>ListVPCAssociationAuthorizations</code> request. * </p> * * @param nextToken * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. 
To get the next page of results, submit another request, and * include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in * another <code>ListVPCAssociationAuthorizations</code> request. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and include * the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another * <code>ListVPCAssociationAuthorizations</code> request. * </p> * * @return <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and * include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in * another <code>ListVPCAssociationAuthorizations</code> request. */ public String getNextToken() { return this.nextToken; } /** * <p> * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and include * the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in another * <code>ListVPCAssociationAuthorizations</code> request. * </p> * * @param nextToken * <i>Optional</i>: If a response includes a <code>NextToken</code> element, there are more VPCs that can be * associated with the specified hosted zone. To get the next page of results, submit another request, and * include the value of <code>NextToken</code> from the response in the <code>nexttoken</code> parameter in * another <code>ListVPCAssociationAuthorizations</code> request. 
* @return Returns a reference to this object so that method calls can be chained together. */ public ListVPCAssociationAuthorizationsRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * <p> * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If * you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page. * </p> * * @param maxResults * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to * return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs * per page. */ public void setMaxResults(String maxResults) { this.maxResults = maxResults; } /** * <p> * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If * you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page. * </p> * * @return <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to * return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs * per page. */ public String getMaxResults() { return this.maxResults; } /** * <p> * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to return. If * you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs per page. * </p> * * @param maxResults * <i>Optional</i>: An integer that specifies the maximum number of VPCs that you want Amazon Route 53 to * return. If you don't specify a value for <code>MaxResults</code>, Amazon Route 53 returns up to 50 VPCs * per page. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ListVPCAssociationAuthorizationsRequest withMaxResults(String maxResults) { setMaxResults(maxResults); return this; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getHostedZoneId() != null) sb.append("HostedZoneId: ").append(getHostedZoneId()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()).append(","); if (getMaxResults() != null) sb.append("MaxResults: ").append(getMaxResults()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListVPCAssociationAuthorizationsRequest == false) return false; ListVPCAssociationAuthorizationsRequest other = (ListVPCAssociationAuthorizationsRequest) obj; if (other.getHostedZoneId() == null ^ this.getHostedZoneId() == null) return false; if (other.getHostedZoneId() != null && other.getHostedZoneId().equals(this.getHostedZoneId()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getMaxResults() == null ^ this.getMaxResults() == null) return false; if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getHostedZoneId() == null) ? 0 : getHostedZoneId().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getMaxResults() == null) ? 
0 : getMaxResults().hashCode()); return hashCode; } @Override public ListVPCAssociationAuthorizationsRequest clone() { return (ListVPCAssociationAuthorizationsRequest) super.clone(); } }
apache-2.0
lemire/incubator-kylin
invertedindex/src/main/java/org/apache/kylin/invertedindex/index/ColumnValueContainer.java
1361
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.invertedindex.index;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;

import org.roaringbitmap.RoaringBitmap;

/**
 * A write-once container of the encoded values of a single column.
 * Usage protocol visible from this interface: append values, then call
 * {@link #closeForChange()}, after which the read-side methods are usable.
 *
 * @author yangli9
 */
public interface ColumnValueContainer {

    /** Appends one encoded column value to the container. */
    void append(ImmutableBytesWritable valueBytes);

    /** Seals the container; reads below are only valid after this call. */
    void closeForChange();

    /** Number of values appended so far. */
    int getSize();

    // works only after closeForChange()
    /** Copies the i-th value into {@code valueBytes} (valid after closeForChange()). */
    void getValueAt(int i, ImmutableBytesWritable valueBytes);

    // NOTE(review): boxed Integer params suggest startId/endId may be nullable
    // (open-ended range) — confirm against implementations before relying on it.
    RoaringBitmap getBitMap(Integer startId, Integer endId);

    /** Largest value id held by this container. */
    int getMaxValueId();

}
apache-2.0
altran/Valuereporter
src/main/java/org/valuereporter/observation/ObservedMethodsResouce.java
4690
package org.valuereporter.observation;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.valuereporter.QueryOperations;
import org.valuereporter.WriteOperations;

import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;

/**
 * JAX-RS resource exposing observed method measurements under /observedmethods.
 * Reads go through {@link QueryOperations}, writes through {@link WriteOperations};
 * both are backed by the same {@link ObservationsService} instance.
 *
 * @author <a href="mailto:erik-dev@fjas.no">Erik Drolshammer</a>
 */
@Component
@Path("/observedmethods")
public class ObservedMethodsResouce {
    private static final Logger log = LoggerFactory.getLogger(ObservedMethodsResouce.class);

    private final QueryOperations queryOperations;
    private final WriteOperations writeOperations;
    private final ObjectMapper mapper;

    /**
     * The single ObservationsService implements both the query and the write side.
     * (A previous constructor taking the two interfaces separately was dead,
     * commented-out code and has been removed.)
     */
    @Autowired
    public ObservedMethodsResouce(ObservationsService observationsService, ObjectMapper mapper) {
        this.queryOperations = observationsService;
        this.writeOperations = observationsService;
        this.mapper = mapper;
    }

    //http://localhost:4901/reporter/observe/observedmethods/{prefix}/{name}
    /**
     * A request with no filtering parameters should return a list of all observations.
     *
     * @param prefix prefix used to identify running process
     * @param name   package.classname.method
     * @return JSON array of observations, or 500 on serialization failure
     */
    @GET
    @Path("/{prefix}/{name}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response findObservationsByName(@PathParam("prefix") String prefix, @PathParam("name") String name) {
        final List<ObservedMethod> observedMethods;
        //Should also support no queryParams -> findAll
        // NOTE(review): with @PathParam on a matched route, name is presumably
        // never null here — the else branch looks unreachable; confirm before removing.
        if (name != null) {
            log.trace("findObservationsByName name={}", name);
            observedMethods = queryOperations.findObservationsByName(prefix, name);
        } else {
            throw new UnsupportedOperationException("You must supply a name. <package.classname.method>");
        }
        Writer strWriter = new StringWriter();
        try {
            mapper.writeValue(strWriter, observedMethods);
        } catch (IOException e) {
            log.error("Could not convert {} ObservedMethod to JSON.", observedMethods.size(), e);
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
        }
        return Response.ok(strWriter.toString()).build();
    }

    //http://localhost:4901/reporter/observe/observedmethods/{prefix}
    /**
     * Stores a batch of observed methods posted as a JSON array.
     * Every deserialized entry is stamped with the path prefix before persisting.
     *
     * @param prefix   prefix used to identify running process
     * @param jsonBody JSON array of ObservedMethodJson entries
     * @return 200 with a summary message, 406 if the body cannot be parsed,
     *         or 500 if the response itself cannot be serialized
     */
    @POST
    @Path("/{prefix}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response addObservationMethod(@PathParam("prefix") String prefix, String jsonBody) {
        log.trace("addObservationMethod prefix {} , jsonBody {}.", prefix, jsonBody);
        List<ObservedMethod> observedMethods = null;
        try {
            observedMethods = mapper.readValue(jsonBody, new TypeReference<ArrayList<ObservedMethodJson>>() {
            });
            if (observedMethods != null) {
                for (ObservedMethod observedMethod : observedMethods) {
                    observedMethod.setPrefix(prefix);
                }
            }
        } catch (IOException e) {
            log.warn("Unexpected error trying to produce list of ObservedMethod from \n prefix {} \n json {}, \n Reason {}", prefix, jsonBody, e.getMessage());
            return Response.status(Response.Status.NOT_ACCEPTABLE).entity("Error converting to requested format.").build();
        }
        long updatedCount = writeOperations.addObservations(prefix, observedMethods);
        String message = "added " + updatedCount + " observedMethods.";
        Writer strWriter = new StringWriter();
        try {
            mapper.writeValue(strWriter, message);
        } catch (IOException e) {
            log.error("Could not convert {} to JSON.", updatedCount, e);
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
        }
        return Response.ok(strWriter.toString()).build();
    }

}
apache-2.0
noobyang/AndroidStudy
game/src/main/java/com/lee/game/MainActivity.java
7946
package com.lee.game;

import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;

import com.lee.base.activity.BaseActivity;
import com.lee.base.application.PackageNameContainer;
import com.noobyang.log.LogUtil;

import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import butterknife.BindView;
import butterknife.ButterKnife;

/**
 * Main Activity: presents the sample-code activities of this module as a
 * navigable folder tree. Each activity that declares the SAMPLE_CODE action
 * appears either directly (leaf) or behind a folder entry derived from its
 * dotted class-name path. Folders relaunch this activity with the sub-path
 * in EXTRA_NAME_PATH.
 * <p/>
 * Created by LiYang on 2019/4/8.
 */
public class MainActivity extends BaseActivity {

    private static final String ACTION_SAMPLE_CODE = "com.lee.main.action.SAMPLE_CODE_GAME";
    private static final String EXTRA_NAME_PATH = "com.lee.main.Path";
    private static final String PATH_DIVIDED_SYMBOLS = ".";
    private static final String PATH_DIVIDED_SYMBOLS_REGEX = "\\.";

    @BindView(R.id.tv_path)
    TextView tvPath;
    @BindView(R.id.rv_sample_code)
    RecyclerView rvSampleCode;

    private PackageManager packageManager;
    private List<SampleCodeEntity> sampleCodeEntities;
    private SampleCodeAdapter sampleCodeAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        ButterKnife.bind(this);
        initData();
        initView();
    }

    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        // Folder entries relaunch this (likely singleTop/singleTask) activity;
        // adopt the new intent so updateSampleCodes() reads the new path extra.
        setIntent(intent);
        updateSampleCodes();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    /** Prepares the adapter; the entity list is built lazily in initSampleCodes(). */
    private void initData() {
        packageManager = getPackageManager();
        sampleCodeAdapter = new SampleCodeAdapter(this, sampleCodeEntities, itemClickListener);
    }

    private void initView() {
        LinearLayoutManager layoutManager = new LinearLayoutManager(this);
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        rvSampleCode.setLayoutManager(layoutManager);
        rvSampleCode.setAdapter(sampleCodeAdapter);
        updateSampleCodes();
    }

    /** Rebuilds the list for the path carried by the current intent. */
    private void updateSampleCodes() {
        String path = getIntent().getStringExtra(EXTRA_NAME_PATH);
        initSampleCodes(path);
        sampleCodeAdapter.setData(sampleCodeEntities);
        sampleCodeAdapter.notifyDataSetChanged();
        setPathText(path);
    }

    /** Shows the current folder path, falling back to the app name at the root. */
    private void setPathText(String path) {
        if (TextUtils.isEmpty(path)) {
            tvPath.setText(R.string.app_name);
        } else {
            tvPath.setText(path);
        }
    }

    /**
     * Populates {@link #sampleCodeEntities} with the activities and sub-folders
     * directly under {@code path} (null/empty = root), then sorts by title.
     */
    protected void initSampleCodes(String path) {
        if (sampleCodeEntities == null) {
            sampleCodeEntities = new ArrayList<>();
        } else {
            sampleCodeEntities.clear();
        }

        List<ResolveInfo> sampleCodeResolveInfoList = getSampleCodeResolveInfoList();
        if (sampleCodeResolveInfoList == null || sampleCodeResolveInfoList.size() == 0) {
            return;
        }

        // Hoisted out of the loop: path, prefixPaths and currentPrefixPath do not
        // depend on the ResolveInfo being processed, so computing them per
        // iteration (as before) was redundant work.
        String[] prefixPaths = null;
        String currentPrefixPath = null;
        if (!TextUtils.isEmpty(path)) {
            path = getRelativeName(path);
            prefixPaths = path.split(PATH_DIVIDED_SYMBOLS_REGEX);
            currentPrefixPath = path + PATH_DIVIDED_SYMBOLS;
        }

        Map<String, Boolean> folderLabel = new HashMap<>();
        for (ResolveInfo sampleCodeResolveInfo : sampleCodeResolveInfoList) {
            String label = getRelativeName(sampleCodeResolveInfo.activityInfo.name);
            LogUtil.d("getData currentPrefixPath = " + currentPrefixPath + "---label = " + label);
            if (TextUtils.isEmpty(currentPrefixPath) || label.startsWith(currentPrefixPath)) {
                String[] labelPath = label.split(PATH_DIVIDED_SYMBOLS_REGEX);
                int prefixPathsLen = prefixPaths == null ? 0 : prefixPaths.length;
                String sampleCodeLabel = labelPath[prefixPathsLen];
                if (prefixPathsLen == labelPath.length - 1) {
                    // Leaf: the activity sits directly under the current path.
                    addActivityItem(sampleCodeEntities, sampleCodeLabel,
                            sampleCodeResolveInfo.activityInfo.applicationInfo.packageName,
                            sampleCodeResolveInfo.activityInfo.name);
                } else {
                    // Folder: add each sub-package once.
                    if (folderLabel.get(sampleCodeLabel) == null) {
                        addFolderItem(sampleCodeEntities, sampleCodeLabel, currentPrefixPath);
                        folderLabel.put(sampleCodeLabel, true);
                    }
                }
            }
        }

        Collections.sort(sampleCodeEntities, comparator);
    }

    /** Strips a known application package prefix, leaving the class-relative path. */
    private String getRelativeName(String className) {
        if (TextUtils.isEmpty(className)) {
            return className;
        }
        for (String packageName : PackageNameContainer.getPackageNames()) {
            if (className.startsWith(packageName + PATH_DIVIDED_SYMBOLS)) {
                return className.substring(packageName.length() + 1);
            }
        }
        return className;
    }

    /** Queries all activities advertising the sample-code action/category. */
    private List<ResolveInfo> getSampleCodeResolveInfoList() {
        Intent sampleCodeIntent = new Intent(ACTION_SAMPLE_CODE, null);
        sampleCodeIntent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
        return packageManager.queryIntentActivities(sampleCodeIntent, 0);
    }

    // Locale-aware alphabetical ordering of list entries by title.
    private final static Comparator<SampleCodeEntity> comparator = new Comparator<SampleCodeEntity>() {
        private final Collator collator = Collator.getInstance();

        @Override
        public int compare(SampleCodeEntity entity1, SampleCodeEntity entity2) {
            return collator.compare(entity1.getTitle(), entity2.getTitle());
        }
    };

    private void addActivityItem(List<SampleCodeEntity> data, String sampleCodeLabel, String packageName, String className) {
        Intent activityIntent = new Intent();
        activityIntent.setClassName(packageName, className);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_ACTIVITY, sampleCodeLabel, activityIntent);
    }

    private void addFolderItem(List<SampleCodeEntity> data, String sampleCodeLabel, String currentPrefixPath) {
        Intent folderIntent = new Intent();
        folderIntent.setClass(this, MainActivity.class);
        String path = TextUtils.isEmpty(currentPrefixPath) ? sampleCodeLabel : currentPrefixPath + sampleCodeLabel;
        folderIntent.putExtra(EXTRA_NAME_PATH, path);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_FOLDER, sampleCodeLabel, folderIntent);
    }

    protected void addItem(List<SampleCodeEntity> data, int type, String title, Intent intent) {
        SampleCodeEntity entity = new SampleCodeEntity(type, title, intent);
        data.add(entity);
    }

    private SampleCodeAdapter.OnItemClickListener itemClickListener = new SampleCodeAdapter.OnItemClickListener() {
        @Override
        public void onItemClick(View view, int position) {
            SampleCodeEntity entity = sampleCodeEntities.get(position);
            if (entity != null) {
                Intent intent = entity.getIntent();
                intent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
                startActivity(intent);
            }
        }
    };
}
apache-2.0
bither/bither-android
bither-android/src/net/bither/util/FileUtil.java
16209
/*
 * Copyright 2014 http://Bither.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.bither.util;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;

import net.bither.BitherApplication;
import net.bither.bitherj.utils.Utils;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

/**
 * File-system helpers: locations of backups, market-data caches, image caches
 * and ad caches, plus small serialization/copy/delete utilities.
 */
public class FileUtil {

    // old tickerName file
    private static final String HUOBI_TICKER_NAME = "huobi.ticker";
    private static final String BITSTAMP_TICKER_NAME = "bitstamp.ticker";
    private static final String BTCE_TICKER_NAME = "btce.ticker";
    private static final String OKCOIN_TICKER_NAME = "okcoin.ticker";
    private static final String CHBTC_TICKER_NAME = "chbtc.ticker";
    private static final String BTCCHINA_TICKER_NAME = "btcchina.ticker";

    private static final String BITHER_BACKUP_SDCARD_DIR = "BitherBackup";
    private static final String BITHER_BACKUP_ROM_DIR = "backup";
    private static final String BITHER_BACKUP_HOT_FILE_NAME = "keys";
    // (sic) constant names below keep their historical typos; the string values matter
    private static final String EXCAHNGE_TICKER_NAME = "exchange.ticker";
    private static final String EXCHANGE_KLINE_NAME = "exchange.kline";
    private static final String EXCHANGE_DEPTH_NAME = "exchange.depth";
    private static final String PRICE_ALERT = "price.alert";
    private static final String EXCHANGERATE = "exchangerate";
    private static final String CURRENCIES_RATE = "currencies_rate";
    private static final String MARKET_CAHER = "mark";

    private static final String IMAGE_CACHE_DIR = "image";
    private static final String IMAGE_SHARE_FILE_NAME = "share.jpg";
    private static final String IMAGE_CACHE_UPLOAD = IMAGE_CACHE_DIR + "/upload";
    private static final String IMAGE_CACHE_612 = IMAGE_CACHE_DIR + "/612";
    private static final String IMAGE_CACHE_150 = IMAGE_CACHE_DIR + "/150";

    private static final String AD_CACHE = "ad";
    private static final String AD_NAME = "ad.json";
    private static final String AD_IMAGE_EN_CACHE = AD_CACHE + "/img_en";
    private static final String AD_IMAGE_ZH_CN_CACHE = AD_CACHE + "/img_zh_CN";
    private static final String AD_IMAGE_ZH_TW_CACHE = AD_CACHE + "/img_zh_TW";

    /**
     * sdCard exist
     */
    public static boolean existSdCardMounted() {
        String storageState = android.os.Environment.getExternalStorageState();
        if (Utils.isEmpty(storageState)) {
            return false;
        }
        return Utils.compareString(storageState, android.os.Environment.MEDIA_MOUNTED);
    }

    /** Root of the external storage. */
    public static File getSDPath() {
        return Environment.getExternalStorageDirectory();
    }

    /** SD-card backup directory, created on demand. */
    public static File getBackupSdCardDir() {
        File backupDir = new File(getSDPath(), BITHER_BACKUP_SDCARD_DIR);
        if (!backupDir.exists()) {
            backupDir.mkdirs();
        }
        return backupDir;
    }

    /** New timestamp-named .bak file under the SD-card backup directory. */
    public static File getBackupFileOfCold() {
        return new File(getBackupSdCardDir(),
                DateTimeUtil.getNameForFile(System.currentTimeMillis()) + ".bak");
    }

    /** Cold-wallet backup files, newest first, filtered by name convention. */
    public static List<File> getBackupFileListOfCold() {
        File dir = getBackupSdCardDir();
        List<File> fileList = new ArrayList<File>();
        File[] files = dir.listFiles();
        if (files != null && files.length > 0) {
            files = orderByDateDesc(files);
            for (File file : files) {
                if (StringUtil.checkBackupFileOfCold(file.getName())) {
                    fileList.add(file);
                }
            }
        }
        return fileList;
    }

    private static File getBackupRomDir() {
        File backupDir = new File(Utils.getWalletRomCache(), BITHER_BACKUP_ROM_DIR);
        if (!backupDir.exists()) {
            backupDir.mkdirs();
        }
        return backupDir;
    }

    public static File getBackupKeyOfHot() {
        return new File(getBackupRomDir(), BITHER_BACKUP_HOT_FILE_NAME);
    }

    /**
     * Cache subdirectory, created on demand; optionally drops a .nomedia marker
     * so the media scanner ignores it.
     */
    public static File getDiskDir(String dirName, Boolean createNomedia) {
        File dir = getDiskCacheDir(BitherApplication.mContext, dirName);
        if (!dir.exists()) {
            dir.mkdirs();
            if (createNomedia) {
                try {
                    File noMediaFile = new File(dir, ".nomedia");
                    noMediaFile.createNewFile();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return dir;
    }

    /** Compresses the bitmap to the shared-image cache file and returns its Uri. */
    public static Uri saveShareImage(Bitmap bmp) {
        File dir = getDiskDir(IMAGE_CACHE_DIR, true);
        File jpg = new File(dir, IMAGE_SHARE_FILE_NAME);
        NativeUtil.compressBitmap(bmp, 85, jpg.getAbsolutePath(), true);
        return Uri.fromFile(jpg);
    }

    public static File getExternalCacheDir(Context context) {
        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir);
    }

    /** External cache when mounted and non-removable; internal cache otherwise. */
    public static File getDiskCacheDir(Context context, String uniqueName) {
        File extCacheDir = getExternalCacheDir(context);
        final String cachePath =
                (Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
                        || !isExternalStorageRemovable()) && extCacheDir != null
                        ? extCacheDir.getPath()
                        : context.getCacheDir().getPath();
        return new File(cachePath + File.separator + uniqueName);
    }

    @TargetApi(9)
    public static boolean isExternalStorageRemovable() {
        if (SdkUtils.hasGingerbread()) {
            return Environment.isExternalStorageRemovable();
        }
        // Pre-Gingerbread has no API for this; assume removable.
        return true;
    }

    private static File getMarketCache() {
        return getDiskDir(MARKET_CAHER, false);
    }

    public static File getAdImageEnDir() {
        return getDiskDir(AD_IMAGE_EN_CACHE, true);
    }

    public static File getAdImagZhCnDir() {
        return getDiskDir(AD_IMAGE_ZH_CN_CACHE, true);
    }

    public static File getAdImagZhTwDir() {
        return getDiskDir(AD_IMAGE_ZH_TW_CACHE, true);
    }

    private static File getAdDir() {
        return getDiskDir(AD_CACHE, false);
    }

    public static File getUploadImageDir() {
        return getDiskDir(IMAGE_CACHE_UPLOAD, true);
    }

    public static File getAvatarDir() {
        return getDiskDir(IMAGE_CACHE_612, true);
    }

    public static File getSmallAvatarDir() {
        return getDiskDir(IMAGE_CACHE_150, true);
    }

    public static File getExchangeRateFile() {
        return new File(getDiskDir("", false), EXCHANGERATE);
    }

    public static File getCurrenciesRateFile() {
        return new File(getDiskDir("", false), CURRENCIES_RATE);
    }

    public static File getTickerFile() {
        return new File(getMarketCache(), EXCAHNGE_TICKER_NAME);
    }

    public static File getPriceAlertFile() {
        return new File(getMarketCache(), PRICE_ALERT);
    }

    public static File getKlineFile() {
        return new File(getMarketCache(), EXCHANGE_KLINE_NAME);
    }

    public static File getDepthFile() {
        return new File(getMarketCache(), EXCHANGE_DEPTH_NAME);
    }

    public static File getAdFile() {
        return new File(getAdDir(), AD_NAME);
    }

    /**
     * Deserializes a single object from the file; returns null when the file is
     * missing or unreadable. Closing the underlying FileInputStream releases the
     * stream's file descriptor.
     */
    @SuppressWarnings("resource")
    public static Object deserialize(File file) {
        FileInputStream fos = null;
        try {
            if (!file.exists()) {
                return null;
            }
            fos = new FileInputStream(file);
            ObjectInputStream ois = new ObjectInputStream(fos);
            return ois.readObject();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            try {
                if (fos != null) {
                    fos.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Serializes the object to the file. Errors are logged and swallowed,
     * matching the previous best-effort behavior.
     * Fix: the output stream is now closed in a finally block — previously it
     * leaked whenever writeObject/flush threw.
     */
    public static void serializeObject(File file, Object object) {
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(file);
            ObjectOutputStream oos = new ObjectOutputStream(fos);
            oos.writeObject(object);
            oos.flush();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /** Sorts files by last-modified time, newest first. */
    public static File[] orderByDateDesc(File[] fs) {
        Arrays.sort(fs, new Comparator<File>() {
            @Override
            public int compare(File f1, File f2) {
                long diff = f1.lastModified() - f2.lastModified();
                if (diff > 0) {
                    return -1; // -1: f1 before f2 (newer first)
                } else if (diff == 0) {
                    return 0;
                } else {
                    return 1;
                }
            }

            // NOTE(review): always-true equals violates the Object contract but is
            // kept — it was in the original and nothing here relies on it.
            @Override
            public boolean equals(Object obj) {
                return true;
            }
        });
        return fs;
    }

    /**
     * Recursively copies a file or directory.
     * Fix: streams are now closed in a finally block — previously they leaked
     * on any read/write failure.
     *
     * @throws FileNotFoundException if src is neither a file nor a directory
     */
    public static void copyFile(File src, File tar) throws Exception {
        if (src.isFile()) {
            BufferedInputStream bis = null;
            BufferedOutputStream bos = null;
            try {
                InputStream is = new FileInputStream(src);
                bis = new BufferedInputStream(is);
                OutputStream op = new FileOutputStream(tar);
                bos = new BufferedOutputStream(op);
                byte[] bt = new byte[8192];
                int len = bis.read(bt);
                while (len != -1) {
                    bos.write(bt, 0, len);
                    len = bis.read(bt);
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (bis != null) {
                    try {
                        bis.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                if (bos != null) {
                    try {
                        bos.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        } else if (src.isDirectory()) {
            File[] files = src.listFiles();
            tar.mkdir();
            for (int i = 0; i < files.length; i++) {
                copyFile(files[i].getAbsoluteFile(),
                        new File(tar.getAbsoluteFile() + File.separator + files[i].getName()));
            }
        } else {
            throw new FileNotFoundException();
        }
    }

    /** Deletes a directory's contents, then the directory itself (best effort). */
    public static void delFolder(String folderPath) {
        try {
            delAllFile(folderPath);
            String filePath = folderPath;
            filePath = filePath.toString();
            java.io.File myFilePath = new java.io.File(filePath);
            myFilePath.delete();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Recursively deletes everything inside the directory at path. */
    private static void delAllFile(String path) {
        File file = new File(path);
        if (!file.exists()) {
            return;
        }
        if (!file.isDirectory()) {
            return;
        }
        String[] tempList = file.list();
        if (tempList == null) {
            return;
        }
        File temp = null;
        for (int i = 0; i < tempList.length; i++) {
            if (path.endsWith(File.separator)) {
                temp = new File(path + tempList[i]);
            } else {
                temp = new File(path + File.separator + tempList[i]);
            }
            if (temp.isFile()) {
                temp.delete();
            }
            if (temp.isDirectory()) {
                delAllFile(path + "/" + tempList[i]);
                delFolder(path + "/" + tempList[i]);
            }
        }
    }

    /** Removes per-exchange ticker files superseded by the combined ticker file. */
    public static void upgradeTickerFile() {
        File marketDir = getMarketCache();
        File file = new File(marketDir, BITSTAMP_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, BTCE_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, HUOBI_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, OKCOIN_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, CHBTC_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, BTCCHINA_TICKER_NAME);
        fileExistAndDelete(file);
    }

    /** @return true when the file existed and was deleted. */
    public static boolean fileExistAndDelete(File file) {
        return file.exists() && file.delete();
    }

    /**
     * Resolves a content/file Uri to a File via the MediaStore, falling back to
     * a direct file-URI conversion. Returns null on failure; exceptions are
     * deliberately swallowed (best-effort lookup).
     */
    public static File convertUriToFile(Activity activity, Uri uri) {
        File file = null;
        try {
            String[] proj = {MediaStore.Images.Media.DATA};
            @SuppressWarnings("deprecation")
            Cursor actualimagecursor = activity.managedQuery(uri, proj, null, null, null);
            if (actualimagecursor != null) {
                int actual_image_column_index = actualimagecursor
                        .getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
                actualimagecursor.moveToFirst();
                String img_path = actualimagecursor.getString(actual_image_column_index);
                if (!Utils.isEmpty(img_path)) {
                    file = new File(img_path);
                }
            } else {
                file = new File(new URI(uri.toString()));
                if (file.exists()) {
                    return file;
                }
            }
        } catch (Exception e) {
            // intentionally ignored: callers treat null as "could not resolve"
        }
        return file;
    }

    /** Reads the EXIF orientation of an image file as degrees (0/90/180/270). */
    public static int getOrientationOfFile(String fileName) {
        int orientation = 0;
        try {
            ExifInterface exif = new ExifInterface(fileName);
            String orientationString = exif.getAttribute(ExifInterface.TAG_ORIENTATION);
            if (Utils.isNubmer(orientationString)) {
                int orc = Integer.valueOf(orientationString);
                switch (orc) {
                    case ExifInterface.ORIENTATION_ROTATE_90:
                        orientation = 90;
                        break;
                    case ExifInterface.ORIENTATION_ROTATE_180:
                        orientation = 180;
                        break;
                    case ExifInterface.ORIENTATION_ROTATE_270:
                        orientation = 270;
                        break;
                    default:
                        break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return orientation;
    }
}
apache-2.0
cobbzilla/cobbzilla-utils
src/main/java/org/cobbzilla/util/jdbc/DbUrlUtil.java
495
package org.cobbzilla.util.jdbc; import java.util.regex.Matcher; import java.util.regex.Pattern; public class DbUrlUtil { public static final Pattern JDBC_URL_REGEX = Pattern.compile("^jdbc:postgresql://[\\.\\w]+:\\d+/(.+)$"); public static String setDbName(String url, String dbName) { final Matcher matcher = JDBC_URL_REGEX.matcher(url); if (!matcher.find()) return url; final String renamed = matcher.replaceFirst(dbName); return renamed; } }
apache-2.0
xiao-chen/hadoop
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/ozShell/TestOzoneShell.java
43840
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.ozShell; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.UUID; import java.util.stream.Collectors; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdds.cli.MissingSubcommandException; import org.apache.hadoop.hdds.client.ReplicationFactor; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.ozone.MiniOzoneCluster; import org.apache.hadoop.ozone.OzoneAcl; import org.apache.hadoop.ozone.OzoneAcl.OzoneACLRights; import org.apache.hadoop.ozone.OzoneAcl.OzoneACLType; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.client.OzoneBucket; import org.apache.hadoop.ozone.client.OzoneKey; import org.apache.hadoop.ozone.client.OzoneVolume; import org.apache.hadoop.ozone.client.VolumeArgs; import 
org.apache.hadoop.ozone.client.io.OzoneOutputStream; import org.apache.hadoop.ozone.client.protocol.ClientProtocol; import org.apache.hadoop.ozone.client.rest.OzoneException; import org.apache.hadoop.ozone.client.rest.RestClient; import org.apache.hadoop.ozone.client.rpc.RpcClient; import org.apache.hadoop.ozone.om.helpers.ServiceInfo; import org.apache.hadoop.ozone.web.ozShell.Shell; import org.apache.hadoop.ozone.web.request.OzoneQuota; import org.apache.hadoop.ozone.web.response.BucketInfo; import org.apache.hadoop.ozone.web.response.KeyInfo; import org.apache.hadoop.ozone.web.response.VolumeInfo; import org.apache.hadoop.ozone.web.utils.JsonUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.GenericTestUtils; import com.google.common.base.Strings; import org.apache.commons.lang3.RandomStringUtils; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_REPLICATION; import static org.apache.hadoop.ozone.om.OMConfigKeys.OZONE_OM_ADDRESS_KEY; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import picocli.CommandLine; import picocli.CommandLine.ExecutionException; import picocli.CommandLine.IExceptionHandler2; import picocli.CommandLine.ParameterException; import picocli.CommandLine.ParseResult; import picocli.CommandLine.RunLast; /** * This test class specified for testing Ozone shell command. 
*/ @RunWith(value = Parameterized.class) public class TestOzoneShell { private static final Logger LOG = LoggerFactory.getLogger(TestOzoneShell.class); /** * Set the timeout for every test. */ @Rule public Timeout testTimeout = new Timeout(300000); private static String url; private static File baseDir; private static OzoneConfiguration conf = null; private static MiniOzoneCluster cluster = null; private static ClientProtocol client = null; private static Shell shell = null; private final ByteArrayOutputStream out = new ByteArrayOutputStream(); private final ByteArrayOutputStream err = new ByteArrayOutputStream(); private static final PrintStream OLD_OUT = System.out; private static final PrintStream OLD_ERR = System.err; @Parameterized.Parameters public static Collection<Object[]> clientProtocol() { Object[][] params = new Object[][] { {RpcClient.class}, {RestClient.class}}; return Arrays.asList(params); } @Parameterized.Parameter public Class clientProtocol; /** * Create a MiniDFSCluster for testing with using distributed Ozone * handler type. * * @throws Exception */ @BeforeClass public static void init() throws Exception { conf = new OzoneConfiguration(); String path = GenericTestUtils.getTempPath( TestOzoneShell.class.getSimpleName()); baseDir = new File(path); baseDir.mkdirs(); shell = new Shell(); cluster = MiniOzoneCluster.newBuilder(conf) .setNumDatanodes(3) .build(); conf.setInt(OZONE_REPLICATION, ReplicationFactor.THREE.getValue()); conf.setQuietMode(false); client = new RpcClient(conf); cluster.waitForClusterToBeReady(); } /** * shutdown MiniDFSCluster. 
*/ @AfterClass public static void shutdown() { if (cluster != null) { cluster.shutdown(); } if (baseDir != null) { FileUtil.fullyDelete(baseDir, true); } } @Before public void setup() { System.setOut(new PrintStream(out)); System.setErr(new PrintStream(err)); if(clientProtocol.equals(RestClient.class)) { String hostName = cluster.getOzoneManager().getHttpServer() .getHttpAddress().getHostName(); int port = cluster .getOzoneManager().getHttpServer().getHttpAddress().getPort(); url = String.format("http://" + hostName + ":" + port); } else { List<ServiceInfo> services = null; try { services = cluster.getOzoneManager().getServiceList(); } catch (IOException e) { LOG.error("Could not get service list from OM"); } String hostName = services.stream().filter( a -> a.getNodeType().equals(HddsProtos.NodeType.OM)) .collect(Collectors.toList()).get(0).getHostname(); String port = cluster.getOzoneManager().getRpcPort(); url = String.format("o3://" + hostName + ":" + port); } } @After public void reset() { // reset stream after each unit test out.reset(); err.reset(); // restore system streams System.setOut(OLD_OUT); System.setErr(OLD_ERR); } @Test public void testCreateVolume() throws Exception { LOG.info("Running testCreateVolume"); String volumeName = "volume" + RandomStringUtils.randomNumeric(5); testCreateVolume(volumeName, ""); volumeName = "volume" + RandomStringUtils.randomNumeric(5); testCreateVolume("/////" + volumeName, ""); testCreateVolume("/////", "Volume name is required"); testCreateVolume("/////vol/123", "Invalid volume name. 
Delimiters (/) not allowed in volume name"); } private void testCreateVolume(String volumeName, String errorMsg) throws Exception { err.reset(); String userName = "bilbo"; String[] args = new String[] {"volume", "create", url + "/" + volumeName, "--user", userName, "--root"}; if (Strings.isNullOrEmpty(errorMsg)) { execute(shell, args); } else { executeWithError(shell, args, errorMsg); return; } String truncatedVolumeName = volumeName.substring(volumeName.lastIndexOf('/') + 1); OzoneVolume volumeInfo = client.getVolumeDetails(truncatedVolumeName); assertEquals(truncatedVolumeName, volumeInfo.getName()); assertEquals(userName, volumeInfo.getOwner()); } private void execute(Shell ozoneShell, String[] args) { List<String> arguments = new ArrayList(Arrays.asList(args)); LOG.info("Executing shell command with args {}", arguments); CommandLine cmd = ozoneShell.getCmd(); IExceptionHandler2<List<Object>> exceptionHandler = new IExceptionHandler2<List<Object>>() { @Override public List<Object> handleParseException(ParameterException ex, String[] args) { throw ex; } @Override public List<Object> handleExecutionException(ExecutionException ex, ParseResult parseResult) { throw ex; } }; cmd.parseWithHandlers(new RunLast(), exceptionHandler, args); } /** * Test to create volume without specifying --user or -u. 
* @throws Exception */ @Test public void testCreateVolumeWithoutUser() throws Exception { String volumeName = "volume" + RandomStringUtils.randomNumeric(1); String[] args = new String[] {"volume", "create", url + "/" + volumeName, "--root"}; execute(shell, args); String truncatedVolumeName = volumeName.substring(volumeName.lastIndexOf('/') + 1); OzoneVolume volumeInfo = client.getVolumeDetails(truncatedVolumeName); assertEquals(truncatedVolumeName, volumeInfo.getName()); assertEquals(UserGroupInformation.getCurrentUser().getUserName(), volumeInfo.getOwner()); } @Test public void testDeleteVolume() throws Exception { LOG.info("Running testDeleteVolume"); String volumeName = "volume" + RandomStringUtils.randomNumeric(5); VolumeArgs volumeArgs = VolumeArgs.newBuilder() .setOwner("bilbo") .setQuota("100TB") .build(); client.createVolume(volumeName, volumeArgs); OzoneVolume volume = client.getVolumeDetails(volumeName); assertNotNull(volume); String[] args = new String[] {"volume", "delete", url + "/" + volumeName}; execute(shell, args); String output = out.toString(); assertTrue(output.contains("Volume " + volumeName + " is deleted")); // verify if volume has been deleted try { client.getVolumeDetails(volumeName); fail("Get volume call should have thrown."); } catch (IOException e) { GenericTestUtils.assertExceptionContains( "Info Volume failed, error:VOLUME_NOT_FOUND", e); } volumeName = "volume" + RandomStringUtils.randomNumeric(5); volumeArgs = VolumeArgs.newBuilder() .setOwner("bilbo") .setQuota("100TB") .build(); client.createVolume(volumeName, volumeArgs); volume = client.getVolumeDetails(volumeName); assertNotNull(volume); //volumeName prefixed with / String volumeNameWithSlashPrefix = "/" + volumeName; args = new String[] {"volume", "delete", url + "/" + volumeNameWithSlashPrefix}; execute(shell, args); output = out.toString(); assertTrue(output.contains("Volume " + volumeName + " is deleted")); // verify if volume has been deleted try { 
client.getVolumeDetails(volumeName); fail("Get volume call should have thrown."); } catch (IOException e) { GenericTestUtils.assertExceptionContains( "Info Volume failed, error:VOLUME_NOT_FOUND", e); } } @Test public void testInfoVolume() throws Exception { LOG.info("Running testInfoVolume"); String volumeName = "volume" + RandomStringUtils.randomNumeric(5); VolumeArgs volumeArgs = VolumeArgs.newBuilder() .setOwner("bilbo") .setQuota("100TB") .build(); client.createVolume(volumeName, volumeArgs); //volumeName supplied as-is String[] args = new String[] {"volume", "info", url + "/" + volumeName}; execute(shell, args); String output = out.toString(); assertTrue(output.contains(volumeName)); assertTrue(output.contains("createdOn") && output.contains(OzoneConsts.OZONE_TIME_ZONE)); //volumeName prefixed with / String volumeNameWithSlashPrefix = "/" + volumeName; args = new String[] {"volume", "info", url + "/" + volumeNameWithSlashPrefix}; execute(shell, args); output = out.toString(); assertTrue(output.contains(volumeName)); assertTrue(output.contains("createdOn") && output.contains(OzoneConsts.OZONE_TIME_ZONE)); // test infoVolume with invalid volume name args = new String[] {"volume", "info", url + "/" + volumeName + "/invalid-name"}; executeWithError(shell, args, "Invalid volume name. " + "Delimiters (/) not allowed in volume name"); // get info for non-exist volume args = new String[] {"volume", "info", url + "/invalid-volume"}; executeWithError(shell, args, "VOLUME_NOT_FOUND"); } @Test public void testShellIncompleteCommand() throws Exception { LOG.info("Running testShellIncompleteCommand"); String expectedError = "Incomplete command"; String[] args = new String[] {}; //executing 'ozone sh' executeWithError(shell, args, expectedError, "Usage: ozone sh [-hV] [--verbose] [-D=<String=String>]..." 
+ " [COMMAND]"); args = new String[] {"volume"}; //executing 'ozone sh volume' executeWithError(shell, args, expectedError, "Usage: ozone sh volume [-hV] [COMMAND]"); args = new String[] {"bucket"}; //executing 'ozone sh bucket' executeWithError(shell, args, expectedError, "Usage: ozone sh bucket [-hV] [COMMAND]"); args = new String[] {"key"}; //executing 'ozone sh key' executeWithError(shell, args, expectedError, "Usage: ozone sh key [-hV] [COMMAND]"); } @Test public void testUpdateVolume() throws Exception { LOG.info("Running testUpdateVolume"); String volumeName = "volume" + RandomStringUtils.randomNumeric(5); String userName = "bilbo"; VolumeArgs volumeArgs = VolumeArgs.newBuilder() .setOwner("bilbo") .setQuota("100TB") .build(); client.createVolume(volumeName, volumeArgs); OzoneVolume vol = client.getVolumeDetails(volumeName); assertEquals(userName, vol.getOwner()); assertEquals(OzoneQuota.parseQuota("100TB").sizeInBytes(), vol.getQuota()); String[] args = new String[] {"volume", "update", url + "/" + volumeName, "--quota", "500MB"}; execute(shell, args); vol = client.getVolumeDetails(volumeName); assertEquals(userName, vol.getOwner()); assertEquals(OzoneQuota.parseQuota("500MB").sizeInBytes(), vol.getQuota()); String newUser = "new-user"; args = new String[] {"volume", "update", url + "/" + volumeName, "--user", newUser}; execute(shell, args); vol = client.getVolumeDetails(volumeName); assertEquals(newUser, vol.getOwner()); //volume with / prefix String volumeWithPrefix = "/" + volumeName; String newUser2 = "new-user2"; args = new String[] {"volume", "update", url + "/" + volumeWithPrefix, "--user", newUser2}; execute(shell, args); vol = client.getVolumeDetails(volumeName); assertEquals(newUser2, vol.getOwner()); // test error conditions args = new String[] {"volume", "update", url + "/invalid-volume", "--user", newUser}; executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND"); err.reset(); args = new String[] {"volume", "update", url + 
"/invalid-volume", "--quota", "500MB"}; executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND"); } /** * Execute command, assert exeception message and returns true if error * was thrown. */ private void executeWithError(Shell ozoneShell, String[] args, String expectedError) { if (Strings.isNullOrEmpty(expectedError)) { execute(ozoneShell, args); } else { try { execute(ozoneShell, args); fail("Exception is expected from command execution " + Arrays .asList(args)); } catch (Exception ex) { if (!Strings.isNullOrEmpty(expectedError)) { Throwable exceptionToCheck = ex; if (exceptionToCheck.getCause() != null) { exceptionToCheck = exceptionToCheck.getCause(); } Assert.assertTrue( String.format( "Error of shell code doesn't contain the " + "exception [%s] in [%s]", expectedError, exceptionToCheck.getMessage()), exceptionToCheck.getMessage().contains(expectedError)); } } } } /** * Execute command, assert exception message and returns true if error * was thrown and contains the specified usage string. 
*/ private void executeWithError(Shell ozoneShell, String[] args, String expectedError, String usage) { if (Strings.isNullOrEmpty(expectedError)) { execute(ozoneShell, args); } else { try { execute(ozoneShell, args); fail("Exception is expected from command execution " + Arrays .asList(args)); } catch (Exception ex) { if (!Strings.isNullOrEmpty(expectedError)) { Throwable exceptionToCheck = ex; if (exceptionToCheck.getCause() != null) { exceptionToCheck = exceptionToCheck.getCause(); } Assert.assertTrue( String.format( "Error of shell code doesn't contain the " + "exception [%s] in [%s]", expectedError, exceptionToCheck.getMessage()), exceptionToCheck.getMessage().contains(expectedError)); Assert.assertTrue( exceptionToCheck instanceof MissingSubcommandException); Assert.assertTrue( ((MissingSubcommandException)exceptionToCheck) .getUsage().contains(usage)); } } } } @Test public void testListVolume() throws Exception { LOG.info("Running testListVolume"); String protocol = clientProtocol.getName().toLowerCase(); String commandOutput, commandError; List<VolumeInfo> volumes; final int volCount = 20; final String user1 = "test-user-a-" + protocol; final String user2 = "test-user-b-" + protocol; // Create 20 volumes, 10 for user1 and another 10 for user2. 
for (int x = 0; x < volCount; x++) { String volumeName; String userName; if (x % 2 == 0) { // create volume [test-vol0, test-vol2, ..., test-vol18] for user1 userName = user1; volumeName = "test-vol-" + protocol + x; } else { // create volume [test-vol1, test-vol3, ..., test-vol19] for user2 userName = user2; volumeName = "test-vol-" + protocol + x; } VolumeArgs volumeArgs = VolumeArgs.newBuilder() .setOwner(userName) .setQuota("100TB") .build(); client.createVolume(volumeName, volumeArgs); OzoneVolume vol = client.getVolumeDetails(volumeName); assertNotNull(vol); } String[] args = new String[] {"volume", "list", url + "/abcde", "--user", user1, "--length", "100"}; executeWithError(shell, args, "Invalid URI"); err.reset(); // test -length option args = new String[] {"volume", "list", url + "/", "--user", user1, "--length", "100"}; execute(shell, args); commandOutput = out.toString(); volumes = (List<VolumeInfo>) JsonUtils .toJsonList(commandOutput, VolumeInfo.class); assertEquals(10, volumes.size()); for (VolumeInfo volume : volumes) { assertEquals(volume.getOwner().getName(), user1); assertTrue(volume.getCreatedOn().contains(OzoneConsts.OZONE_TIME_ZONE)); } out.reset(); args = new String[] {"volume", "list", url + "/", "--user", user1, "--length", "2"}; execute(shell, args); commandOutput = out.toString(); volumes = (List<VolumeInfo>) JsonUtils .toJsonList(commandOutput, VolumeInfo.class); assertEquals(2, volumes.size()); // test --prefix option out.reset(); args = new String[] {"volume", "list", url + "/", "--user", user1, "--length", "100", "--prefix", "test-vol-" + protocol + "1"}; execute(shell, args); commandOutput = out.toString(); volumes = (List<VolumeInfo>) JsonUtils .toJsonList(commandOutput, VolumeInfo.class); assertEquals(5, volumes.size()); // return volume names should be [test-vol10, test-vol12, ..., test-vol18] for (int i = 0; i < volumes.size(); i++) { assertEquals(volumes.get(i).getVolumeName(), "test-vol-" + protocol + ((i + 5) * 2)); 
assertEquals(volumes.get(i).getOwner().getName(), user1); } // test -start option out.reset(); args = new String[] {"volume", "list", url + "/", "--user", user2, "--length", "100", "--start", "test-vol-" + protocol + "15"}; execute(shell, args); commandOutput = out.toString(); volumes = (List<VolumeInfo>) JsonUtils .toJsonList(commandOutput, VolumeInfo.class); assertEquals(2, volumes.size()); assertEquals(volumes.get(0).getVolumeName(), "test-vol-" + protocol + "17"); assertEquals(volumes.get(1).getVolumeName(), "test-vol-" + protocol + "19"); assertEquals(volumes.get(0).getOwner().getName(), user2); assertEquals(volumes.get(1).getOwner().getName(), user2); // test error conditions err.reset(); args = new String[] {"volume", "list", url + "/", "--user", user2, "--length", "-1"}; executeWithError(shell, args, "the length should be a positive number"); err.reset(); args = new String[] {"volume", "list", url + "/", "--user", user2, "--length", "invalid-length"}; executeWithError(shell, args, "For input string: \"invalid-length\""); } @Test public void testCreateBucket() throws Exception { LOG.info("Running testCreateBucket"); OzoneVolume vol = creatVolume(); String bucketName = "bucket" + RandomStringUtils.randomNumeric(5); String[] args = new String[] {"bucket", "create", url + "/" + vol.getName() + "/" + bucketName}; execute(shell, args); OzoneBucket bucketInfo = vol.getBucket(bucketName); assertEquals(vol.getName(), bucketInfo.getVolumeName()); assertEquals(bucketName, bucketInfo.getName()); // test create a bucket in a non-exist volume args = new String[] {"bucket", "create", url + "/invalid-volume/" + bucketName}; executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND"); // test createBucket with invalid bucket name args = new String[] {"bucket", "create", url + "/" + vol.getName() + "/" + bucketName + "/invalid-name"}; executeWithError(shell, args, "Invalid bucket name. 
Delimiters (/) not allowed in bucket name"); } @Test public void testDeleteBucket() throws Exception { LOG.info("Running testDeleteBucket"); OzoneVolume vol = creatVolume(); String bucketName = "bucket" + RandomStringUtils.randomNumeric(5); vol.createBucket(bucketName); OzoneBucket bucketInfo = vol.getBucket(bucketName); assertNotNull(bucketInfo); String[] args = new String[] {"bucket", "delete", url + "/" + vol.getName() + "/" + bucketName}; execute(shell, args); // verify if bucket has been deleted in volume try { vol.getBucket(bucketName); fail("Get bucket should have thrown."); } catch (IOException e) { GenericTestUtils.assertExceptionContains( "Info Bucket failed, error: BUCKET_NOT_FOUND", e); } // test delete bucket in a non-exist volume args = new String[] {"bucket", "delete", url + "/invalid-volume" + "/" + bucketName}; executeWithError(shell, args, "Info Volume failed, error:VOLUME_NOT_FOUND"); err.reset(); // test delete non-exist bucket args = new String[] {"bucket", "delete", url + "/" + vol.getName() + "/invalid-bucket"}; executeWithError(shell, args, "Delete Bucket failed, error:BUCKET_NOT_FOUND"); } @Test public void testInfoBucket() throws Exception { LOG.info("Running testInfoBucket"); OzoneVolume vol = creatVolume(); String bucketName = "bucket" + RandomStringUtils.randomNumeric(5); vol.createBucket(bucketName); String[] args = new String[] {"bucket", "info", url + "/" + vol.getName() + "/" + bucketName}; execute(shell, args); String output = out.toString(); assertTrue(output.contains(bucketName)); assertTrue(output.contains("createdOn") && output.contains(OzoneConsts.OZONE_TIME_ZONE)); // test infoBucket with invalid bucket name args = new String[] {"bucket", "info", url + "/" + vol.getName() + "/" + bucketName + "/invalid-name"}; executeWithError(shell, args, "Invalid bucket name. 
Delimiters (/) not allowed in bucket name"); // test get info from a non-exist bucket args = new String[] {"bucket", "info", url + "/" + vol.getName() + "/invalid-bucket" + bucketName}; executeWithError(shell, args, "Info Bucket failed, error: BUCKET_NOT_FOUND"); } @Test public void testUpdateBucket() throws Exception { LOG.info("Running testUpdateBucket"); OzoneVolume vol = creatVolume(); String bucketName = "bucket" + RandomStringUtils.randomNumeric(5); vol.createBucket(bucketName); OzoneBucket bucket = vol.getBucket(bucketName); int aclSize = bucket.getAcls().size(); String[] args = new String[] {"bucket", "update", url + "/" + vol.getName() + "/" + bucketName, "--addAcl", "user:frodo:rw,group:samwise:r"}; execute(shell, args); String output = out.toString(); assertTrue(output.contains("createdOn") && output.contains(OzoneConsts.OZONE_TIME_ZONE)); bucket = vol.getBucket(bucketName); assertEquals(2 + aclSize, bucket.getAcls().size()); OzoneAcl acl = bucket.getAcls().get(aclSize); assertTrue(acl.getName().equals("frodo") && acl.getType() == OzoneACLType.USER && acl.getRights()== OzoneACLRights.READ_WRITE); args = new String[] {"bucket", "update", url + "/" + vol.getName() + "/" + bucketName, "--removeAcl", "user:frodo:rw"}; execute(shell, args); bucket = vol.getBucket(bucketName); acl = bucket.getAcls().get(aclSize); assertEquals(1 + aclSize, bucket.getAcls().size()); assertTrue(acl.getName().equals("samwise") && acl.getType() == OzoneACLType.GROUP && acl.getRights()== OzoneACLRights.READ); // test update bucket for a non-exist bucket args = new String[] {"bucket", "update", url + "/" + vol.getName() + "/invalid-bucket", "--addAcl", "user:frodo:rw"}; executeWithError(shell, args, "Info Bucket failed, error: BUCKET_NOT_FOUND"); } @Test public void testListBucket() throws Exception { LOG.info("Running testListBucket"); List<BucketInfo> buckets; String commandOutput; int bucketCount = 11; OzoneVolume vol = creatVolume(); List<String> bucketNames = new ArrayList<>(); 
// create bucket from test-bucket0 to test-bucket10 for (int i = 0; i < bucketCount; i++) { String name = "test-bucket" + i; bucketNames.add(name); vol.createBucket(name); OzoneBucket bucket = vol.getBucket(name); assertNotNull(bucket); } // test listBucket with invalid volume name String[] args = new String[] {"bucket", "list", url + "/" + vol.getName() + "/invalid-name"}; executeWithError(shell, args, "Invalid volume name. " + "Delimiters (/) not allowed in volume name"); // test -length option args = new String[] {"bucket", "list", url + "/" + vol.getName(), "--length", "100"}; execute(shell, args); commandOutput = out.toString(); buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput, BucketInfo.class); assertEquals(11, buckets.size()); // sort bucket names since the return buckets isn't in created order Collections.sort(bucketNames); // return bucket names should be [test-bucket0, test-bucket1, // test-bucket10, test-bucket2, ,..., test-bucket9] for (int i = 0; i < buckets.size(); i++) { assertEquals(buckets.get(i).getBucketName(), bucketNames.get(i)); assertEquals(buckets.get(i).getVolumeName(), vol.getName()); assertTrue(buckets.get(i).getCreatedOn() .contains(OzoneConsts.OZONE_TIME_ZONE)); } out.reset(); args = new String[] {"bucket", "list", url + "/" + vol.getName(), "--length", "3"}; execute(shell, args); commandOutput = out.toString(); buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput, BucketInfo.class); assertEquals(3, buckets.size()); // return bucket names should be [test-bucket0, // test-bucket1, test-bucket10] assertEquals(buckets.get(0).getBucketName(), "test-bucket0"); assertEquals(buckets.get(1).getBucketName(), "test-bucket1"); assertEquals(buckets.get(2).getBucketName(), "test-bucket10"); // test --prefix option out.reset(); args = new String[] {"bucket", "list", url + "/" + vol.getName(), "--length", "100", "--prefix", "test-bucket1"}; execute(shell, args); commandOutput = out.toString(); buckets = (List<BucketInfo>) 
JsonUtils.toJsonList(commandOutput, BucketInfo.class); assertEquals(2, buckets.size()); // return bucket names should be [test-bucket1, test-bucket10] assertEquals(buckets.get(0).getBucketName(), "test-bucket1"); assertEquals(buckets.get(1).getBucketName(), "test-bucket10"); // test -start option out.reset(); args = new String[] {"bucket", "list", url + "/" + vol.getName(), "--length", "100", "--start", "test-bucket7"}; execute(shell, args); commandOutput = out.toString(); buckets = (List<BucketInfo>) JsonUtils.toJsonList(commandOutput, BucketInfo.class); assertEquals(2, buckets.size()); assertEquals(buckets.get(0).getBucketName(), "test-bucket8"); assertEquals(buckets.get(1).getBucketName(), "test-bucket9"); // test error conditions err.reset(); args = new String[] {"bucket", "list", url + "/" + vol.getName(), "--length", "-1"}; executeWithError(shell, args, "the length should be a positive number"); } @Test public void testPutKey() throws Exception { LOG.info("Running testPutKey"); OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String keyName = "key" + RandomStringUtils.randomNumeric(5); String[] args = new String[] {"key", "put", url + "/" + volumeName + "/" + bucketName + "/" + keyName, createTmpFile()}; execute(shell, args); OzoneKey keyInfo = bucket.getKey(keyName); assertEquals(keyName, keyInfo.getName()); // test put key in a non-exist bucket args = new String[] {"key", "put", url + "/" + volumeName + "/invalid-bucket/" + keyName, createTmpFile()}; executeWithError(shell, args, "Info Bucket failed, error: BUCKET_NOT_FOUND"); } @Test public void testGetKey() throws Exception { LOG.info("Running testGetKey"); String keyName = "key" + RandomStringUtils.randomNumeric(5); OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String dataStr = "test-data"; OzoneOutputStream keyOutputStream = bucket.createKey(keyName, 
dataStr.length()); keyOutputStream.write(dataStr.getBytes()); keyOutputStream.close(); String tmpPath = baseDir.getAbsolutePath() + "/testfile-" + UUID.randomUUID().toString(); String[] args = new String[] {"key", "get", url + "/" + volumeName + "/" + bucketName + "/" + keyName, tmpPath}; execute(shell, args); byte[] dataBytes = new byte[dataStr.length()]; try (FileInputStream randFile = new FileInputStream(new File(tmpPath))) { randFile.read(dataBytes); } assertEquals(dataStr, DFSUtil.bytes2String(dataBytes)); tmpPath = baseDir.getAbsolutePath() + File.separatorChar + keyName; args = new String[] {"key", "get", url + "/" + volumeName + "/" + bucketName + "/" + keyName, baseDir.getAbsolutePath()}; execute(shell, args); dataBytes = new byte[dataStr.length()]; try (FileInputStream randFile = new FileInputStream(new File(tmpPath))) { randFile.read(dataBytes); } assertEquals(dataStr, DFSUtil.bytes2String(dataBytes)); } @Test public void testDeleteKey() throws Exception { LOG.info("Running testDeleteKey"); String keyName = "key" + RandomStringUtils.randomNumeric(5); OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String dataStr = "test-data"; OzoneOutputStream keyOutputStream = bucket.createKey(keyName, dataStr.length()); keyOutputStream.write(dataStr.getBytes()); keyOutputStream.close(); OzoneKey keyInfo = bucket.getKey(keyName); assertEquals(keyName, keyInfo.getName()); String[] args = new String[] {"key", "delete", url + "/" + volumeName + "/" + bucketName + "/" + keyName}; execute(shell, args); // verify if key has been deleted in the bucket try { bucket.getKey(keyName); fail("Get key should have thrown."); } catch (IOException e) { GenericTestUtils.assertExceptionContains( "Lookup key failed, error:KEY_NOT_FOUND", e); } // test delete key in a non-exist bucket args = new String[] {"key", "delete", url + "/" + volumeName + "/invalid-bucket/" + keyName}; executeWithError(shell, args, "Info Bucket 
failed, error: BUCKET_NOT_FOUND"); err.reset(); // test delete a non-exist key in bucket args = new String[] {"key", "delete", url + "/" + volumeName + "/" + bucketName + "/invalid-key"}; executeWithError(shell, args, "Delete key failed, error:KEY_NOT_FOUND"); } @Test public void testInfoKeyDetails() throws Exception { LOG.info("Running testInfoKey"); String keyName = "key" + RandomStringUtils.randomNumeric(5); OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String dataStr = "test-data"; OzoneOutputStream keyOutputStream = bucket.createKey(keyName, dataStr.length()); keyOutputStream.write(dataStr.getBytes()); keyOutputStream.close(); String[] args = new String[] {"key", "info", url + "/" + volumeName + "/" + bucketName + "/" + keyName}; // verify the response output execute(shell, args); String output = out.toString(); assertTrue(output.contains(keyName)); assertTrue( output.contains("createdOn") && output.contains("modifiedOn") && output .contains(OzoneConsts.OZONE_TIME_ZONE)); assertTrue( output.contains("containerID") && output.contains("localID") && output .contains("length") && output.contains("offset")); // reset stream out.reset(); err.reset(); // get the info of a non-exist key args = new String[] {"key", "info", url + "/" + volumeName + "/" + bucketName + "/invalid-key"}; // verify the response output // get the non-exist key info should be failed executeWithError(shell, args, "Lookup key failed, error:KEY_NOT_FOUND"); } @Test public void testInfoDirKey() throws Exception { LOG.info("Running testInfoKey for Dir Key"); String dirKeyName = "test/"; String keyNameOnly = "test"; OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String dataStr = "test-data"; OzoneOutputStream keyOutputStream = bucket.createKey(dirKeyName, dataStr.length()); keyOutputStream.write(dataStr.getBytes()); keyOutputStream.close(); String[] args = 
new String[] {"key", "info", url + "/" + volumeName + "/" + bucketName + "/" + dirKeyName}; // verify the response output execute(shell, args); String output = out.toString(); assertTrue(output.contains(dirKeyName)); assertTrue(output.contains("createdOn") && output.contains("modifiedOn") && output.contains(OzoneConsts.OZONE_TIME_ZONE)); args = new String[] {"key", "info", url + "/" + volumeName + "/" + bucketName + "/" + keyNameOnly}; executeWithError(shell, args, "Lookup key failed, error:KEY_NOT_FOUND"); out.reset(); err.reset(); } @Test public void testListKey() throws Exception { LOG.info("Running testListKey"); String commandOutput; List<KeyInfo> keys; int keyCount = 11; OzoneBucket bucket = creatBucket(); String volumeName = bucket.getVolumeName(); String bucketName = bucket.getName(); String keyName; List<String> keyNames = new ArrayList<>(); for (int i = 0; i < keyCount; i++) { keyName = "test-key" + i; keyNames.add(keyName); String dataStr = "test-data"; OzoneOutputStream keyOutputStream = bucket.createKey(keyName, dataStr.length()); keyOutputStream.write(dataStr.getBytes()); keyOutputStream.close(); } // test listKey with invalid bucket name String[] args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName + "/invalid-name"}; executeWithError(shell, args, "Invalid bucket name. 
" + "Delimiters (/) not allowed in bucket name"); // test -length option args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName, "--length", "100"}; execute(shell, args); commandOutput = out.toString(); keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput, KeyInfo.class); assertEquals(11, keys.size()); // sort key names since the return keys isn't in created order Collections.sort(keyNames); // return key names should be [test-key0, test-key1, // test-key10, test-key2, ,..., test-key9] for (int i = 0; i < keys.size(); i++) { assertEquals(keys.get(i).getKeyName(), keyNames.get(i)); // verify the creation/modification time of key assertTrue(keys.get(i).getCreatedOn() .contains(OzoneConsts.OZONE_TIME_ZONE)); assertTrue(keys.get(i).getModifiedOn() .contains(OzoneConsts.OZONE_TIME_ZONE)); } out.reset(); args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName, "--length", "3"}; execute(shell, args); commandOutput = out.toString(); keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput, KeyInfo.class); assertEquals(3, keys.size()); // return key names should be [test-key0, test-key1, test-key10] assertEquals(keys.get(0).getKeyName(), "test-key0"); assertEquals(keys.get(1).getKeyName(), "test-key1"); assertEquals(keys.get(2).getKeyName(), "test-key10"); // test --prefix option out.reset(); args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName, "--length", "100", "--prefix", "test-key1"}; execute(shell, args); commandOutput = out.toString(); keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput, KeyInfo.class); assertEquals(2, keys.size()); // return key names should be [test-key1, test-key10] assertEquals(keys.get(0).getKeyName(), "test-key1"); assertEquals(keys.get(1).getKeyName(), "test-key10"); // test -start option out.reset(); args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName, "--length", "100", "--start", "test-key7"}; execute(shell, args); commandOutput = 
out.toString(); keys = (List<KeyInfo>) JsonUtils.toJsonList(commandOutput, KeyInfo.class); assertEquals(keys.get(0).getKeyName(), "test-key8"); assertEquals(keys.get(1).getKeyName(), "test-key9"); // test error conditions err.reset(); args = new String[] {"key", "list", url + "/" + volumeName + "/" + bucketName, "--length", "-1"}; executeWithError(shell, args, "the length should be a positive number"); } @Test public void testS3BucketMapping() throws IOException { List<ServiceInfo> services = cluster.getOzoneManager().getServiceList(); String omHostName = services.stream().filter( a -> a.getNodeType().equals(HddsProtos.NodeType.OM)) .collect(Collectors.toList()).get(0).getHostname(); String omPort = cluster.getOzoneManager().getRpcPort(); String setOmAddress = "--set=" + OZONE_OM_ADDRESS_KEY + "=" + omHostName + ":" + omPort; String s3Bucket = "bucket1"; String commandOutput; createS3Bucket("ozone", s3Bucket); //WHEN String[] args = new String[] {setOmAddress, "bucket", "path", s3Bucket}; execute(shell, args); //THEN commandOutput = out.toString(); String volumeName = client.getOzoneVolumeName(s3Bucket); assertTrue(commandOutput.contains("Volume name for S3Bucket is : " + volumeName)); assertTrue(commandOutput.contains(OzoneConsts.OZONE_URI_SCHEME + "://" + s3Bucket + "." 
+ volumeName)); out.reset(); //Trying to get map for an unknown bucket args = new String[] {setOmAddress, "bucket", "path", "unknownbucket"}; executeWithError(shell, args, "S3_BUCKET_NOT_FOUND"); // No bucket name args = new String[] {setOmAddress, "bucket", "path"}; executeWithError(shell, args, "Missing required parameter"); // Invalid bucket name args = new String[] {setOmAddress, "bucket", "path", "/asd/multipleslash"}; executeWithError(shell, args, "S3_BUCKET_NOT_FOUND"); } private void createS3Bucket(String userName, String s3Bucket) { try { client.createS3Bucket("ozone", s3Bucket); } catch (IOException ex) { GenericTestUtils.assertExceptionContains("S3_BUCKET_ALREADY_EXISTS", ex); } } private OzoneVolume creatVolume() throws OzoneException, IOException { String volumeName = RandomStringUtils.randomNumeric(5) + "volume"; VolumeArgs volumeArgs = VolumeArgs.newBuilder() .setOwner("bilbo") .setQuota("100TB") .build(); try { client.createVolume(volumeName, volumeArgs); } catch (Exception ex) { Assert.assertEquals("PartialGroupNameException", ex.getCause().getClass().getSimpleName()); } OzoneVolume volume = client.getVolumeDetails(volumeName); return volume; } private OzoneBucket creatBucket() throws OzoneException, IOException { OzoneVolume vol = creatVolume(); String bucketName = RandomStringUtils.randomNumeric(5) + "bucket"; vol.createBucket(bucketName); OzoneBucket bucketInfo = vol.getBucket(bucketName); return bucketInfo; } /** * Create a temporary file used for putting key. * @return the created file's path string * @throws Exception */ private String createTmpFile() throws Exception { // write a new file that used for putting key File tmpFile = new File(baseDir, "/testfile-" + UUID.randomUUID().toString()); FileOutputStream randFile = new FileOutputStream(tmpFile); Random r = new Random(); for (int x = 0; x < 10; x++) { char c = (char) (r.nextInt(26) + 'a'); randFile.write(c); } randFile.close(); return tmpFile.getAbsolutePath(); } }
apache-2.0
adaptris/interlok
interlok-core/src/test/java/com/adaptris/core/services/jdbc/JdbcMapInsertCase.java
4375
/* * Copyright 2015 Adaptris Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adaptris.core.services.jdbc; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.Statement; import org.junit.Test; import com.adaptris.core.CoreException; import com.adaptris.core.jdbc.JdbcConnection; import com.adaptris.core.util.JdbcUtil; import com.adaptris.core.util.LifecycleHelper; import com.adaptris.util.KeyValuePair; import com.adaptris.util.KeyValuePairSet; public abstract class JdbcMapInsertCase { protected static final String CONTENT = "firstname=alice\n" + "lastname=smith\n" + "dob=2017-01-01"; protected static final String INVALID_COLUMN = "fi$rstname=alice\n" + "la$stname=smith\n" + "dob=2017-01-01"; protected static final String JDBC_DRIVER = "org.apache.derby.jdbc.EmbeddedDriver"; protected static final String JDBC_URL = "jdbc:derby:memory:JDCB_OBJ_DB;create=true"; protected static final String TABLE_NAME = "people"; protected static final String DROP_STMT = String.format("DROP TABLE %s", TABLE_NAME); protected static final String CREATE_STMT = String.format("CREATE TABLE %s (firstname VARCHAR(128) NOT NULL, " + "lastname VARCHAR(128) NOT NULL, " + "dob DATE)", TABLE_NAME); protected static final String CREATE_QUOTED = String.format( "CREATE TABLE %s (\"firstname\" VARCHAR(128) NOT NULL, \"lastname\" 
VARCHAR(128) NOT NULL, \"dob\" DATE)", TABLE_NAME); @Test public void testService_Init() throws Exception { JdbcMapInsert service = createService(); try { LifecycleHelper.init(service); fail(); } catch (CoreException expected) { } service.setTable("hello"); LifecycleHelper.init(service); } protected abstract JdbcMapInsert createService(); protected static void doAssert(int expectedCount) throws Exception { Connection c = null; PreparedStatement p = null; try { c = createConnection(); p = c.prepareStatement(String.format("SELECT * FROM %s", TABLE_NAME)); ResultSet rs = p.executeQuery(); int count = 0; while (rs.next()) { count++; assertEquals("smith", rs.getString("lastname")); } assertEquals(expectedCount, count); JdbcUtil.closeQuietly(rs); } finally { JdbcUtil.closeQuietly(p); JdbcUtil.closeQuietly(c); } } protected static Connection createConnection() throws Exception { Connection c = null; Class.forName(JDBC_DRIVER); c = DriverManager.getConnection(JDBC_URL); c.setAutoCommit(true); return c; } protected static void createDatabase() throws Exception { createDatabase(CREATE_STMT); } protected static void createDatabase(String createStmt) throws Exception { Connection c = null; Statement s = null; try { c = createConnection(); s = c.createStatement(); executeQuietly(s, DROP_STMT); s.execute(createStmt); } finally { JdbcUtil.closeQuietly(s); JdbcUtil.closeQuietly(c); } } protected static void executeQuietly(Statement s, String sql) { try { s.execute(sql); } catch (Exception e) { ; } } protected static <T extends JdbcMapInsert> T configureForTests(T t) { JdbcMapInsert service = t; JdbcConnection connection = new JdbcConnection(); connection.setConnectUrl(JDBC_URL); connection.setDriverImp(JDBC_DRIVER); service.setConnection(connection); KeyValuePairSet mappings = new KeyValuePairSet(); mappings.add(new KeyValuePair("dob", JdbcMapInsert.BasicType.Date.name())); service.withTable(TABLE_NAME).withMappings(mappings); return t; } }
apache-2.0
icza/scelight
src-app/hu/scelight/gui/page/replist/column/impl/DateColumn.java
896
/* * Project Scelight * * Copyright (c) 2013 Andras Belicza <iczaaa@gmail.com> * * This software is the property of Andras Belicza. * Copying, modifying, distributing, refactoring without the author's permission * is prohibited and protected by Law. */ package hu.scelight.gui.page.replist.column.impl; import hu.scelight.gui.icon.Icons; import hu.scelight.gui.page.replist.column.BaseColumn; import hu.scelight.sc2.rep.repproc.RepProcessor; import java.util.Date; /** * Replay date column. * * @author Andras Belicza */ public class DateColumn extends BaseColumn< Date > { /** * Creates a new {@link DateColumn}. */ public DateColumn() { super( "Date", Icons.F_CALENDAR_BLUE, "Replay date", Date.class, true ); } @Override public Date getData( final RepProcessor repProc ) { return repProc.replay.details.getTime(); } }
apache-2.0
perka/flatpack-java
core/src/test/java/com/getperka/flatpack/codex/CollectionCodexTest.java
4155
/*
 * #%L
 * FlatPack serialization code
 * %%
 * Copyright (C) 2012 Perka Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.getperka.flatpack.codex;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

import javax.inject.Inject;

import org.junit.Test;

import com.getperka.flatpack.FlatPackTest;
import com.getperka.flatpack.HasUuid;
import com.getperka.flatpack.codexes.ArrayCodex;
import com.getperka.flatpack.codexes.ListCodex;
import com.getperka.flatpack.codexes.SetCodex;
import com.getperka.flatpack.domain.Employee;
import com.getperka.flatpack.domain.Person;
import com.getperka.flatpack.util.FlatPackCollections;
import com.google.inject.TypeLiteral;

/**
 * Tests serializing collections of things.
 * <p>
 * Each test round-trips a collection through the corresponding codex (via the
 * inherited {@code testCodex} helper) and checks the output equals the input.
 * The {@code TypeLiteral} fields are injected so the concrete codex instances
 * come from the test's injector.
 */
public class CollectionCodexTest extends FlatPackTest {

  @Inject
  private TypeLiteral<ArrayCodex<Person>> arrayPerson;
  @Inject
  private TypeLiteral<ArrayCodex<String>> arrayString;
  @Inject
  private TypeLiteral<ListCodex<Person>> listPerson;
  @Inject
  private TypeLiteral<ListCodex<String>> listString;
  @Inject
  private TypeLiteral<SetCodex<String>> setString;
  @Inject
  private Employee employee;

  /**
   * Round-trips String and entity arrays; nulls, empties and duplicate entity
   * references must survive, and scanned entities are collected once.
   */
  @Test
  public void testArray() {
    String[] in = { "Hello", " ", "", null, "World!" };
    String[] out = testCodex(arrayString, in);
    assertArrayEquals(in, out);

    Set<HasUuid> scanned = FlatPackCollections.setForIteration();
    Employee[] in2 = { employee, null, employee };
    Person[] out2 = testCodex(arrayPerson, in2, scanned);
    // The duplicate reference must be scanned only once.
    assertEquals(Collections.singleton(employee), scanned);
    /*
     * Because we're testing without a full flatpack structure, all we can expect is that a HasUuid
     * is created with the same UUID. The concrete type would normally be specified in the data
     * section, however it is missing, so we expect the configured type of the codex instead.
     */
    Person p = out2[0];
    assertNotNull(p);
    assertEquals(Person.class, p.getClass());
    assertEquals(employee.getUuid(), p.getUuid());
  }

  /**
   * Same round-trip contract as {@link #testArray()} but for {@code List}s.
   */
  @Test
  public void testList() {
    List<String> in = Arrays.asList("Hello", " ", "", null, "World!");
    Collection<String> out = testCodex(listString, in);
    assertEquals(in, out);

    Set<HasUuid> scanned = FlatPackCollections.setForIteration();
    List<Person> in2 = Arrays.<Person> asList(employee, null, employee);
    Collection<Person> out2 = testCodex(listPerson, in2, scanned);
    assertEquals(Collections.singleton(employee), scanned);
    /*
     * Because we're testing without a full flatpack structure, all we can expect is that a HasUuid
     * is created with the same UUID. The concrete type would normally be specified in the data
     * section, however it is missing, so we expect the configured type of the codex instead.
     */
    Person p = ((List<Person>) out2).get(0);
    assertNotNull(p);
    assertEquals(Person.class, p.getClass());
    assertEquals(employee.getUuid(), p.getUuid());
  }

  /** Null inputs must round-trip to null for every collection codex. */
  @Test
  public void testNull() {
    assertNull(testCodex(arrayString, null));
    assertNull(testCodex(listString, null));
    assertNull(testCodex(setString, null));
  }

  /**
   * Round-trips a Set; LinkedHashSet keeps insertion order so equals() also
   * exercises the element round-trip deterministically.
   */
  @Test
  public void testSet() {
    Set<String> in = new LinkedHashSet<String>(Arrays.asList("Hello", " ", "", null, "World!"));
    Set<String> out = testCodex(setString, in);
    assertEquals(in, out);
  }
}
apache-2.0
google/supl-client
src/main/java/com/google/location/suplclient/asn1/supl2/lpp/GNSS_RealTimeIntegrityReq.java
4587
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.location.suplclient.asn1.supl2.lpp;

// Copyright 2008 Google Inc. All Rights Reserved.
/*
 * This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
 */
//
//
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;

/**
 * Generated PER codec for the LPP {@code GNSS-RealTimeIntegrityReq} ASN.1
 * SEQUENCE. The sequence currently has no components of its own (both
 * component lists below are empty) but is marked extensible, so future
 * extension additions can be decoded/skipped.
 */
public class GNSS_RealTimeIntegrityReq extends Asn1Sequence {
  //
  // Tag class/number of -1 means "untagged" here; getPossibleFirstTags()
  // falls back to the generic Asn1Sequence tags in that case.
  private static final Asn1Tag TAG_GNSS_RealTimeIntegrityReq
      = Asn1Tag.fromClassAndNumber(-1, -1);

  public GNSS_RealTimeIntegrityReq() {
    super();
  }

  @Override
  @Nullable
  protected Asn1Tag getTag() {
    return TAG_GNSS_RealTimeIntegrityReq;
  }

  @Override
  protected boolean isTagImplicit() {
    return true;
  }

  /** Tags that may begin an encoding of this type. */
  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_GNSS_RealTimeIntegrityReq != null) {
      return ImmutableList.of(TAG_GNSS_RealTimeIntegrityReq);
    } else {
      return Asn1Sequence.getPossibleFirstTags();
    }
  }

  /**
   * Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
   */
  public static GNSS_RealTimeIntegrityReq fromPerUnaligned(byte[] encodedBytes) {
    GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }

  /**
   * Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
   */
  public static GNSS_RealTimeIntegrityReq fromPerAligned(byte[] encodedBytes) {
    GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }

  @Override
  protected boolean isExtensible() {
    return true;
  }

  @Override
  public boolean containsExtensionValues() {
    for (SequenceComponent extensionComponent : getExtensionComponents()) {
      if (extensionComponent.isExplicitlySet()) return true;
    }
    return false;
  }

  @Override
  public Iterable<? extends SequenceComponent> getComponents() {
    // No root components are defined for this sequence.
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }

  @Override
  public Iterable<? extends SequenceComponent> getExtensionComponents() {
    // No extension components are defined (yet) either.
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }

  @Override
  public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }

  @Override
  public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }

  @Override
  public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }

  @Override
  public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }

  @Override
  public String toString() {
    return toIndentedString("");
  }

  /** Debug dump of any explicitly-set components, one per line. */
  public String toIndentedString(String indent) {
    StringBuilder builder = new StringBuilder();
    builder.append("GNSS_RealTimeIntegrityReq = {\n");
    final String internalIndent = indent + " ";
    for (SequenceComponent component : getComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
    if (isExtensible()) {
      builder.append(internalIndent).append("...\n");
      for (SequenceComponent component : getExtensionComponents()) {
        if (component.isExplicitlySet()) {
          builder.append(internalIndent)
              .append(component.toIndentedString(internalIndent));
        }
      }
    }
    builder.append(indent).append("};\n");
    return builder.toString();
  }
}
apache-2.0
eisnerh/PCT_315
TropiCabinas/src/modelo/formularios/Interfaz_Factura.java
2200
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package modelo.formularios;

import controlador.dbConnection;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.swing.JOptionPane;

/**
 * Invoice ("factura") lookup against the pct3.factura_cabina table.
 *
 * @author Eisner López Acevedo <eisner.lopez at gmail.com>
 */
public class Interfaz_Factura {

    private final dbConnection myLink = new dbConnection();
    private final Connection conexion = dbConnection.getConnection();
    private String querySQL = "";
    ResultSet rs = null;
    PreparedStatement pst = null;

    /**
     * Looks up an invoice by its invoice number.
     * <p>
     * Fixes over the previous version: the invoice number is now bound as a
     * {@code PreparedStatement} parameter instead of being concatenated into
     * the SQL (preventing SQL injection), the statement and result set are
     * closed via try-with-resources, and the method reports whether a row was
     * actually found (it previously always returned {@code false}).
     *
     * @param Buscar the invoice number ({@code numero_factura}) to search for
     * @return {@code true} when at least one matching row was found
     */
    public boolean mostrarFactura(String Buscar) {
        String[] registro = new String[8];
        querySQL = "SELECT `factura_cabina`.`factura_id`, "
                + "`factura_cabina`.`cant_dia`, "
                + "`factura_cabina`.`fecha`, "
                + "`factura_cabina`.`impuesto_cabina`, "
                + "`factura_cabina`.`precio_total_cabina`, "
                + "`factura_cabina`.`cabina_cabina_id`, "
                + "`factura_cabina`.`colaborador_empleado_id`, "
                + "`factura_cabina`.`numero_factura` "
                + "FROM `pct3`.`factura_cabina` "
                + "WHERE `factura_cabina`.`numero_factura` = ? "
                + "ORDER BY `factura_cabina`.`numero_factura`";
        boolean encontrado = false;
        try (PreparedStatement st = conexion.prepareStatement(querySQL)) {
            st.setString(1, Buscar);
            try (ResultSet resultado = st.executeQuery()) {
                while (resultado.next()) {
                    // Copy the 8 selected columns in SELECT order
                    // (JDBC columns are 1-based).
                    for (int col = 0; col < registro.length; col++) {
                        registro[col] = resultado.getString(col + 1);
                    }
                    encontrado = true;
                }
            }
        } catch (SQLException sqle) {
            // Message dialog, not a confirm dialog: this is an error
            // notification, not a yes/no question.
            JOptionPane.showMessageDialog(null, sqle);
        }
        return encontrado;
    }
}
apache-2.0
Talvish/Tales-Samples
user_client/src/main/java/com/talvish/tales/samples/userclient/UserClient.java
7943
// ***************************************************************************
// *  Copyright 2014 Joseph Molnar
// *
// *  Licensed under the Apache License, Version 2.0 (the "License");
// *  you may not use this file except in compliance with the License.
// *  You may obtain a copy of the License at
// *
// *      http://www.apache.org/licenses/LICENSE-2.0
// *
// *  Unless required by applicable law or agreed to in writing, software
// *  distributed under the License is distributed on an "AS IS" BASIS,
// *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// *  See the License for the specific language governing permissions and
// *  limitations under the License.
// ***************************************************************************
package com.talvish.tales.samples.userclient;

import java.time.LocalDate;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.talvish.tales.businessobjects.ObjectId;
import com.talvish.tales.client.http.ResourceClient;
import com.talvish.tales.client.http.ResourceConfiguration;
import com.talvish.tales.client.http.ResourceMethod;
import com.talvish.tales.client.http.ResourceResult;
import com.talvish.tales.communication.HttpVerb;
import com.talvish.tales.parts.ArgumentParser;
import com.talvish.tales.system.configuration.ConfigurationManager;
import com.talvish.tales.system.configuration.MapSource;
import com.talvish.tales.system.configuration.PropertyFileSource;

/**
 * The client for talking to the UserService.
 * @author jmolnar
 *
 */
public class UserClient extends ResourceClient {
	private static final Logger logger = LoggerFactory.getLogger( UserClient.class );

	/**
	 * This main is really just to demonstrate calling and would not exist in an actual client.
	 * It reads configuration from the command line (and optionally a properties file named by
	 * "settings.file"), then runs one demo operation selected by the "operation" setting:
	 * "update_user" (the default) fetches user 1-1-100, mutates it and saves it back;
	 * "create_user" creates a fresh user. Blocks on console input before exiting.
	 */
	public static void main( String[ ] theArgs ) throws Exception {
		// get the configuration system up and running
		ConfigurationManager configurationManager = new ConfigurationManager( );
		// we prepare two sources for configurations
		// first the command line source
		configurationManager.addSource( new MapSource( "command-line", ArgumentParser.parse( theArgs ) ) );
		// second the file source, if the command-line indicates a file is to be used
		String filename = configurationManager.getStringValue( "settings.file", null ); // we will store config in a file ideally
		if( !Strings.isNullOrEmpty( filename ) ) {
			configurationManager.addSource( new PropertyFileSource( filename ) );
		}
		UserClient client = new UserClient( configurationManager.getValues( "user_service", ResourceConfiguration.class ), "sample_user_client/1.0" );
		// client.setHeaderOverride( "Authorization", "random" ); //<= for testing, perhaps want to override this value, assuming server allows overrides

		// next we see what mode we are in, setup or not setup
		String operation = configurationManager.getStringValue( "operation", "update_user" );
		ResourceResult<User> result;

		switch( operation ) {
		case "update_user":
			result = client.getUser( new ObjectId( 1, 1, 100 ) );
			if( result.getResult() != null ) {
				logger.debug( "Found user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
				result.getResult().setFirstName( "Bilbo" );
				result.getResult().getAliases( ).add( "billy" );
				result.getResult().getSettings().put( "favourite_category", "games" );
				result = client.updateUser( result.getResult() );
				logger.debug( "Updated user: '{}'", result.getResult().getFirstName( ) );
			} else {
				logger.debug( "Did not find user." );
			}
			break;

		case "create_user":
			//for( int i = 0; i < 1; i += 1 ) {
				User user = new User( );
				user.setFirstName( "Jimmy" );
				user.setMiddleName( "Scott" );
				user.setLastName( "McWhalter" );
				user.setBirthdate( LocalDate.of( 1992, 1, 31 ) );
				user.getAliases().add( "alias1" );

				result = client.createUser( user );
				if( result.getResult() != null ) {
					logger.debug( "Created user: '{}'/'{}'", result.getResult().getId(), result.getResult().getFirstName( ) );
				} else {
					logger.debug( "Did not create user." );
				}
			//}
			break;

		default:
			break;
		}

		// TODO: this doesn't exit at the end of the main here, need to understand why
		//       (which is why I added the System.exit(0)
		// TODO: one time when this ran it throw some form of SSL EOF related error that
		//       I need to track down (this happened on the server too)
		System.console().writer().print( "Please <Enter> to quit ..." );
		System.console().writer().flush();
		System.console().readLine();
		System.exit( 0 );
	}

	// NOTE(review): hard-coded sample credential used by all requests below;
	// fine for a sample client, never do this in production code.
	private String authToken = "Sample key=\"42349840984\"";

	/**
	 * The constructor used to create the client.
	 * @param theConfiguration the configuration needed to talk to the service
	 * @param theUserAgent the user agent to use while talking to the service
	 */
	public UserClient( ResourceConfiguration theConfiguration, String theUserAgent ) {
		super( theConfiguration, "/user", "20140124", theUserAgent );

		// we now define the methods that we are going to expose for calling
		this.methods = new ResourceMethod[ 3 ];

		this.methods[ 0 ] = this.defineMethod( "get_user", User.class, HttpVerb.GET, "users/{id}" )
				.definePathParameter("id", ObjectId.class )
				.defineHeaderParameter( "Authorization", String.class );

		this.methods[ 1 ] = this.defineMethod( "update_user", User.class, HttpVerb.POST, "users/{id}/update" )
				.definePathParameter( "id", ObjectId.class )
				.defineBodyParameter( "user", User.class )
				.defineHeaderParameter( "Authorization", String.class );

		this.methods[ 2 ] = this.defineMethod( "create_user", User.class, HttpVerb.POST, "users/create" )
				.defineBodyParameter( "user", User.class )
				.defineHeaderParameter( "Authorization", String.class );
	}

	/**
	 * Requests a particular user.
	 * @param theUserId the id of the user being requested
	 * @return the requested user, if found, null otherwise
	 * @throws InterruptedException thrown if the calling thread is interrupted
	 */
	public ResourceResult<User> getUser( ObjectId theUserId ) throws InterruptedException {
		Preconditions.checkNotNull( theUserId, "need a user id to retrieve a user" );
		return this.createRequest( this.methods[ 0 ], theUserId )
				.setHeaderParameter( "Authorization", this.authToken )
				.call();
	}

	/**
	 * A call to save the values of a user on the server.
	 * @param theUser the user to save
	 * @return the server returned version of the saved user
	 * @throws InterruptedException thrown if the calling thread is interrupted
	 */
	public ResourceResult<User> updateUser( User theUser ) throws InterruptedException {
		Preconditions.checkNotNull( theUser, "need a user to be able to update" );
		return this.createRequest( this.methods[ 1 ], theUser.getId() )
				.setBodyParameter( "user", theUser )
				.setHeaderParameter( "Authorization", this.authToken )
				.call();
	}

	/**
	 * A call to create a new user.
	 * @param theUser the user to create; must not have an id yet and needs at least a first name
	 * @return the freshly created user
	 * @throws InterruptedException thrown if the calling thread is interrupted
	 */
	public ResourceResult<User> createUser( User theUser) throws InterruptedException {
		Preconditions.checkNotNull( theUser, "need a user" );
		Preconditions.checkArgument( theUser.getId( ) == null, "user's id must be null" );
		Preconditions.checkArgument( !Strings.isNullOrEmpty( theUser.getFirstName() ), "to create a user you need a first name" );
		return this.createRequest( this.methods[ 2 ] )
				.setBodyParameter( "user", theUser )
				.setHeaderParameter( "Authorization", this.authToken )
				.call();
	}
}
apache-2.0
IHTSDO/snomed-publish
model/src/main/java/com/ihtsdo/snomed/model/xml/RefsetDtoShort.java
6129
package com.ihtsdo.snomed.model.xml;

import java.sql.Date;

import javax.xml.bind.annotation.XmlRootElement;

import com.google.common.base.Objects;
import com.google.common.primitives.Longs;
import com.ihtsdo.snomed.dto.refset.RefsetDto;
import com.ihtsdo.snomed.model.refset.Refset;

/**
 * Short-form XML DTO for a {@link Refset}, exposing summary fields only.
 * <p>
 * NOTE(review): {@link #equals(Object)} and {@link #hashCode()} are based
 * solely on {@code id}, but the {@link Builder} never assigns {@code id}
 * (it stays 0), so all builder-built instances compare equal to each other —
 * presumably these are only compared when constructed from a {@link Refset};
 * confirm before relying on equality.
 */
@XmlRootElement(name="refset")
public class RefsetDtoShort {

    private long id;
    private XmlRefsetConcept concept;
    private String publicId;
    private String title;
    private String description;
    private Date created;
    private Date lastModified;
    private int memberSize;
    private String snomedExtension;
    private String snomedReleaseDate;
    private boolean pendingChanges;

    /** Copies the summary fields of the given refset into this DTO. */
    public RefsetDtoShort(Refset r){
        setId(r.getId());
        setConcept(new XmlRefsetConcept(r.getRefsetConcept()));
        setPublicId(r.getPublicId());
        setTitle(r.getTitle());
        setDescription(r.getDescription());
        setCreated(r.getCreationTime());
        setLastModified(r.getModificationTime());
        setPendingChanges(r.isPendingChanges());
        setMemberSize(r.getMemberSize());
        setSnomedExtension(r.getOntologyVersion().getFlavour().getPublicId());
        // The release date is serialized as a string using the shared format.
        setSnomedReleaseDate(RefsetDto.dateFormat.format(r.getOntologyVersion().getTaggedOn()));
    }

    /** No-arg constructor required by JAXB. */
    public RefsetDtoShort(){}

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
                .add("id", getId())
                .add("concept", getConcept())
                .add("publicId", getPublicId())
                .add("title", getTitle())
                .add("description", getDescription())
                .add("created", getCreated())
                .add("lastModified", getLastModified())
                .add("pendingChanges", isPendingChanges())
                .add("memberSize", getMemberSize())
                .add("snomedExtension", getSnomedExtension())
                .add("snomedReleaseDate", getSnomedReleaseDate())
                .toString();
    }

    // Identity is the numeric id only (see class note about builder-built
    // instances all sharing id 0).
    @Override
    public int hashCode(){
        return Longs.hashCode(getId());
    }

    @Override
    public boolean equals(Object o){
        if (o instanceof RefsetDtoShort){
            RefsetDtoShort r = (RefsetDtoShort) o;
            if (r.getId() == this.getId()){
                return true;
            }
        }
        return false;
    }

    public boolean isPendingChanges() {
        return pendingChanges;
    }

    public void setPendingChanges(boolean pendingChanges) {
        this.pendingChanges = pendingChanges;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public XmlRefsetConcept getConcept() {
        return concept;
    }

    public void setConcept(XmlRefsetConcept concept) {
        this.concept = concept;
    }

    public String getPublicId() {
        return publicId;
    }

    public void setPublicId(String publicId) {
        this.publicId = publicId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Date getLastModified() {
        return lastModified;
    }

    public void setLastModified(Date lastModified) {
        this.lastModified = lastModified;
    }

    public int getMemberSize() {
        return memberSize;
    }

    public void setMemberSize(int memberSize) {
        this.memberSize = memberSize;
    }

    public String getSnomedExtension() {
        return snomedExtension;
    }

    public void setSnomedExtension(String snomedExtension) {
        this.snomedExtension = snomedExtension;
    }

    public String getSnomedReleaseDate() {
        return snomedReleaseDate;
    }

    public void setSnomedReleaseDate(String snomedReleaseDate) {
        this.snomedReleaseDate = snomedReleaseDate;
    }

    /** Factory equivalent to {@link #RefsetDtoShort(Refset)} but via the builder (does not set id). */
    public static RefsetDtoShort parse(Refset r){
        return getBuilder(new XmlRefsetConcept(r.getRefsetConcept()), r.getPublicId(),
                r.getTitle(), r.getDescription(), r.getCreationTime(), r.getModificationTime(),
                r.isPendingChanges(), r.getMemberSize(),
                r.getOntologyVersion().getFlavour().getPublicId(),
                r.getOntologyVersion().getTaggedOn()).build();
    }

    public static Builder getBuilder(XmlRefsetConcept concept, String publicId, String title,
            String description, Date created, Date lastModified, boolean pendingChanges,
            int memberSize, String snomedExtension, Date snomedReleaseDate) {
        return new Builder(concept, publicId, title, description, created, lastModified,
                pendingChanges, memberSize, snomedExtension, snomedReleaseDate);
    }

    /** Builder for all fields except {@code id} (left at its default of 0). */
    public static class Builder {
        private RefsetDtoShort built;

        Builder(XmlRefsetConcept concept, String publicId, String title,
                String description, Date created, Date lastModified, boolean pendingChanges,
                int memberSize, String snomedExtension, Date snomedReleaseDate){
            built = new RefsetDtoShort();
            built.concept = concept;
            built.publicId = publicId;
            built.title = title;
            built.description = description;
            built.created = created;
            built.lastModified = lastModified;
            built.pendingChanges = pendingChanges;
            built.memberSize = memberSize;
            built.setSnomedExtension(snomedExtension);
            built.setSnomedReleaseDate(RefsetDto.dateFormat.format(snomedReleaseDate));
        }

        public RefsetDtoShort build() {
            return built;
        }
    }
}
apache-2.0
newbieandroid/AppBase
app/src/main/java/com/fuyoul/sanwenseller/bean/pickerview/ProvinceModel.java
915
package com.fuyoul.sanwenseller.bean.pickerview;

import java.util.List;

/**
 * Province entry for the picker view: a province name plus its cities.
 * The picker displays {@link #getPickerViewText()}, i.e. the province name.
 */
public class ProvinceModel implements IPickerViewData {

    private String name;
    private List<CityModel> cityList;

    public ProvinceModel() {
        super();
    }

    public ProvinceModel(String name, List<CityModel> cityList) {
        super();
        this.name = name;
        this.cityList = cityList;
    }

    /** Text shown in the picker widget — the province name. */
    @Override
    public String getPickerViewText() {
        return name;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<CityModel> getCityList() {
        return cityList;
    }

    public void setCityList(List<CityModel> cityList) {
        this.cityList = cityList;
    }

    @Override
    public String toString() {
        // Produces the exact same text as the previous concatenation-based version.
        return new StringBuilder("ProvinceModel [name=")
                .append(name)
                .append(", cityList=")
                .append(cityList)
                .append("]")
                .toString();
    }
}
apache-2.0
140293816/Hawkular-fork
api/metrics-api-jaxrs/src/main/java/org/hawkular/metrics/api/jaxrs/handler/BaseHandler.java
2802
/* * Copyright 2014-2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.metrics.api.jaxrs.handler; import static javax.ws.rs.core.MediaType.APPLICATION_JSON; import static javax.ws.rs.core.MediaType.APPLICATION_XHTML_XML; import static javax.ws.rs.core.MediaType.TEXT_HTML; import com.wordnik.swagger.annotations.ApiOperation; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import org.jboss.resteasy.spi.ResteasyProviderFactory; /** * @author mwringe */ @Path("/") public class BaseHandler { public static final String PATH = "/"; @GET @Produces(APPLICATION_JSON) @ApiOperation(value = "Returns some basic information about the Hawkular Metrics service.", response = String.class, responseContainer = "Map") public Response baseJSON(@Context ServletContext context) { String version = context.getInitParameter("hawkular.metrics.version"); if (version == null) { version = "undefined"; } HawkularMetricsBase hawkularMetrics = new HawkularMetricsBase(); hawkularMetrics.version = version; return Response.ok(hawkularMetrics).build(); } @GET @Produces({APPLICATION_XHTML_XML, TEXT_HTML}) public void baseHTML(@Context ServletContext context) throws Exception { 
HttpServletRequest request = ResteasyProviderFactory.getContextData(HttpServletRequest.class); HttpServletResponse response = ResteasyProviderFactory.getContextData(HttpServletResponse.class); request.getRequestDispatcher("/static/index.html").forward(request,response); } private class HawkularMetricsBase { String name = "Hawkular-Metrics"; String version; public String getName() { return name; } public void setVersion(String version) { this.version = version; } public String getVersion() { return version; } } }
apache-2.0
mike10004/appengine-imaging
gaecompat-awt-imaging/src/common/javax/accessibility/AccessibleText.java
1610
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
/**
 * @author Dennis Ushakov
 */
package javax.accessibility;

import com.gaecompat.javax.swing.text.AttributeSet;
import com.google.code.appengine.awt.Point;
import com.google.code.appengine.awt.Rectangle;

/**
 * Contract for accessible components that expose their textual content
 * (caret position, selection, per-character attributes and geometry).
 * Reimplementation of the standard {@code javax.accessibility.AccessibleText}
 * interface for the App Engine AWT compatibility layer; see the Java SE
 * javadoc of the same name for the full contract of each method.
 */
public interface AccessibleText {
    // Granularity selectors for the "part" argument of getAtIndex/
    // getAfterIndex/getBeforeIndex.
    static final int CHARACTER = 1;
    static final int WORD = 2;
    static final int SENTENCE = 3;

    /** Index of the character under the given local coordinate. */
    int getIndexAtPoint(Point p);

    /** Bounding box of the character at index {@code i}, in local coordinates. */
    Rectangle getCharacterBounds(int i);

    /** Number of characters in the accessible text. */
    int getCharCount();

    /** Zero-based caret offset. */
    int getCaretPosition();

    /** The CHARACTER/WORD/SENTENCE at the given index. */
    String getAtIndex(int part, int index);

    /** The CHARACTER/WORD/SENTENCE after the given index. */
    String getAfterIndex(int part, int index);

    /** The CHARACTER/WORD/SENTENCE before the given index. */
    String getBeforeIndex(int part, int index);

    /** Style attributes of the character at index {@code i}. */
    AttributeSet getCharacterAttribute(int i);

    /** Start offset of the selection (equals end when nothing is selected). */
    int getSelectionStart();

    /** End offset of the selection. */
    int getSelectionEnd();

    /** The selected text, if any. */
    String getSelectedText();
}
apache-2.0
jpkrohling/srcdeps-maven-plugin
srcdeps-core/src/main/java/org/l2x6/srcdeps/core/shell/ShellCommand.java
3821
/** * Copyright 2015-2016 Maven Source Dependencies * Plugin contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.l2x6.srcdeps.core.shell; import java.nio.file.Path; import java.util.List; import java.util.Map; import org.l2x6.srcdeps.core.util.SrcdepsCoreUtils; /** * A definition of a shell command that can be executed by {@link Shell#execute(ShellCommand)}. * * @author <a href="https://github.com/ppalaga">Peter Palaga</a> */ public class ShellCommand { private final List<String> arguments; private final Map<String, String> environment; private final String executable; private final IoRedirects ioRedirects; private final long timeoutMs; private final Path workingDirectory; public ShellCommand(String executable, List<String> arguments, Path workingDirectory, Map<String, String> environment, IoRedirects ioRedirects, long timeoutMs) { super(); SrcdepsCoreUtils.assertArgNotNull(executable, "executable"); SrcdepsCoreUtils.assertArgNotNull(arguments, "arguments"); SrcdepsCoreUtils.assertArgNotNull(workingDirectory, "workingDirectory"); SrcdepsCoreUtils.assertArgNotNull(environment, "environment"); SrcdepsCoreUtils.assertArgNotNull(ioRedirects, "ioRedirects"); this.executable = executable; this.arguments = arguments; this.workingDirectory = workingDirectory; this.environment = environment; this.ioRedirects = ioRedirects; this.timeoutMs = timeoutMs; } /** * @return an array containing the executable and its arguments that can be 
passed e.g. to * {@link ProcessBuilder#command(String...)} */ public String[] asCmdArray() { String[] result = new String[arguments.size() + 1]; int i = 0; result[i++] = executable; for (String arg : arguments) { result[i++] = arg; } return result; } /** * @return the {@link List} arguments for the executable. Cannot be {@code null}. */ public List<String> getArguments() { return arguments; } /** * @return a {@link Map} of environment variables that should be used when executing this {@link ShellCommand}. * Cannot be {@code null}. Note that these are just overlay variables - when a new {@link Process} is * spawned, the environment is copied from the present process and only the variables the provided by the * present method are overwritten. */ public Map<String, String> getEnvironment() { return environment; } /** * @return the executable file that should be called */ public String getExecutable() { return executable; } /** * @return the {@link IoRedirects} to use when the {@link Shell} spawns a new {@link Process} */ public IoRedirects getIoRedirects() { return ioRedirects; } /** * @return timeout in milliseconds */ public long getTimeoutMs() { return timeoutMs; } /** * @return the directory in which this {@link ShellCommand} should be executed */ public Path getWorkingDirectory() { return workingDirectory; } }
apache-2.0
dingwpmz/Mycat-Demo
src/main/java/persistent/prestige/modules/edu/service/OrganizationService.java
381
/* * Powered By agile * Web Site: http://www.agile.com * Since 2008 - 2016 */ package persistent.prestige.modules.edu.service; import java.util.Map; /** * Organization service类 * @author 雅居乐 2016-9-10 22:28:24 * @version 1.0 */ public interface OrganizationService{ /** * 保存信息 * @param datas * @return */ Integer saveOrganization(Map datas); }
apache-2.0
AlexBischof/buchungstool
src/test/java/buchungstool/model/importer/KonfigurationEventTest.java
562
package buchungstool.model.importer; import org.junit.Test; import static java.time.LocalDateTime.now; import static org.assertj.core.api.Assertions.assertThat; public class KonfigurationEventTest { @Test public void test() { KonfigurationEvent konfigurationEvent = new KonfigurationEvent(now(), now(), "@Konfiguration", "Max:16\nMin: 4"); assertThat(konfigurationEvent.getMax()).isEqualTo(16); assertThat(konfigurationEvent.getMin()).isEqualTo(4); } }
apache-2.0
akathorn/squall
squall-core/src/main/java/ch/epfl/data/squall/components/dbtoaster/DBToasterJoinComponent.java
3862
/* * * * Copyright (c) 2011-2015 EPFL DATA Laboratory * * Copyright (c) 2014-2015 The Squall Collaboration (see NOTICE) * * * * All rights reserved. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package ch.epfl.data.squall.components.dbtoaster; import backtype.storm.Config; import backtype.storm.topology.TopologyBuilder; import ch.epfl.data.squall.components.Component; import ch.epfl.data.squall.components.JoinerComponent; import ch.epfl.data.squall.components.AbstractJoinerComponent; import ch.epfl.data.squall.operators.AggregateStream; import ch.epfl.data.squall.predicates.Predicate; import ch.epfl.data.squall.storm_components.StormComponent; import ch.epfl.data.squall.storm_components.dbtoaster.StormDBToasterJoin; import ch.epfl.data.squall.storm_components.synchronization.TopologyKiller; import ch.epfl.data.squall.types.Type; import ch.epfl.data.squall.utilities.MyUtilities; import org.apache.log4j.Logger; import java.util.List; import java.util.Map; import java.util.Set; public class DBToasterJoinComponent extends AbstractJoinerComponent<DBToasterJoinComponent> { protected DBToasterJoinComponent getThis() { return this; } private static final long serialVersionUID = 1L; private static Logger LOG = Logger.getLogger(DBToasterJoinComponent.class); private Map<String, Type[]> _parentNameColTypes; private Set<String> _parentsWithMultiplicity; private Map<String, AggregateStream> _parentsWithAggregator; private String _equivalentSQL; protected 
DBToasterJoinComponent(List<Component> relations, Map<String, Type[]> relationTypes, Set<String> relationsWithMultiplicity, Map<String, AggregateStream> relationsWithAggregator, String sql, String name) { super(relations, name); _parentsWithMultiplicity = relationsWithMultiplicity; _parentsWithAggregator = relationsWithAggregator; _parentNameColTypes = relationTypes; _equivalentSQL = sql; } @Override public void makeBolts(TopologyBuilder builder, TopologyKiller killer, List<String> allCompNames, Config conf, int hierarchyPosition) { // by default print out for the last component // for other conditions, can be set via setPrintOut if (hierarchyPosition == StormComponent.FINAL_COMPONENT && !getPrintOutSet()) setPrintOut(true); MyUtilities.checkBatchOutput(getBatchOutputMillis(), getChainOperator().getAggregation(), conf); setStormEmitter(new StormDBToasterJoin(getParents(), this, allCompNames, _parentNameColTypes, _parentsWithMultiplicity, _parentsWithAggregator, hierarchyPosition, builder, killer, conf)); } @Override public DBToasterJoinComponent setJoinPredicate(Predicate predicate) { throw new UnsupportedOperationException(); } public String getSQLQuery() { return _equivalentSQL; } }
apache-2.0
cursem/ScriptCompressor
ScriptCompressor1.0/src/dk/brics/tajs/analysis/WorkListStrategy.java
2303
/* * Copyright 2009-2013 Aarhus University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dk.brics.tajs.analysis; import dk.brics.tajs.flowgraph.BasicBlock; import dk.brics.tajs.lattice.CallEdge; import dk.brics.tajs.solver.CallGraph; import dk.brics.tajs.solver.IWorkListStrategy; /** * Work list strategy. */ public class WorkListStrategy implements IWorkListStrategy<Context> { private CallGraph<State,Context,CallEdge<State>> call_graph; /** * Constructs a new WorkListStrategy object. */ public WorkListStrategy() {} /** * Sets the call graph. 
*/ public void setCallGraph(CallGraph<State,Context,CallEdge<State>> call_graph) { this.call_graph = call_graph; } @Override public int compare(IEntry<Context> e1, IEntry<Context> e2) { BasicBlock n1 = e1.getBlock(); BasicBlock n2 = e2.getBlock(); int serial1 = e1.getSerial(); int serial2 = e2.getSerial(); if (serial1 == serial2) return 0; final int E1_FIRST = -1; final int E2_FIRST = 1; if (n1.getFunction().equals(n2.getFunction()) && e1.getContext().equals(e2.getContext())) { // same function and same context: use block order if (n1.getOrder() < n2.getOrder()) return E1_FIRST; else if (n2.getOrder() < n1.getOrder()) return E2_FIRST; } int function_context_order1 = call_graph.getBlockContextOrder(e1.getContext().getEntryBlockAndContext()); int function_context_order2 = call_graph.getBlockContextOrder(e2.getContext().getEntryBlockAndContext()); // different function/context: order by occurrence number if (function_context_order1 < function_context_order2) return E2_FIRST; else if (function_context_order2 < function_context_order1) return E1_FIRST; // strategy: breadth first return serial1 - serial2; } }
apache-2.0
PhilZeppe/CaaS
pm/src/main/java/at/ac/tuwien/dsg/pm/resources/CollectiveInfoResource.java
1385
package at.ac.tuwien.dsg.pm.resources; import at.ac.tuwien.dsg.pm.PeerManager; import at.ac.tuwien.dsg.pm.model.Collective; import at.ac.tuwien.dsg.smartcom.model.CollectiveInfo; import at.ac.tuwien.dsg.smartcom.model.Identifier; import javax.inject.Inject; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.ArrayList; import java.util.List; /** * @author Philipp Zeppezauer (philipp.zeppezauer@gmail.com) * @version 1.0 */ @Path("collectiveInfo") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public class CollectiveInfoResource { @Inject private PeerManager manager; @GET @Path("/{id}") public CollectiveInfo getCollectiveInfo(@PathParam("id") String id) { Collective collective = manager.getCollective(id); if (collective == null) { throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND).build()); } CollectiveInfo info = new CollectiveInfo(); info.setId(Identifier.collective(id)); info.setDeliveryPolicy(collective.getDeliveryPolicy()); List<Identifier> peers = new ArrayList<>(collective.getPeers().size()); for (String s : collective.getPeers()) { peers.add(Identifier.peer(s)); } info.setPeers(peers); return info; } }
apache-2.0
642638112/-1.0
EarlySleep/app/src/main/java/com/earlysleep/model/AllData.java
353
package com.earlysleep.model; import org.litepal.crud.DataSupport; import java.util.ArrayList; import java.util.List; /** * Created by zml on 2016/6/23. * 介绍: */ public class AllData extends DataSupport { private String music; private int musictime; private boolean musicchosse; List<TimeSeting> list=new ArrayList<>(); }
apache-2.0
metaborg/jsglr
org.spoofax.jsglr2.integrationtest/src/test/java/org/spoofax/jsglr2/integrationtest/features/OriginsTest.java
839
package org.spoofax.jsglr2.integrationtest.features; import java.util.Arrays; import java.util.stream.Stream; import org.junit.jupiter.api.DynamicTest; import org.junit.jupiter.api.TestFactory; import org.spoofax.jsglr2.integrationtest.BaseTestWithSdf3ParseTables; import org.spoofax.jsglr2.integrationtest.OriginDescriptor; import org.spoofax.terms.ParseError; public class OriginsTest extends BaseTestWithSdf3ParseTables { public OriginsTest() { super("tokenization.sdf3"); } @TestFactory public Stream<DynamicTest> operator() throws ParseError { return testOrigins("x+x", Arrays.asList( //@formatter:off new OriginDescriptor("AddOperator", 0, 2), new OriginDescriptor("Id", 0, 0), new OriginDescriptor("Id", 2, 2) //@formatter:on )); } }
apache-2.0
wso2/andes
modules/andes-core/broker/src/main/java/org/wso2/andes/server/handler/BasicPublishMethodHandler.java
3860
/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.server.handler; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.andes.AMQException; import org.wso2.andes.amqp.AMQPUtils; import org.wso2.andes.exchange.ExchangeDefaults; import org.wso2.andes.framing.AMQShortString; import org.wso2.andes.framing.BasicPublishBody; import org.wso2.andes.framing.abstraction.MessagePublishInfo; import org.wso2.andes.protocol.AMQConstant; import org.wso2.andes.server.AMQChannel; import org.wso2.andes.server.exchange.Exchange; import org.wso2.andes.server.protocol.AMQProtocolSession; import org.wso2.andes.server.state.AMQStateManager; import org.wso2.andes.server.state.StateAwareMethodListener; import org.wso2.andes.server.virtualhost.VirtualHost; public class BasicPublishMethodHandler implements StateAwareMethodListener<BasicPublishBody> { private static final Log _logger = LogFactory.getLog(BasicPublishMethodHandler.class); private static final BasicPublishMethodHandler _instance = new BasicPublishMethodHandler(); public static BasicPublishMethodHandler getInstance() { return _instance; } private BasicPublishMethodHandler() { } public void methodReceived(AMQStateManager stateManager, BasicPublishBody body, int channelId) throws AMQException { AMQProtocolSession session = stateManager.getProtocolSession(); if 
(_logger.isDebugEnabled()) { _logger.debug("Publish received on channel " + channelId); } AMQShortString exchangeName = body.getExchange(); // TODO: check the delivery tag field details - is it unique across the broker or per subscriber? if (exchangeName == null) { exchangeName = ExchangeDefaults.DEFAULT_EXCHANGE_NAME; } VirtualHost vHost = session.getVirtualHost(); Exchange exch = vHost.getExchangeRegistry().getExchange(exchangeName); // if the exchange does not exist we raise a channel exception if (exch == null) { throw body.getChannelException(AMQConstant.NOT_FOUND, "Unknown exchange name"); } else { // The partially populated BasicDeliver frame plus the received route body // is stored in the channel. Once the final body frame has been received // it is routed to the exchange. AMQChannel channel = session.getChannel(channelId); if (channel == null) { throw body.getChannelNotFoundException(channelId); } MessagePublishInfo info = session.getMethodRegistry().getProtocolVersionMethodConverter().convertToInfo(body); if (ExchangeDefaults.TOPIC_EXCHANGE_NAME.equals(exchangeName) && AMQPUtils.isWildCardDestination(info.getRoutingKey().toString())) { throw body.getChannelException(AMQConstant.INVALID_ROUTING_KEY, "Publishing messages to a wildcard " + "destination is not allowed"); } info.setExchange(exchangeName); channel.setPublishFrame(info, exch); } } }
apache-2.0
charithag/carbon-device-mgt-framework
components/policy-mgt/org.wso2.carbon.policy.mgt.core/src/main/java/org/wso2/carbon/policy/mgt/core/task/TaskScheduleServiceImpl.java
5611
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.policy.mgt.core.task; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.ntask.common.TaskException; import org.wso2.carbon.ntask.core.TaskInfo; import org.wso2.carbon.ntask.core.TaskManager; import org.wso2.carbon.ntask.core.service.TaskService; import org.wso2.carbon.policy.mgt.common.PolicyMonitoringTaskException; import org.wso2.carbon.policy.mgt.core.internal.PolicyManagementDataHolder; import org.wso2.carbon.policy.mgt.core.util.PolicyManagementConstants; import org.wso2.carbon.policy.mgt.core.util.PolicyManagerUtil; import org.wso2.carbon.ntask.core.TaskInfo.TriggerInfo; import java.util.HashMap; import java.util.Map; public class TaskScheduleServiceImpl implements TaskScheduleService { private static Log log = LogFactory.getLog(TaskScheduleServiceImpl.class); @Override public void startTask(int monitoringFrequency) throws PolicyMonitoringTaskException { if (monitoringFrequency <= 0) { throw new PolicyMonitoringTaskException("Time interval cannot be 0 or less than 0."); } try { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService(); 
taskService.registerTaskType(PolicyManagementConstants.TASK_TYPE); if (log.isDebugEnabled()) { log.debug("Monitoring task is started for the tenant id " + tenantId); } TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE); TriggerInfo triggerInfo = new TriggerInfo(); triggerInfo.setIntervalMillis(monitoringFrequency); triggerInfo.setRepeatCount(-1); Map<String, String> properties = new HashMap<>(); properties.put(PolicyManagementConstants.TENANT_ID, String.valueOf(tenantId)); String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId); TaskInfo taskInfo = new TaskInfo(taskName, PolicyManagementConstants.TASK_CLAZZ, properties, triggerInfo); taskManager.registerTask(taskInfo); taskManager.rescheduleTask(taskInfo.getName()); } catch (TaskException e) { String msg = "Error occurred while creating the task for tenant " + PrivilegedCarbonContext. getThreadLocalCarbonContext().getTenantId(); log.error(msg, e); throw new PolicyMonitoringTaskException(msg, e); } } @Override public void stopTask() throws PolicyMonitoringTaskException { try { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId); TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService(); TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE); taskManager.deleteTask(taskName); } catch (TaskException e) { String msg = "Error occurred while deleting the task for tenant " + PrivilegedCarbonContext. 
getThreadLocalCarbonContext().getTenantId(); log.error(msg, e); throw new PolicyMonitoringTaskException(msg, e); } } @Override public void updateTask(int monitoringFrequency) throws PolicyMonitoringTaskException { try { int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(); String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId); TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService(); TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE); taskManager.deleteTask(taskName); TriggerInfo triggerInfo = new TriggerInfo(); triggerInfo.setIntervalMillis(monitoringFrequency); triggerInfo.setRepeatCount(-1); Map<String, String> properties = new HashMap<>(); properties.put("tenantId", String.valueOf(tenantId)); TaskInfo taskInfo = new TaskInfo(taskName, PolicyManagementConstants.TASK_CLAZZ, properties, triggerInfo); taskManager.registerTask(taskInfo); taskManager.rescheduleTask(taskInfo.getName()); } catch (TaskException e) { String msg = "Error occurred while updating the task for tenant " + PrivilegedCarbonContext. getThreadLocalCarbonContext().getTenantId(); log.error(msg, e); throw new PolicyMonitoringTaskException(msg, e); } } }
apache-2.0
googleapis/google-api-java-client
google-api-client/src/main/java/com/google/api/client/googleapis/testing/json/GoogleJsonResponseExceptionFactoryTesting.java
3555
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.google.api.client.googleapis.testing.json; import com.google.api.client.googleapis.json.GoogleJsonResponseException; import com.google.api.client.http.HttpRequest; import com.google.api.client.http.HttpResponse; import com.google.api.client.json.Json; import com.google.api.client.json.JsonFactory; import com.google.api.client.testing.http.HttpTesting; import com.google.api.client.testing.http.MockHttpTransport; import com.google.api.client.testing.http.MockLowLevelHttpResponse; import com.google.api.client.util.Beta; import java.io.IOException; /** * {@link Beta} <br> * Factory class that builds {@link GoogleJsonResponseException} instances for testing. * * @since 1.18 */ @Beta public final class GoogleJsonResponseExceptionFactoryTesting { /** * Convenience factory method that builds a {@link GoogleJsonResponseException} from its * arguments. The method builds a dummy {@link HttpRequest} and {@link HttpResponse}, sets the * response's status to a user-specified HTTP error code, suppresses exceptions, and executes the * request. This forces the underlying framework to create, but not throw, a {@link * GoogleJsonResponseException}, which the method retrieves and returns to the invoker. * * @param jsonFactory the JSON factory that will create all JSON required by the underlying * framework * @param httpCode the desired HTTP error code. 
Note: do nut specify any codes that indicate * successful completion, e.g. 2XX. * @param reasonPhrase the HTTP reason code that explains the error. For example, if {@code * httpCode} is {@code 404}, the reason phrase should be {@code NOT FOUND}. * @return the generated {@link GoogleJsonResponseException}, as specified. * @throws IOException if request transport fails. */ public static GoogleJsonResponseException newMock( JsonFactory jsonFactory, int httpCode, String reasonPhrase) throws IOException { MockLowLevelHttpResponse otherServiceUnavaiableLowLevelResponse = new MockLowLevelHttpResponse() .setStatusCode(httpCode) .setReasonPhrase(reasonPhrase) .setContentType(Json.MEDIA_TYPE) .setContent( "{ \"error\": { \"errors\": [ { \"reason\": \"" + reasonPhrase + "\" } ], " + "\"code\": " + httpCode + " } }"); MockHttpTransport otherTransport = new MockHttpTransport.Builder() .setLowLevelHttpResponse(otherServiceUnavaiableLowLevelResponse) .build(); HttpRequest otherRequest = otherTransport.createRequestFactory().buildGetRequest(HttpTesting.SIMPLE_GENERIC_URL); otherRequest.setThrowExceptionOnExecuteError(false); HttpResponse otherServiceUnavailableResponse = otherRequest.execute(); return GoogleJsonResponseException.from(jsonFactory, otherServiceUnavailableResponse); } }
apache-2.0
Mobilization/mobandroid5
app/src/test/java/pl/mobilization/conference2015/sponsor/SponsorRestModelPresenterTest.java
3058
package pl.mobilization.conference2015.sponsor; import android.content.Context; import android.content.Intent; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.util.ArrayList; import java.util.List; import de.greenrobot.event.EventBus; import lombok.extern.slf4j.Slf4j; import pl.mobilization.conference2015.sponsor.events.OnSponsorClickEvent; import pl.mobilization.conference2015.sponsor.events.SponsorUpdatedEvent; import pl.mobilization.conference2015.sponsor.repository.SponsorRepoModel; import pl.mobilization.conference2015.sponsor.repository.SponsorRepository; import pl.mobilization.conference2015.sponsor.rest.SponsorRestService; import pl.mobilization.conference2015.sponsor.rest.SponsorListRestModel; import pl.mobilization.conference2015.sponsor.view.SponsorsView; import pl.mobilization.conference2015.sponsor.view.SponsorsListViewModel; import rx.Observable; import static org.fest.assertions.api.Assertions.assertThat; import static org.mockito.Mockito.*; /** * Created by msaramak on 19.08.15. */ @Slf4j public class SponsorRestModelPresenterTest { @Mock SponsorRestService sponsorRestService; @Mock EventBus eventBus; @Mock SponsorsView view; @Mock SponsorRepository sponsorRepository; @Mock Context context; private SponsorPresenter testedSp; @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); //GIVEN a sponsor presenter.. 
testedSp = new SponsorPresenter(sponsorRepository, eventBus); List<SponsorRepoModel> l = new ArrayList<>(); when(sponsorRepository.getSponsors()).thenReturn(Observable.<List<SponsorRepoModel>>just(l)); } @After public void tearDown() throws Exception { } @SuppressWarnings("ResourceType") @Test public void testOnBindView() throws Exception { //GIVEN a sponsor presenter verify(eventBus).register(testedSp); //WHEN bind view testedSp.onBindView(context, view); //THEN check if background service is setup verify(context).bindService(any(Intent.class), any(), eq(Context.BIND_AUTO_CREATE)); } @Test public void shouldDisplayDialogWhenOnSponsorClickEventCalled() throws Exception { //GIVEN a tested sponsor presenter with binded view testedSp.onBindView(context, view); //WHEN event come OnSponsorClickEvent event = new OnSponsorClickEvent(null); testedSp.onEvent(event); //THEN verify(view).showSponsorDialog(event); } @Test public void testOnUpdateSponsorList() throws Exception { //GIVEN a tested sponsor presenter with binded view testedSp.onBindView(context, view); //WHEN sponsors list is updated SponsorUpdatedEvent event = new SponsorUpdatedEvent(); testedSp.onEvent(event); //THEN verify(view).updateSponsors(any(SponsorsListViewModel.class)); } }
apache-2.0
liveontologies/protege-proof-justification
src/main/java/org/liveontologies/protege/justification/proof/preferences/ProofPreferencesPanelPluginLoader.java
1491
package org.liveontologies.protege.justification.proof.preferences; /*- * #%L * Protege Proof Justification * $Id:$ * $HeadURL:$ * %% * Copyright (C) 2016 - 2017 Live Ontologies Project * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.eclipse.core.runtime.IExtension; import org.protege.editor.core.editorkit.EditorKit; import org.protege.editor.core.plugin.AbstractPluginLoader; public class ProofPreferencesPanelPluginLoader extends AbstractPluginLoader<ProofPreferencesPanelPlugin> { private final EditorKit kit; private static final String ID = "JustificationProofPreferences"; private static final String KEY = "org.liveontologies.protege.justification.proof"; public ProofPreferencesPanelPluginLoader(EditorKit kit) { super(KEY, ID); this.kit = kit; } @Override protected ProofPreferencesPanelPlugin createInstance(IExtension extension) { return new ProofPreferencesPanelPlugin(kit, extension); } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-workdocs/src/main/java/com/amazonaws/services/workdocs/model/transform/DeleteFolderRequestProtocolMarshaller.java
2620
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.workdocs.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.workdocs.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.protocol.*; import com.amazonaws.protocol.Protocol; import com.amazonaws.annotation.SdkInternalApi; /** * DeleteFolderRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class DeleteFolderRequestProtocolMarshaller implements Marshaller<Request<DeleteFolderRequest>, DeleteFolderRequest> { private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON).requestUri("/api/v1/folders/{FolderId}") .httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false).hasPayloadMembers(false).serviceName("AmazonWorkDocs").build(); private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory; public DeleteFolderRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<DeleteFolderRequest> marshall(DeleteFolderRequest deleteFolderRequest) { if (deleteFolderRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { final 
ProtocolRequestMarshaller<DeleteFolderRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING, deleteFolderRequest); protocolMarshaller.startMarshalling(); DeleteFolderRequestMarshaller.getInstance().marshall(deleteFolderRequest, protocolMarshaller); return protocolMarshaller.finishMarshalling(); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
soundcloud/gocd
domain/test/com/thoughtworks/go/config/materials/git/GitMaterialShallowCloneTest.java
9537
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.config.materials.git;

import com.googlecode.junit.ext.JunitExtRunner;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.RevisionContext;
import com.thoughtworks.go.domain.materials.TestSubprocessExecutionContext;
import com.thoughtworks.go.domain.materials.git.GitCommand;
import com.thoughtworks.go.domain.materials.git.GitTestRepo;
import com.thoughtworks.go.domain.materials.mercurial.StringRevision;
import com.thoughtworks.go.helper.TestRepo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TestFileUtil;
import org.hamcrest.Matchers;
import org.hamcrest.core.Is;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.thoughtworks.go.domain.materials.git.GitTestRepo.*;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Integration-style tests for {@link GitMaterial} shallow-clone behaviour:
 * how the shallow flag is defaulted, propagated to config/XML/attribute maps,
 * and how local/server-side clones are (un)shallowed when fetching
 * modifications or updating to revisions not present in a shallow clone.
 * Each test clones from a fresh {@link GitTestRepo} into a unique temp
 * working directory created in {@link #setup()}.
 */
@RunWith(JunitExtRunner.class)
public class GitMaterialShallowCloneTest {
    private GitTestRepo repo;   // throwaway upstream repository, recreated per test
    private File workingDir;    // unique temp dir acting as the local clone target

    @Before
    public void setup() throws Exception {
        repo = new GitTestRepo();
        workingDir = TestFileUtil.createUniqueTempFolder("working");
    }

    @After
    public void teardown() throws Exception {
        TestRepo.internalTearDown();
    }

    // Shallow-clone flag defaults to false unless explicitly enabled via the
    // boolean constructor argument or a GitMaterialConfig built with shallow=true.
    @Test
    public void defaultShallowFlagIsOff() throws Exception {
        assertThat(new GitMaterial(repo.projectRepositoryUrl()).isShallowClone(), is(false));
        assertThat(new GitMaterial(repo.projectRepositoryUrl(), null).isShallowClone(), is(false));
        assertThat(new GitMaterial(repo.projectRepositoryUrl(), true).isShallowClone(), is(true));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl())).isShallowClone(), is(false));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, true)).isShallowClone(), is(true));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, false)).isShallowClone(), is(false));
        // NOTE(review): @After teardown() also calls TestRepo.internalTearDown();
        // this extra call looks redundant — confirm before removing.
        TestRepo.internalTearDown();
    }

    // A shallow clone still reports the latest modification, but the local
    // repo must be shallow and must NOT contain the oldest revision.
    @Test
    public void shouldGetLatestModificationWithShallowClone() throws IOException {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        List<Modification> mods = material.latestModification(workingDir, context());
        assertThat(mods.size(), is(1));
        assertThat(mods.get(0).getComment(), Matchers.is("Added 'run-till-file-exists' ant target"));
        assertThat(localRepoFor(material).isShallow(), is(true));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_0), is(false));
        assertThat(localRepoFor(material).currentRevision(), is(REVISION_4.getRevision()));
    }

    // Asking for modifications since a revision that the initial shallow clone
    // did not fetch must still return the full list (newest first).
    @Test
    public void shouldGetModificationSinceANotInitiallyClonedRevision() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        List<Modification> modifications = material.modificationsSince(workingDir, REVISION_0, context());
        assertThat(modifications.size(), is(4));
        assertThat(modifications.get(0).getRevision(), is(REVISION_4.getRevision()));
        assertThat(modifications.get(0).getComment(), is("Added 'run-till-file-exists' ant target"));
        assertThat(modifications.get(1).getRevision(), is(REVISION_3.getRevision()));
        assertThat(modifications.get(1).getComment(), is("adding build.xml"));
        assertThat(modifications.get(2).getRevision(), is(REVISION_2.getRevision()));
        assertThat(modifications.get(2).getComment(), is("Created second.txt from first.txt"));
        assertThat(modifications.get(3).getRevision(), is(REVISION_1.getRevision()));
        assertThat(modifications.get(3).getComment(), is("Added second line"));
    }

    // updateTo() on a shallow clone must be able to reach a revision that was
    // not fetched originally (deepening the clone as needed).
    @Test
    public void shouldBeAbleToUpdateToRevisionNotFetched() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);

        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_3, REVISION_2, 2), context());

        assertThat(localRepoFor(material).currentRevision(), is(REVISION_3.getRevision()));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_2), is(true));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_3), is(true));
    }

    // The shallow flag must round-trip into GitMaterialConfig.
    @Test
    public void configShouldIncludesShallowFlag() {
        GitMaterialConfig shallowConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), true).config();
        assertThat(shallowConfig.isShallowClone(), is(true));

        GitMaterialConfig normalConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), null).config();
        assertThat(normalConfig.isShallowClone(), is(false));
    }

    // The shallow flag must appear in the XML attribute map.
    @Test
    public void xmlAttributesShouldIncludesShallowFlag() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        assertThat(material.getAttributesForXml().get("shallowClone"), Is.<Object>is(true));
    }

    // The shallow flag must appear inside the "git-configuration" attribute map.
    @Test
    public void attributesShouldIncludeShallowFlag() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        Map gitConfig = (Map) (material.getAttributes(false).get("git-configuration"));
        assertThat(gitConfig.get("shallow-clone"), Is.<Object>is(true));
    }

    // Re-polling the same working dir with shallow=false must convert an
    // existing shallow clone into a full clone.
    @Test
    public void shouldConvertExistingRepoToFullRepoWhenShallowCloneIsOff() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        material.latestModification(workingDir, context());
        assertThat(localRepoFor(material).isShallow(), is(true));
        material = new GitMaterial(repo.projectRepositoryUrl(), false);
        material.latestModification(workingDir, context());
        assertThat(localRepoFor(material).isShallow(), is(false));
    }

    // withShallowClone() returns a copy with the flag overridden, leaving the
    // original material unchanged.
    @Test
    public void withShallowCloneShouldGenerateANewMaterialWithOverriddenShallowConfig() {
        GitMaterial original = new GitMaterial(repo.projectRepositoryUrl(), false);
        assertThat(original.withShallowClone(true).isShallowClone(), is(true));
        assertThat(original.withShallowClone(false).isShallowClone(), is(false));
        assertThat(original.isShallowClone(), is(false));
    }

    // Fetching a newly pushed revision into a shallow clone must keep the
    // clone shallow.
    @Test
    public void updateToANewRevisionShouldNotResultInUnshallowing() throws IOException {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_4, REVISION_4, 1), context());
        assertThat(localRepoFor(material).isShallow(), is(true));
        List<Modification> modifications = repo.addFileAndPush("newfile", "add new file");
        StringRevision newRevision = new StringRevision(modifications.get(0).getRevision());
        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(newRevision, newRevision, 1), context());
        assertThat(new File(workingDir, "newfile").exists(), is(true));
        assertThat(localRepoFor(material).isShallow(), is(true));
    }

    // On the server side, with GO_SERVER_SHALLOW_CLONE=false, fetching
    // modifications since a previous revision must fully unshallow the repo.
    @Test
    public void shouldUnshallowServerSideRepoCompletelyOnRetrievingModificationsSincePreviousRevision() {
        SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(false);

        material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, true));
        assertThat(localRepoFor(material).isShallow(), is(false));
    }

    // Conversely, with GO_SERVER_SHALLOW_CLONE=true an already-shallow repo
    // stays shallow.
    @Test
    public void shouldNotUnshallowOnServerSideIfShallowClonePropertyIsOnAndRepoIsAlreadyShallow() {
        SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(true);

        material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, false));
        assertThat(localRepoFor(material).isShallow(), is(true));
    }

    // Fresh default execution context for material operations.
    private TestSubprocessExecutionContext context() {
        return new TestSubprocessExecutionContext();
    }

    // Wraps the working directory in a GitCommand so tests can inspect the
    // local clone (shallow state, current revision, contained revisions).
    private GitCommand localRepoFor(GitMaterial material) {
        return new GitCommand(material.getFingerprint(), workingDir, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>());
    }
}
apache-2.0
CloudComLab/Voting-CAP
src/wei_chih/service/handler/wei_chih/WeiChihHandler.java
5740
package wei_chih.service.handler.wei_chih;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.SignatureException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;

import message.Operation;
import message.OperationType;
import service.Key;
import service.KeyManager;
import service.handler.ConnectionHandler;
import wei_chih.service.Config;
import wei_chih.service.SocketServer;
import wei_chih.utility.MerkleTree;
import wei_chih.utility.Utils;
import wei_chih.message.wei_chih.Request;
import wei_chih.message.wei_chih.Acknowledgement;

/**
 * Per-connection handler for the WeiChih voting/attestation protocol.
 * Maintains, per service index, a Merkle tree over the served data
 * directory plus the last operation, the pre-update digest of the last
 * touched path, and a monotonically increasing sequence number. All
 * request handling is serialized through a single global LOCK, so the
 * shared static state is mutated by one connection at a time.
 *
 * @author Chienweichih
 */
public class WeiChihHandler extends ConnectionHandler {
    private static final ReentrantLock LOCK;

    // All arrays are indexed by service index (0 .. Config.SERVICE_NUM-1),
    // derived from the socket port offset in handle().
    private static final MerkleTree[] merkleTree;
    private static final String[] digestBeforeUpdate;   // digest of the updated path BEFORE the last UPLOAD, used to rebuild the previous tree for AUDIT
    private static final Operation[] lastOP;            // last non-audit operation seen per service
    private static final Integer[] sequenceNumbers;     // expected next client sequence number per service

    static {
        merkleTree = new MerkleTree[Config.SERVICE_NUM];
        digestBeforeUpdate = new String[Config.SERVICE_NUM];
        lastOP = new Operation[Config.SERVICE_NUM];
        sequenceNumbers = new Integer[Config.SERVICE_NUM];

        // Every service starts from the same data directory; the initial
        // "last operation" is a DOWNLOAD carrying the initial root hash.
        for (int i = 0; i < Config.SERVICE_NUM; ++i) {
            merkleTree[i] = new MerkleTree(new File(SocketServer.dataDirPath));
            digestBeforeUpdate[i] = "";
            lastOP[i] = new Operation(OperationType.DOWNLOAD, "", merkleTree[i].getRootHash());
            sequenceNumbers[i] = 0;
        }

        LOCK = new ReentrantLock();
    }

    public WeiChihHandler(Socket socket, KeyPair keyPair) {
        super(socket, keyPair);
    }

    /**
     * Handles one client request: validates the signed Request, applies the
     * operation (UPLOAD/DOWNLOAD/AUDIT) to the per-service state, and sends
     * back a signed Acknowledgement (plus file payloads where applicable).
     */
    @Override
    protected void handle(DataOutputStream out, DataInputStream in) {
        PublicKey clientPubKey = KeyManager.getInstance().getPublicKey(Key.CLIENT);

        // Derive the service index from whichever socket port (remote or
        // local) lies within 10 of the base port SERVICE_PORT[0]; defaults
        // to 0 otherwise.
        int portIndex = 0;
        if (Math.abs(socket.getPort() - Config.SERVICE_PORT[0]) < 10) {
            portIndex = socket.getPort() - Config.SERVICE_PORT[0];
        } else if (Math.abs(socket.getLocalPort() - Config.SERVICE_PORT[0]) < 10) {
            portIndex = socket.getLocalPort() - Config.SERVICE_PORT[0];
        }

        try {
            Request req = Request.parse(Utils.receive(in));

            // NOTE(review): if Utils.receive/Request.parse above throws, the
            // finally block calls LOCK.unlock() without the lock being held,
            // which would raise IllegalMonitorStateException — consider
            // acquiring the lock before the try block instead.
            LOCK.lock();

            if (!req.validate(clientPubKey)) {
                throw new SignatureException("REQ validation failure");
            }

            Operation op = req.getOperation();

            switch (op.getType()) {
                case UPLOAD:
                    // Remember the pre-update digest so AUDIT can reconstruct
                    // the previous tree, then apply the update.
                    digestBeforeUpdate[portIndex] = merkleTree[portIndex].getDigest(op.getPath());
                    merkleTree[portIndex].update(op.getPath(), op.getMessage());
                case DOWNLOAD:
                    // both upload and download, so no break
                    // Client ID must match the expected sequence number.
                    if (0 != op.getClientID().compareTo(String.valueOf(sequenceNumbers[portIndex]))) {
                        throw new java.security.InvalidParameterException();
                    }
                    sequenceNumbers[portIndex]++;
                default:
            }

            File file = new File(SocketServer.dataDirPath + op.getPath());
            String rootHash = merkleTree[portIndex].getRootHash();
            String fileHash = null;
            if (file.exists()) {
                fileHash = Utils.digest(file, Config.DIGEST_ALGORITHM);
            }

            // Acknowledge with the current root hash and (if present) the
            // requested file's digest, signed with the server key pair.
            Acknowledgement ack = new Acknowledgement(rootHash, fileHash, req);
            ack.sign(keyPair);
            Utils.send(out, ack.toString());

            switch (op.getType()) {
                case DOWNLOAD:
                    lastOP[portIndex] = op;
                    // Only the primary service (index 0) actually transfers
                    // the file payload.
                    if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
                        Utils.send(out, file);
                    }
                    break;
                case UPLOAD:
                    lastOP[portIndex] = op;
                    if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
                        // Receive the uploaded file and verify its digest
                        // against the digest announced in the operation.
                        file = new File(Config.DOWNLOADS_DIR_PATH + op.getPath());

                        Utils.receive(in, file);

                        String digest = Utils.digest(file, Config.DIGEST_ALGORITHM);
                        if (0 != op.getMessage().compareTo(digest)) {
                            throw new java.io.IOException();
                        }
                    }
                    break;
                case AUDIT:
                    // Produce an attestation: either the current root hash
                    // (last op was DOWNLOAD) or the serialized PREVIOUS tree
                    // rebuilt by undoing the last UPLOAD.
                    file = new File(Config.ATTESTATION_DIR_PATH + "/service-provider/voting");

                    switch (lastOP[portIndex].getType()) {
                        case DOWNLOAD:
                            Utils.write(file, rootHash);
                            break;
                        case UPLOAD:
                            MerkleTree prevMerkleTree = new MerkleTree(merkleTree[portIndex]);
                            prevMerkleTree.update(lastOP[portIndex].getPath(), digestBeforeUpdate[portIndex]);
                            Utils.Serialize(file, prevMerkleTree);
                            break;
                        default:
                            throw new java.lang.Error();
                    }

                    Utils.send(out, file);
                    break;
                default:
            }

            socket.close();
        } catch (IOException | SignatureException ex) {
            Logger.getLogger(WeiChihHandler.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // LOCK is initialized in the static block, so the null check is
            // purely defensive.
            if (LOCK != null) {
                LOCK.unlock();
            }
        }
    }
}
apache-2.0
mociek124/java_pft
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/ContactPhone.java
1225
package ru.stqa.pft.addressbook.tests;

import org.testng.annotations.Test;
import ru.stqa.pft.addressbook.model.ContactData;

import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Verifies that the aggregated phone string shown on the contact list page
 * matches the (normalized) phone numbers entered on the contact edit form.
 *
 * Created by mocius on 2017-04-16.
 */
public class ContactPhone extends TestBase {

    @Test
    public void testContactPhones() {
        app.goTo().homePage();
        // Take any contact from the list and re-read its data from the edit form.
        ContactData contact = app.contactHelper().all().iterator().next();
        ContactData contactInfoFromEditForm = app.contactHelper().infoFromEditForm(contact);
        assertThat(contact.getAllPhones(), equalTo(mergePhones(contactInfoFromEditForm)));
    }

    /**
     * Joins the home, mobile and work phones (in that order) into one
     * newline-separated string, skipping blank entries and normalizing each
     * number via {@link #cleaned(String)}.
     *
     * Fixes vs. the original: removed an unused {@code <T>} type parameter,
     * replaced {@code Arrays.asList(...).stream()} with {@code Stream.of},
     * and skip {@code null} phones instead of throwing NPE.
     *
     * @param contact contact data read from the edit form
     * @return merged phone string in list-page format
     */
    private String mergePhones(ContactData contact) {
        return Stream.of(contact.getHomePhone(), contact.getMobilePhone(), contact.getWorkPhone())
                .filter(s -> s != null && !s.equals(""))
                .map(ContactPhone::cleaned)
                .collect(Collectors.joining("\n"));
    }

    /**
     * Strips whitespace, dashes and parentheses from a phone number so that
     * edit-form input can be compared with the list-page rendering.
     *
     * @param phone raw phone number, never {@code null}
     * @return phone number with formatting characters removed
     */
    public static String cleaned(String phone) {
        return phone.replaceAll("\\s", "").replaceAll("[-()]", "");
    }
}
apache-2.0
kinddevil/course-service
oauth/src/main/java/com/oauth/services/security/Test.java
1438
package com.oauth.services.security;

import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.keygen.BytesKeyGenerator;
import org.springframework.security.crypto.keygen.KeyGenerators;
import org.springframework.security.crypto.password.StandardPasswordEncoder;

import java.util.Arrays;

/**
 * Scratch utility for eyeballing password-encoder and salt-generator output
 * on the console (not a unit test despite the name).
 *
 * Created by yichen.wei on 6/24/17.
 */
public class Test {

    public static void main(String args[]) {
        BytesKeyGenerator saltGenerator = KeyGenerators.secureRandom();
        StandardPasswordEncoder encode = new StandardPasswordEncoder();

        System.out.println("abcfwef...");
        // Each run prints a different digest because StandardPasswordEncoder
        // salts the input internally.
        System.out.println(encode.encode("abcdef"));

        // BUG FIX: generateKey() returns a byte[]; calling toString() on it
        // printed the array identity ("[B@...") rather than the salt bytes.
        // Render the actual contents instead.
        String salt = Arrays.toString(saltGenerator.generateKey());
        System.out.println(salt);
        System.out.println(saltGenerator.getKeyLength());

        BCryptPasswordEncoder bc = new BCryptPasswordEncoder();
        System.out.println(bc.encode("admin"));
    }
}
apache-2.0
code-distillery/httpclient-configuration-support
src/test/java/net/distilledcode/httpclient/impl/metatype/reflection/InvokersTest.java
3178
package net.distilledcode.httpclient.impl.metatype.reflection;

import org.apache.http.client.config.RequestConfig;
import org.junit.Test;

import java.util.Map;

import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;

/**
 * Tests for {@code Invokers}: reflective invocation of getters/setters and
 * discovery of bean-style accessors mapped to dotted property names
 * (e.g. {@code getFooBar} -> {@code "foo.bar"}).
 */
public class InvokersTest {

    /** Fixture bean exercising the accessor-discovery rules. */
    private static class TestBean {

        private boolean featureEnabled = true;

        // getters
        public String getFooBar() { return null; }

        public void getFooBarVoid() {}  // void return: must NOT be treated as a getter

        // setters
        public void setBarFoo(String fooBar) {}

        public void setBarFooNoArgs() {}  // no argument: must NOT be treated as a setter

        // boolean switch (only called for enabling, disabled by default)
        public void enableFeature() {
            featureEnabled = true;
        }

        // boolean switch (only called for disabling, enabled by default)
        void disableFeature() {
            featureEnabled = false;
        }
    }

    // Round-trips a value through a reflective setter and getter on
    // RequestConfig.Builder / RequestConfig.
    @Test
    public void invokeMethods() throws Exception {
        // builder.setMaxRedirects(5)
        Invokers.Invoker<Void> setMaxRedirects = new Invokers.Invoker<>(RequestConfig.Builder.class.getDeclaredMethod("setMaxRedirects", int.class));
        RequestConfig.Builder builder = RequestConfig.custom();
        setMaxRedirects.invoke(builder, 17);

        // requestConfig.getMaxRedirects()
        Invokers.Invoker<Integer> getMaxRedirects = new Invokers.Invoker<>(RequestConfig.class.getDeclaredMethod("getMaxRedirects"));
        RequestConfig requestConfig = builder.build();
        assertThat(getMaxRedirects.invoke(requestConfig), is(17));
    }

    // Only real getters (non-void return) are mapped to property names.
    @Test
    public void beanGetters() throws Exception {
        Map<String, Invokers.Invoker<?>> testBeanGetters = Invokers.beanGetters(TestBean.class);
        assertThat(testBeanGetters.keySet(), allOf(
                hasItem("foo.bar"),
                not(hasItem("foo.bar.void"))
        ));
    }

    // Only real setters (exactly one argument) are mapped to property names.
    @Test
    public void beanSetters() throws Exception {
        Map<String, Invokers.Invoker<?>> testBeanGetters = Invokers.beanSetters(TestBean.class);
        assertThat(testBeanGetters.keySet(), allOf(
                hasItem("bar.foo"),
                not(hasItem("bar.foo.no.args"))
        ));
    }

    // A conditional no-args setter fires when the supplied value matches its
    // trigger condition (here: trigger on false).
    @Test
    public void conditionalSetter() throws Exception {
        Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), false);
        TestBean testBean = new TestBean();
        assertThat(testBean.featureEnabled, is(true));
        featureDisabler.invoke(testBean, false);
        assertThat(testBean.featureEnabled, is(false));
    }

    // ...and is a no-op when the value does not match the trigger condition.
    @Test
    public void conditionalSetterIgnored() throws Exception {
        Invokers.Invoker<?> featureDisabler = Invokers.conditionalNoArgsSetter(TestBean.class.getDeclaredMethod("disableFeature"), true);
        TestBean testBean = new TestBean();
        assertThat(testBean.featureEnabled, is(true));
        featureDisabler.invoke(testBean, false);
        assertThat(testBean.featureEnabled, is(true));
    }
}
apache-2.0
Zhuinden/flowless
flowless-library/src/main/java/flowless/KeyParceler.java
964
/*
 * Copyright 2013 Square Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package flowless;

import android.os.Parcelable;
import android.support.annotation.NonNull;

/**
 * Used by History to convert your key objects to and from instances of
 * {@link android.os.Parcelable}.
 *
 * Implementations must be symmetric: {@code toKey(toParcelable(key))}
 * should yield a key equal to the original.
 */
public interface KeyParceler {

    /**
     * Converts a history key into a {@link Parcelable} for state saving.
     *
     * @param key the key to convert, never null
     * @return a Parcelable representation, never null
     */
    @NonNull
    Parcelable toParcelable(@NonNull Object key);

    /**
     * Restores a history key from its previously saved {@link Parcelable}
     * representation.
     *
     * @param parcelable the saved representation, never null
     * @return the reconstructed key, never null
     */
    @NonNull
    Object toKey(@NonNull Parcelable parcelable);
}
apache-2.0
jpaw/jpaw
jpaw-util/src/main/java/de/jpaw/util/ByteArray.java
18751
/*
 * Copyright 2012 Michael Bischoff
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.jpaw.util;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.OutputStream;
import java.nio.charset.Charset;

/**
 * Functionality which corresponds to String, but for byte arrays.
 * Essential feature is that the class is immutable, so you can use it in messaging without making deep copies.
 * Mimicking {@link java.lang.String}, the class contains offset and length fields to allow sharing of the buffer.
 * <p>
 * This should really exist in Java SE already.
 *
 * @author Michael Bischoff
 *
 */
public final class ByteArray implements Externalizable, Cloneable {
    private static final long serialVersionUID = 2782729564297256974L;
    public static final Charset CHARSET_UTF8 = Charset.forName("UTF-8"); // default character set is available on all platforms

    private static final int MAGIC_LENGTH_INDICATING_32_BIT_SIZE = 247; // if a single byte length of this value is written in the
    // serialized form, it indicates a full four byte length must be read instead. Not used 0 or 255 due to their frequent use.

    // Invariant: the visible window [offset, offset+length) always lies inside buffer,
    // and buffer contents are never modified after construction.
    private final byte[] buffer;
    private final int offset;
    private final int length;
    private ByteArray extraFieldJustRequiredForDeserialization = null; // transient temporary field

    private static final byte[] ZERO_JAVA_BYTE_ARRAY = new byte[0];
    public static final ByteArray ZERO_BYTE_ARRAY = new ByteArray(ZERO_JAVA_BYTE_ARRAY);

    /** No-arg constructor required for Serializable interface. */
    @Deprecated
    public ByteArray() {
        this(ZERO_JAVA_BYTE_ARRAY);
    }

    /** Constructs a ByteArray from a source byte[], which is defensively copied. */
    public ByteArray(final byte[] source) {
        if (source == null || source.length == 0) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = source.clone(); // benchmarks have shown that clone() is equally fast as System.arraycopy for all lengths > 0
            offset = 0;
            length = buffer.length;
        }
    }

    // construct a ByteArray from a trusted source byte[]
    // this method is always called with unsafeTrustedReuseOfJavaByteArray = true, the parameter is only required in order to distinguish the constructor
    // from the copying one
    private ByteArray(final byte[] source, final boolean unsafeTrustedReuseOfJavaByteArray) {
        if (source == null || source.length == 0) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = unsafeTrustedReuseOfJavaByteArray ? source : source.clone();
            offset = 0;
            length = buffer.length;
        }
    }

    /** Constructs a ByteArray from a ByteArrayOutputStream, which has just been contructed by some previous process.
     * @throws IOException */
    public static ByteArray fromByteArrayOutputStream(final ByteArrayOutputStream baos) throws IOException {
        baos.flush();
        // toByteArray() already returns a fresh copy, so the buffer may be adopted without copying
        return new ByteArray(baos.toByteArray(), true);
    }

    /** Writes the contents of this ByteArray to an OutputStream. */
    public void toOutputStream(final OutputStream os) throws IOException {
        os.write(buffer, offset, length);
    }

    /** Constructs a ByteArray from the provided DataInput, with a predefined length. */
    public static ByteArray fromDataInput(final DataInput in, final int len) throws IOException {
        if (len <= 0)
            return ZERO_BYTE_ARRAY;
        final byte[] tmp = new byte[len];
        in.readFully(tmp);
        return new ByteArray(tmp, true);
    }

    /** read bytes from an input stream, up to maxBytes (or all which exist, if maxBytes = 0). */
    public static ByteArray fromInputStream(final InputStream is, final int maxBytes) throws IOException {
        final ByteBuilder tmp = maxBytes > 0 ? new ByteBuilder(maxBytes, CHARSET_UTF8) : new ByteBuilder();
        tmp.readFromInputStream(is, maxBytes);
        if (tmp.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(tmp.getCurrentBuffer(), 0, tmp.length());
    }

    /** Constructs a ByteArray from the provided ByteBuilder. */
    public static ByteArray fromByteBuilder(final ByteBuilder in) {
        if (in == null || in.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(in.getCurrentBuffer(), 0, in.length());
    }

    /** Constructs a ByteArray from the provided String, using the UTF8 character set. */
    public static ByteArray fromString(final String in) {
        return fromString(in, CHARSET_UTF8);
    }

    /** Constructs a ByteArray from the provided String, using the specified character set. */
    public static ByteArray fromString(final String in, final Charset cs) {
        if (in == null || in.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(in.getBytes(cs), true); // we know these bytes are never changed, so no extra copy required
    }

    /** returns the byte array as a string. Unlike toString(), which uses the JVM default character set, this method always uses UTF-8. */
    public String asString() {
        return asString(CHARSET_UTF8);
    }

    /** returns the byte array as a string, using a specified character set. */
    public String asString(final Charset cs) {
        return new String(buffer, offset, length, cs);
    }

    /** construct a ByteArray from a source byte[], with offset and length. source may not be null. */
    public ByteArray(final byte[] source, final int offset, final int length) {
        if (source == null || offset < 0 || length < 0 || offset + length > source.length)
            throw new IllegalArgumentException();
        buffer = new byte[length];
        System.arraycopy(source, offset, buffer, 0, length);
        this.offset = 0;
        this.length = length;
    }

    /** Construct a ByteArray from another one. Could also just assign it due to immutability.
     * The only benefit of this constructor is that it converts a null parameter into the non-null empty ByteArray. */
    public ByteArray(final ByteArray source) {
        if (source == null) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = source.buffer; // no array copy required due to immutability
            offset = source.offset;
            length = source.length;
        }
    }

    /** Construct a ByteArray from a source byte[], with offset and length. source may not be null.
     * Similar to the subArray member method. */
    public ByteArray(final ByteArray source, final int offset, final int length) {
        if (source == null || offset < 0 || length < 0 || offset + length > source.length)
            throw new IllegalArgumentException();
        this.buffer = source.buffer; // no array copy required due to immutability
        this.offset = source.offset + offset;
        this.length = length;
    }

    /** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is shared.
     * Functionality wise this corresponds to String.substring (before Java 6) or ByteBuffer.slice. */
    public ByteArray subArray(final int xoffset, final int xlength) {
        // create a new ByteArray sharing the same buffer
        return new ByteArray(this, xoffset, xlength);
    }

    /** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is not shared.
     * Use this variant if the original ByteArray holds a much larger byte[] and can be GCed afterwards. */
    public ByteArray subArrayUnshared(final int xoffset, final int xlength) {
        if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
            throw new IllegalArgumentException();
        final byte[] newBuffer = new byte[xlength];
        System.arraycopy(buffer, xoffset, newBuffer, 0, xlength);
        // create a new ByteArray using the new buffer
        return new ByteArray(newBuffer, true);
    }

    /** Returns a copy sharing the same immutable buffer. */
    @Override
    public ByteArray clone() {
        return new ByteArray(this);
    }

    /** Returns the number of visible bytes. */
    public int length() {
        return this.length;
    }

//    public int getOffset() {
//        return this.offset;
//    }
//
//    /** Returns the internal buffer of this object. It may only be used for read-only access.
//     * Java is missing a "const" specifier for arrays as it is available in C and C++.
//     *
//     * Java-purists will complain against exposing this internal state of an immutable object, but as long as
//     * access is possible via reflection anyway, just with performance penalty, it would be outright stupid
//     * to force people to use reflection, or even defensive copies. Instead I hope the name of the method
//     * documents the intended use.
//     */
//    public byte /* const */[] unsafe$getConstBufferOfConstBytes() {
//        return this.buffer;
//    }

    /** Returns the index of the first occurrence of x within the visible window, or -1 if absent. */
    public int indexOf(final byte x) {
        int i = 0;
        while (i < length) {
            if (buffer[offset + i] == x)
                return i;
            ++i;
        }
        return -1;
    }

    /** Returns the index of the first occurrence of x at or after fromIndex, or -1 if absent. Negative fromIndex is treated as 0. */
    public int indexOf(final byte x, final int fromIndex) {
        int i = fromIndex >= 0 ? fromIndex : 0;
        while (i < length) {
            if (buffer[offset + i] == x)
                return i;
            ++i;
        }
        return -1;
    }

    /** Returns the index of the last occurrence of x, or -1 if absent. */
    public int lastIndexOf(final byte x) {
        int i = length;
        while (i > 0) {
            if (buffer[offset + --i] == x)
                return i;
        }
        return -1;
    }

    /** Returns the index of the last occurrence of x at or before fromIndex, or -1 if absent. fromIndex is clamped to length-1. */
    public int lastIndexOf(final byte x, final int fromIndex) {
        int i = fromIndex >= length ? length - 1 : fromIndex;
        while (i >= 0) {
            if (buffer[offset + i] == x)
                return i;
            --i;
        }
        return -1;
    }

    /** Returns the byte at position pos; throws IllegalArgumentException if pos is out of range. */
    public byte byteAt(final int pos) {
        if (pos < 0 || pos >= length)
            throw new IllegalArgumentException();
        return buffer[offset + pos];
    }

    /** Provides the contents of this ByteArray to some InputStream. */
    public ByteArrayInputStream asByteArrayInputStream() {
        return new ByteArrayInputStream(buffer, offset, length());
    }

    // return a defensive copy of the contents
    public byte[] getBytes() {
        final byte[] result = new byte[length];
        System.arraycopy(buffer, offset, result, 0, length);
        return result;
    }

    // return a defensive copy of part of the contents. Shorthand for subArray(offset, length).getBytes(),
    // which would create a temporary object
    public byte[] getBytes(final int xoffset, final int xlength) {
        if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
            throw new IllegalArgumentException();
        final byte[] result = new byte[xlength];
        System.arraycopy(buffer, xoffset + this.offset, result, 0, xlength);
        return result;
    }

    // shared byte-wise comparison of the visible window against dst[dstOffset .. dstOffset+dstLength)
    private boolean contentEqualsSub(final byte[] dst, final int dstOffset, final int dstLength) {
        if (length != dstLength)
            return false;
        for (int i = 0; i < dstLength; ++i) {
            if (buffer[offset + i] != dst[dstOffset + i])
                return false;
        }
        return true;
    }

    // following: all arguments must be not null
    public boolean contentEquals(final ByteArray that) {
        return contentEqualsSub(that.buffer, that.offset, that.length);
    }

    public boolean contentEquals(final byte[] that) {
        return contentEqualsSub(that, 0, that.length);
    }

    public boolean contentEquals(final byte[] that, final int thatOffset, final int thatLength) {
        if (thatOffset < 0 || thatLength < 0 || thatOffset + thatLength > that.length)
            throw new IllegalArgumentException();
        return contentEqualsSub(that, thatOffset, thatLength);
    }

    // returns if the two instances share the same backing buffer (for debugging)
    public boolean shareBuffer(final ByteArray that) {
        return buffer == that.buffer;
    }

    /** Hash computed over the visible bytes only, consistent with equals(). */
    @Override
    public int hashCode() {
        int hash = 997;
        for (int i = 0; i < length; ++i) {
            hash = 29 * hash + buffer[offset + i];
        }
        return hash;
    }

    // two ByteArrays are considered equal if they have the same visible contents
    @Override
    public boolean equals(final Object that) {
        if (this == that)
            return true;
        if (that == null || getClass() != that.getClass())
            return false;
        final ByteArray xthat = (ByteArray)that;
        // same as contentEqualsSub(..) now
        if (this.length != xthat.length)
            return false;
        for (int i = 0; i < length; ++i) {
            if (buffer[offset + i] != xthat.buffer[xthat.offset + i])
                return false;
        }
        return true;
    }

    // support function to allow dumping contents to DataOutput without the need to expose our internal buffer
    public void writeToDataOutput(final DataOutput out) throws IOException {
        out.write(buffer, offset, length);
    }

    /** Renders part of the contents via ByteUtil.dump, starting at startAt, up to maxlength bytes (or all if maxlength <= 0). */
    public String hexdump(final int startAt, final int maxlength) {
        if (length <= startAt)
            return ""; // no data to dump
        return ByteUtil.dump(buffer, offset + startAt, (maxlength > 0 && maxlength < length) ? maxlength : length);
    }

    // Serialization format: a single length byte for lengths < 256 (except the magic value),
    // otherwise the magic byte followed by a 4-byte int length; then the raw bytes.
    @Override
    public void writeExternal(final ObjectOutput out) throws IOException {
        //writeBytes(out, buffer, offset, length);
        if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
            out.writeByte(length);
        } else {
            out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
            out.writeInt(length);
        }
        out.write(buffer, offset, length);
    }

    // support function to allow ordinary byte[] to be written in same fashion
    public static void writeBytes(final ObjectOutput out, final byte[] buffer, final int offset, final int length) throws IOException {
        if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
            out.writeByte(length);
        } else {
            out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
            out.writeInt(length);
        }
        out.write(buffer, offset, length);
    }

    /** Reads a byte[] previously written by writeBytes / writeExternal (length prefix + raw bytes). */
    public static byte[] readBytes(final ObjectInput in) throws IOException {
        int newlength = in.readByte();
        if (newlength < 0)
            newlength += 256; // want full unsigned range
        if (newlength == MAGIC_LENGTH_INDICATING_32_BIT_SIZE) // magic to indicate four byte length
            newlength = in.readInt();
        // System.out.println("ByteArray.readExternal() with length " + newlength);
        if (newlength == 0)
            return ZERO_JAVA_BYTE_ARRAY;
        final byte[] localBuffer = new byte[newlength];
        int done = 0;
        while (done < newlength) {
            final int nRead = in.read(localBuffer, done, newlength - done); // may return less bytes than requested!
            if (nRead <= 0)
                throw new IOException("deserialization of ByteArray returned " + nRead + " while expecting " + (newlength - done));
            done += nRead;
        }
        return localBuffer;
    }

    // factory method to read from objectInput via above helper function
    public static ByteArray read(final ObjectInput in) throws IOException {
        return new ByteArray(readBytes(in), true);
    }

    // a direct implementation of this method would conflict with the immutability / "final" attributes of the field
    // Weird Java language design again. If readExternal() is kind of a constructor, why are assignments to final fields not allowed here?
    // alternatives around are to add artificial fields and use readResolve / proxies or to discard the "final" attributes,
    // or using reflection to set the values (!?). Bleh!
    // We're using kind of Bloch's "proxy" pattern (Essential Java, #78), namely a single-sided variant with just a single additonal member field,
    // which lets us preserve the immutability
    // see also http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6379948 for discussion around this
    @Override
    public void readExternal(final ObjectInput in) throws IOException {
        extraFieldJustRequiredForDeserialization = new ByteArray(readBytes(in), true);
    }

    /** Serialization hook: substitutes the instance built in readExternal() so the final fields stay meaningful. */
    public Object readResolve() {
        // System.out.println("ByteArray.readResolve()");
        if (extraFieldJustRequiredForDeserialization == null)
            throw new RuntimeException("readResolve() called on instance not obtained via readExternal()");
        return extraFieldJustRequiredForDeserialization;
    }

    // factory method to construct a byte array from a prevalidated base64 byte sequence. returns null if length is suspicious
    public static ByteArray fromBase64(final byte[] data, final int offset, final int length) {
        if (length == 0)
            return ZERO_BYTE_ARRAY;
        final byte[] tmp = Base64.decode(data, offset, length);
        if (tmp == null)
            return null;
        return new ByteArray(tmp, true);
    }

    /** Appends the base64 encoding of this ByteArray to the provided ByteBuilder. */
    public void appendBase64(final ByteBuilder b) {
        Base64.encodeToByte(b, buffer, offset, length);
    }

    /** Appends the raw (unencoded) bytes of this ByteArray to the provided ByteBuilder. */
    public void appendToRaw(final ByteBuilder b) {
        b.write(buffer, offset, length);
    }

    /** Returns the contents of this ByteArray as a base64 encoded string.
     * @since 1.2.12 */
    public String asBase64() {
        final ByteBuilder tmp = new ByteBuilder(0, null);
        Base64.encodeToByte(tmp, buffer, offset, length);
        return tmp.toString();
    }

    // returns the String representation of the visible bytes portion
    // NOTE(review): uses the JVM default charset; prefer asString() for a deterministic UTF-8 rendering
    @Override
    public String toString() {
        return new String(buffer, offset, length);
    }
}
apache-2.0
songwie/elasticsearch-river-jdbc
src/main/java/org/xbib/elasticsearch/plugin/jdbc/feeder/JDBCFeeder.java
12908
/*
 * Copyright (C) 2014 Jörg Prante
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.xbib.elasticsearch.plugin.jdbc.feeder;

import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.metrics.MeterMetric;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.JsonSettingsLoader;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.river.RiverName;
import org.xbib.elasticsearch.plugin.jdbc.RiverRunnable;
import org.xbib.elasticsearch.plugin.jdbc.classloader.uri.URIClassLoader;
import org.xbib.elasticsearch.plugin.jdbc.client.Ingest;
import org.xbib.elasticsearch.plugin.jdbc.client.IngestFactory;
import org.xbib.elasticsearch.plugin.jdbc.client.transport.BulkTransportClient;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronExpression;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronThreadPoolExecutor;
import org.xbib.elasticsearch.plugin.jdbc.state.RiverStatesMetaData;
import org.xbib.elasticsearch.plugin.jdbc.util.RiverServiceLoader;
import org.xbib.elasticsearch.river.jdbc.RiverFlow;

import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.collect.Lists.newLinkedList;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 * Standalone feeder for JDBC.
 * <p>
 * Reads a JSON specification (typically from stdin), builds a {@link RiverFlow}
 * from it, and runs the flow either once, at a fixed interval, or on one or
 * more cron schedules. A JVM shutdown hook tears everything down on Ctrl-C.
 */
public class JDBCFeeder {

    private final static ESLogger logger = ESLoggerFactory.getLogger("JDBCFeeder");

    /**
     * Register metadata factory in Elasticsearch for being able to decode
     * ClusterStateResponse with RiverStatesMetadata
     */
    static {
        MetaData.registerFactory(RiverStatesMetaData.TYPE, RiverStatesMetaData.FACTORY);
    }

    // input for the JSON feed specification; set by readFrom()
    protected Reader reader;

    // output sink; set by writeTo()
    protected Writer writer;

    // error sink; set by errorsTo()
    protected PrintStream printStream;

    protected IngestFactory ingestFactory;

    /**
     * This ingest is the client for the river flow state operations
     */
    private Ingest ingest;

    private RiverFlow riverFlow;

    // one definition map per "jdbc" pipeline entry in the spec
    private List<Map<String, Object>> definitions;

    private ThreadPoolExecutor threadPoolExecutor;

    private volatile Thread feederThread;

    private volatile boolean closed;

    /**
     * Constructor for running this from command line.
     * Installs the shutdown hook immediately, so {@link #shutdown()} must
     * tolerate being invoked before any of the wiring methods have run.
     */
    public JDBCFeeder() {
        Runtime.getRuntime().addShutdownHook(shutdownHook());
    }

    /**
     * Convenience entry point: read the spec from stdin, write to stdout,
     * report errors to stderr, and start the feeder.
     *
     * @throws Exception if the feeder cannot be started or fails while running
     */
    public void exec() throws Exception {
        readFrom(new InputStreamReader(System.in, "UTF-8"))
                .writeTo(new OutputStreamWriter(System.out, "UTF-8"))
                .errorsTo(System.err)
                .start();
    }

    /**
     * Parse the JSON feeder specification from the given reader and create the
     * river flow. The {@code jdbc} key may hold a single definition map or a
     * list of them. I/O problems are logged, not rethrown, so callers must be
     * prepared for {@link #start()} to fail later.
     *
     * @param reader source of the JSON specification
     * @return this feeder, for chaining
     */
    @SuppressWarnings("unchecked")
    public JDBCFeeder readFrom(Reader reader) {
        this.reader = reader;
        try {
            Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(reader).mapOrderedAndClose();
            Settings settings = settingsBuilder()
                    .put(new JsonSettingsLoader().load(jsonBuilder().map(map).string()))
                    .build();
            this.definitions = newLinkedList();
            Object pipeline = map.get("jdbc");
            if (pipeline instanceof Map) {
                definitions.add((Map<String, Object>) pipeline);
            }
            if (pipeline instanceof List) {
                definitions.addAll((List<Map<String, Object>>) pipeline);
            }
            // before running, create the river flow
            createRiverFlow(map, settings);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return this;
    }

    /**
     * Instantiate the river flow named by the {@code strategy} key (default
     * "simple") and wire it up with a client, metric, and work queue.
     *
     * @param spec     the parsed feeder specification
     * @param settings settings derived from the specification
     * @return the configured river flow
     * @throws IOException if the ingest client cannot be created
     */
    protected RiverFlow createRiverFlow(Map<String, Object> spec, Settings settings) throws IOException {
        String strategy = XContentMapValues.nodeStringValue(spec.get("strategy"), "simple");
        this.riverFlow = RiverServiceLoader.newRiverFlow(strategy);
        logger.debug("strategy {}: river flow class {}, spec = {} settings = {}",
                strategy, riverFlow.getClass().getName(), spec, settings.getAsMap());
        this.ingestFactory = createIngestFactory(settings);
        // our private ingest, needed for having a client in the river flow
        this.ingest = ingestFactory.create();
        riverFlow.setRiverName(new RiverName("jdbc", "feeder"))
                .setSettings(settings)
                .setClient(ingest.client())
                .setIngestFactory(ingestFactory)
                .setMetric(new MeterMetric(Executors.newScheduledThreadPool(1), TimeUnit.SECONDS))
                .setQueue(new ConcurrentLinkedDeque<Map<String, Object>>());
        return riverFlow;
    }

    /**
     * @param writer output sink
     * @return this feeder, for chaining
     */
    public JDBCFeeder writeTo(Writer writer) {
        this.writer = writer;
        return this;
    }

    /**
     * @param printStream error sink
     * @return this feeder, for chaining
     */
    public JDBCFeeder errorsTo(PrintStream printStream) {
        this.printStream = printStream;
        return this;
    }

    /**
     * Start the feeder, block until all scheduled executions have finished,
     * then shut the ingest client down.
     *
     * @return this feeder, for chaining
     * @throws Exception if no client is available, no nodes are connected,
     *                   or a scheduled execution fails
     */
    public JDBCFeeder start() throws Exception {
        this.closed = false;
        // guard against readFrom() having failed (it only logs IOExceptions)
        if (ingest == null || ingest.getConnectedNodes().isEmpty()) {
            throw new IOException("no nodes connected, can't continue");
        }
        this.feederThread = new Thread(new RiverRunnable(riverFlow, definitions));
        List<Future<?>> futures = schedule(feederThread);
        // wait for all threads to finish
        for (Future<?> future : futures) {
            future.get();
        }
        ingest.shutdown();
        return this;
    }

    /**
     * Schedule the feeder thread according to the river settings:
     * cron expressions ("schedule"), a fixed interval ("interval"), or a
     * single immediate execution when neither is configured.
     *
     * @param thread the feeder thread to schedule
     * @return the futures of all scheduled executions
     */
    private List<Future<?>> schedule(Thread thread) {
        Settings settings = riverFlow.getSettings();
        String[] schedule = settings.getAsArray("schedule");
        List<Future<?>> futures = newLinkedList();
        Long seconds = settings.getAsTime("interval", TimeValue.timeValueSeconds(0)).seconds();
        if (schedule != null && schedule.length > 0) {
            CronThreadPoolExecutor cronThreadPoolExecutor =
                    new CronThreadPoolExecutor(settings.getAsInt("threadpoolsize", 1));
            for (String cron : schedule) {
                futures.add(cronThreadPoolExecutor.schedule(thread, new CronExpression(cron)));
            }
            this.threadPoolExecutor = cronThreadPoolExecutor;
            logger.debug("scheduled feeder instance with cron expressions {}", Arrays.asList(schedule));
        } else if (seconds > 0L) {
            ScheduledThreadPoolExecutor scheduledThreadPoolExecutor =
                    new ScheduledThreadPoolExecutor(settings.getAsInt("threadpoolsize", 4));
            futures.add(scheduledThreadPoolExecutor.scheduleAtFixedRate(thread, 0L, seconds, TimeUnit.SECONDS));
            logger.debug("scheduled feeder instance at fixed rate of {} seconds", seconds);
            this.threadPoolExecutor = scheduledThreadPoolExecutor;
        } else {
            this.threadPoolExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
                    new LinkedBlockingQueue<Runnable>());
            futures.add(threadPoolExecutor.submit(thread));
            logger.debug("started feeder instance");
        }
        return futures;
    }

    /**
     * Shut down feeder instance by Ctrl-C
     *
     * @return shutdown thread
     */
    public Thread shutdownHook() {
        return new Thread() {
            public void run() {
                try {
                    shutdown();
                } catch (Exception e) {
                    e.printStackTrace(printStream);
                }
            }
        };
    }

    /**
     * Shut down the feeder: stop the executor, interrupt the feeder thread,
     * shut down the ingest client, and close all streams. Idempotent, and
     * safe to call before the feeder has been fully wired (the shutdown hook
     * is registered in the constructor, so any of the fields below may still
     * be null when this runs).
     *
     * @throws Exception if closing a resource fails
     */
    public synchronized void shutdown() throws Exception {
        if (closed) {
            return;
        }
        closed = true;
        if (threadPoolExecutor != null) {
            threadPoolExecutor.shutdownNow();
            threadPoolExecutor = null;
        }
        if (feederThread != null) {
            feederThread.interrupt();
        }
        if (ingest != null && !ingest.isShutdown()) {
            ingest.shutdown();
        }
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            writer.close();
        }
        if (printStream != null) {
            printStream.close();
        }
    }

    /**
     * Build an {@link IngestFactory} that creates {@link BulkTransportClient}
     * instances configured from the feeder settings (cluster name, host/port,
     * bulk limits) and a custom class loader pointing at ES_HOME/lib.
     *
     * @param settings the feeder settings
     * @return the ingest factory
     */
    private IngestFactory createIngestFactory(final Settings settings) {
        return new IngestFactory() {
            @Override
            public Ingest create() {
                Integer maxbulkactions = settings.getAsInt("max_bulk_actions", 10000);
                Integer maxconcurrentbulkrequests = settings.getAsInt("max_concurrent_bulk_requests",
                        Runtime.getRuntime().availableProcessors() * 2);
                ByteSizeValue maxvolume = settings.getAsBytesSize("max_bulk_volume", ByteSizeValue.parseBytesSizeValue("10m"));
                TimeValue maxrequestwait = settings.getAsTime("max_request_wait", TimeValue.timeValueSeconds(60));
                TimeValue flushinterval = settings.getAsTime("flush_interval", TimeValue.timeValueSeconds(5));
                File home = new File(settings.get("home", "."));
                BulkTransportClient ingest = new BulkTransportClient();
                Settings clientSettings = ImmutableSettings.settingsBuilder()
                        .put("cluster.name", settings.get("elasticsearch.cluster", "elasticsearch"))
                        .put("host", settings.get("elasticsearch.host", "localhost"))
                        .put("port", settings.getAsInt("elasticsearch.port", 9300))
                        .put("sniff", settings.getAsBoolean("elasticsearch.sniff", false))
                        .put("name", "feeder") // prevents lookup of names.txt, we don't have it, and marks this node as "feeder". See also module load skipping in JDBCRiverPlugin
                        .put("client.transport.ignore_cluster_name", true) // ignore cluster name setting
                        .put("client.transport.ping_timeout", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(10))) // ping timeout
                        .put("client.transport.nodes_sampler_interval", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(5))) // for sniff sampling
                        .put("path.plugins", ".dontexist") // pointing to a non-existing folder means, this disables loading site plugins
                        // adding our custom class loader is tricky, actions may not be registered to ActionService
                        .classLoader(getClassLoader(getClass().getClassLoader(), home))
                        .build();
                ingest.maxActionsPerBulkRequest(maxbulkactions)
                        .maxConcurrentBulkRequests(maxconcurrentbulkrequests)
                        .maxVolumePerBulkRequest(maxvolume)
                        .maxRequestWait(maxrequestwait)
                        .flushIngestInterval(flushinterval)
                        .newClient(clientSettings);
                return ingest;
            }
        };
    }

    /**
     * We have to add Elasticsearch to our classpath, but exclude all jvm plugins
     * for starting our TransportClient.
     *
     * @param home ES_HOME
     * @return a custom class loader with our dependencies
     */
    private ClassLoader getClassLoader(ClassLoader parent, File home) {
        URIClassLoader classLoader = new URIClassLoader(parent);
        File[] libs = new File(home + "/lib").listFiles();
        if (libs != null) {
            for (File file : libs) {
                if (file.getName().toLowerCase().endsWith(".jar")) {
                    classLoader.addURI(file.toURI());
                }
            }
        }
        return classLoader;
    }
}
apache-2.0
jitsi/libjitsi
src/main/java/org/jitsi/service/neomedia/stats/MediaStreamStats2.java
2461
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.service.neomedia.stats;

import org.jitsi.service.neomedia.*;

import java.util.*;

/**
 * An extended interface for accessing the statistics of a {@link MediaStream}.
 *
 * The reason to extend the {@link MediaStreamStats} interface rather than
 * adding methods into it is to allow the implementation to reside in a separate
 * class. This is desirable in order to:
 * 1. Help to keep the old interface for backward compatibility.
 * 2. Provide a "clean" place where future code can be added, thus avoiding
 * further cluttering of the already overly complicated
 * {@link org.jitsi.impl.neomedia.MediaStreamStatsImpl}.
 *
 * @author Boris Grozev
 */
public interface MediaStreamStats2
    extends MediaStreamStats
{
    /**
     * @return the instance which keeps aggregate statistics for the associated
     * {@link MediaStream} in the receive direction.
     */
    ReceiveTrackStats getReceiveStats();

    /**
     * @return the instance which keeps aggregate statistics for the associated
     * {@link MediaStream} in the send direction.
     */
    SendTrackStats getSendStats();

    /**
     * @param ssrc the SSRC whose statistics to look up.
     * @return the instance which keeps statistics for a particular SSRC in the
     * receive direction.
     */
    ReceiveTrackStats getReceiveStats(long ssrc);

    /**
     * @param ssrc the SSRC whose statistics to look up.
     * @return the instance which keeps statistics for a particular SSRC in the
     * send direction.
     */
    SendTrackStats getSendStats(long ssrc);

    /**
     * @return all per-SSRC statistics for the send direction.
     */
    Collection<? extends SendTrackStats> getAllSendStats();

    /**
     * @return all per-SSRC statistics for the receive direction.
     */
    Collection<? extends ReceiveTrackStats> getAllReceiveStats();

    /**
     * Clears send ssrc stats.
     * @param ssrc the ssrc to clear.
     */
    void clearSendSsrc(long ssrc);
}
apache-2.0
Saligia-eva/mobvista_druid
api/src/test/java/io/druid/data/input/impl/DelimitedParseSpecTest.java
3790
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.data.input.impl;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import io.druid.TestObjectMapper;
import org.junit.Assert;
import org.junit.Test;

import javax.validation.constraints.Null;
import java.io.IOException;
import java.util.Arrays;

/**
 * Tests for {@link DelimitedParseSpec}: JSON round-tripping and constructor
 * argument validation.
 */
public class DelimitedParseSpecTest
{
  private final ObjectMapper jsonMapper = new TestObjectMapper();

  /**
   * A spec serialized to JSON and read back must preserve every field.
   */
  @Test
  public void testSerde() throws IOException
  {
    final DelimitedParseSpec original = new DelimitedParseSpec(
        new TimestampSpec("abc", "iso", null, null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("abc")), null, null),
        "\u0001",
        "\u0002",
        Arrays.asList("abc")
    );
    final String json = jsonMapper.writeValueAsString(original);
    final DelimitedParseSpec roundTripped = jsonMapper.readValue(json, DelimitedParseSpec.class);
    Assert.assertEquals("abc", roundTripped.getTimestampSpec().getTimestampColumn());
    Assert.assertEquals("iso", roundTripped.getTimestampSpec().getTimestampFormat());
    Assert.assertEquals(Arrays.asList("abc"), roundTripped.getColumns());
    Assert.assertEquals("\u0001", roundTripped.getDelimiter());
    Assert.assertEquals("\u0002", roundTripped.getListDelimiter());
    Assert.assertEquals(Arrays.asList("abc"), roundTripped.getDimensionsSpec().getDimensionNames());
  }

  /**
   * A dimension ("b") missing from the column list must be rejected.
   */
  @Test(expected = IllegalArgumentException.class)
  public void testColumnMissing() throws Exception
  {
    new DelimitedParseSpec(
        new TimestampSpec("timestamp", "auto", null, null),
        new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
            Lists.<String>newArrayList(),
            Lists.<SpatialDimensionSchema>newArrayList()
        ),
        ",",
        " ",
        Arrays.asList("a")
    );
  }

  /**
   * A column name containing the delimiter itself ("a,") must be rejected.
   */
  @Test(expected = IllegalArgumentException.class)
  public void testComma() throws Exception
  {
    new DelimitedParseSpec(
        new TimestampSpec("timestamp", "auto", null, null),
        new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")),
            Lists.<String>newArrayList(),
            Lists.<SpatialDimensionSchema>newArrayList()
        ),
        ",",
        null,
        Arrays.asList("a")
    );
  }

  /**
   * Passing a null column list is not allowed.
   */
  @Test(expected = NullPointerException.class)
  public void testDefaultColumnList()
  {
    new DelimitedParseSpec(
        new TimestampSpec("timestamp", "auto", null, null),
        new DimensionsSpec(
            DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")),
            Lists.<String>newArrayList(),
            Lists.<SpatialDimensionSchema>newArrayList()
        ),
        ",",
        null,
        null
    );
  }
}
apache-2.0
trasa/aws-sdk-java
aws-java-sdk-elasticloadbalancing/src/main/java/com/amazonaws/services/elasticloadbalancing/model/RegisterInstancesWithLoadBalancerResult.java
5518
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticloadbalancing.model;

import java.io.Serializable;

/**
 * Result of a RegisterInstancesWithLoadBalancer call: carries the updated
 * list of instances registered with the load balancer.
 */
public class RegisterInstancesWithLoadBalancerResult implements Serializable, Cloneable {

    /**
     * The updated list of instances for the load balancer.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instances;

    /**
     * The updated list of instances for the load balancer.
     * Lazily auto-constructs an empty list on first access.
     *
     * @return The updated list of instances for the load balancer.
     */
    public java.util.List<Instance> getInstances() {
        if (instances == null) {
            instances = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>();
            instances.setAutoConstruct(true);
        }
        return instances;
    }

    /**
     * The updated list of instances for the load balancer.
     * A defensive copy of the given collection is stored; null clears the field.
     *
     * @param instances The updated list of instances for the load balancer.
     */
    public void setInstances(java.util.Collection<Instance> instances) {
        if (instances == null) {
            this.instances = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<Instance> copy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>(instances.size());
            copy.addAll(instances);
            this.instances = copy;
        }
    }

    /**
     * The updated list of instances for the load balancer.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setInstances(java.util.Collection)} or {@link
     * #withInstances(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param instances The updated list of instances for the load balancer.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public RegisterInstancesWithLoadBalancerResult withInstances(Instance... instances) {
        if (getInstances() == null) {
            setInstances(new java.util.ArrayList<Instance>(instances.length));
        }
        for (Instance instance : instances) {
            getInstances().add(instance);
        }
        return this;
    }

    /**
     * The updated list of instances for the load balancer.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param instances The updated list of instances for the load balancer.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public RegisterInstancesWithLoadBalancerResult withInstances(java.util.Collection<Instance> instances) {
        // delegate: setInstances already handles both the null and copy cases
        setInstances(instances);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getInstances() != null) {
            sb.append("Instances: " + getInstances() );
        }
        return sb.append("}").toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getInstances() == null) ? 0 : getInstances().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || !(obj instanceof RegisterInstancesWithLoadBalancerResult)) {
            return false;
        }
        RegisterInstancesWithLoadBalancerResult other = (RegisterInstancesWithLoadBalancerResult) obj;
        if (other.getInstances() == null ^ this.getInstances() == null) {
            return false;
        }
        if (other.getInstances() != null && other.getInstances().equals(this.getInstances()) == false) {
            return false;
        }
        return true;
    }

    @Override
    public RegisterInstancesWithLoadBalancerResult clone() {
        try {
            return (RegisterInstancesWithLoadBalancerResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
apache-2.0
GodisGod/godtemper
src/com/example/godtemper/db/GodTemperDB.java
4037
package com.example.godtemper.db;

import java.util.ArrayList;
import java.util.List;

import com.example.godtemper.model.City;
import com.example.godtemper.model.County;
import com.example.godtemper.model.Province;

import android.R.integer;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;

/**
 * Singleton data-access object for the GodTemper SQLite database.
 * Stores and loads the Province / City / County location hierarchy.
 * (Original comments were GBK-encoded Chinese; translated to English.)
 */
public class GodTemperDB {

    /**
     * Database name.
     */
    public static final String DB_NAME = "GodTemper";

    /**
     * Database version.
     */
    public static final int VERSION = 1;

    private static GodTemperDB godTemperDB;

    private SQLiteDatabase db;

    private GodTemperDB(Context context) {
        GodTemperOpenHelper dbHelper = new GodTemperOpenHelper(context, DB_NAME, null, VERSION);
        db = dbHelper.getWritableDatabase();
    }

    /**
     * Get the singleton instance of GodTemperDB.
     *
     * @param context used to open (or create) the database on first access
     * @return the shared instance
     */
    public synchronized static GodTemperDB getInstance(Context context) {
        if (godTemperDB == null) {
            godTemperDB = new GodTemperDB(context);
        }
        return godTemperDB;
    }

    /**
     * Store a Province instance in the database. Null instances are ignored.
     *
     * @param province the province to persist
     */
    public void saveProvince(Province province) {
        if (province != null) {
            ContentValues values = new ContentValues();
            values.put("province_name", province.getProvinceName());
            values.put("province_code", province.getProvinceCode());
            db.insert("Province", null, values);
        }
    }

    /**
     * Load all provinces from the database.
     *
     * @return the provinces, in table order; empty if none are stored
     */
    public List<Province> loadProvinces() {
        List<Province> list = new ArrayList<Province>();
        Cursor cursor = db.query("Province", null, null, null, null, null, null);
        // close the cursor in all cases to avoid leaking the underlying window
        try {
            if (cursor.moveToFirst()) {
                do {
                    Province province = new Province();
                    province.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name")));
                    province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code")));
                    list.add(province);
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return list;
    }

    /**
     * Store a City instance in the database. Null instances are ignored.
     *
     * @param city the city to persist
     */
    public void saveCity(City city) {
        if (city != null) {
            ContentValues values = new ContentValues();
            values.put("city_name", city.getCityName());
            values.put("city_code", city.getCityCode());
            values.put("province_id", city.getProvinceId());
            db.insert("City", null, values);
        }
    }

    /**
     * Load all cities belonging to the given province.
     *
     * @param provinceId the id of the parent province
     * @return the cities of that province; empty if none are stored
     */
    public List<City> loadCities(int provinceId) {
        List<City> list = new ArrayList<City>();
        Cursor cursor = db.query("City", null, "province_id = ?",
                new String[]{String.valueOf(provinceId)}, null, null, null);
        // close the cursor in all cases to avoid leaking the underlying window
        try {
            if (cursor.moveToFirst()) {
                do {
                    City city = new City();
                    city.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    city.setCityName(cursor.getString(cursor.getColumnIndex("city_name")));
                    city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code")));
                    city.setProvinceId(provinceId);
                    list.add(city);
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return list;
    }

    /**
     * Store a County instance in the database. Null instances are ignored.
     *
     * @param county the county to persist
     */
    public void saveCounty(County county) {
        if (county != null) {
            ContentValues values = new ContentValues();
            values.put("county_name", county.getCountyName());
            values.put("county_code", county.getCountyCode());
            values.put("city_id", county.getCityId());
            db.insert("County", null, values);
        }
    }

    /**
     * Load all counties belonging to the given city.
     *
     * @param cityId the id of the parent city
     * @return the counties of that city; empty if none are stored
     */
    public List<County> loadCounties(int cityId) {
        List<County> list = new ArrayList<County>();
        Cursor cursor = db.query("County", null, "city_id = ?",
                new String[]{String.valueOf(cityId)}, null, null, null);
        // close the cursor in all cases to avoid leaking the underlying window
        try {
            if (cursor.moveToFirst()) {
                do {
                    County county = new County();
                    county.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name")));
                    county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code")));
                    county.setCityId(cityId);
                    list.add(county);
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return list;
    }
}
apache-2.0
Axway/Grapes
server/src/test/java/org/axway/grapes/server/webapp/resources/WebSearchResourceTest.java
5448
package org.axway.grapes.server.webapp.resources;

import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.yammer.dropwizard.auth.basic.BasicAuthProvider;
import com.yammer.dropwizard.testing.ResourceTest;
import org.axway.grapes.commons.api.ServerAPI;
import org.axway.grapes.server.GrapesTestUtils;
import org.axway.grapes.server.config.GrapesServerConfig;
import org.axway.grapes.server.core.options.FiltersHolder;
import org.axway.grapes.server.db.RepositoryHandler;
import org.axway.grapes.server.db.datamodel.DbCredential;
import org.axway.grapes.server.db.datamodel.DbSearch;
import org.axway.grapes.server.webapp.auth.GrapesAuthenticator;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.Test;

import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests the JSON rendering of {@link WebSearchResource} for searches that
 * match modules, artifacts, both, or nothing. The repository handler is a
 * mock; each test stubs the DbSearch it should return and checks the
 * serialized response body.
 */
public class WebSearchResourceTest extends ResourceTest {

    private RepositoryHandler repositoryHandler;

    @Override
    protected void setUpResources() throws Exception {
        repositoryHandler = GrapesTestUtils.getRepoHandlerMock();
        final GrapesServerConfig config = mock(GrapesServerConfig.class);
        final WebSearchResource resource = new WebSearchResource(repositoryHandler, config);
        addProvider(new BasicAuthProvider<DbCredential>(new GrapesAuthenticator(repositoryHandler), "test auth"));
        addResource(resource);
    }

    /**
     * Stubs the mocked handler to return the given search for "testSearch".
     */
    private void stubSearchResult(final DbSearch search) {
        when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
    }

    /**
     * Issues GET /search/testSearch, asserts a 200 response, and returns
     * the JSON body.
     */
    private String fetchSearchResults() {
        final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
        final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertNotNull(response);
        assertEquals(HttpStatus.OK_200, response.getStatus());
        return response.getEntity(new GenericType<String>() {
        });
    }

    @Test
    public void getSearchResult() throws Exception {
        final List<String> matchedModules = new ArrayList<>();
        matchedModules.add("testSearch_id_1");
        matchedModules.add("testSearch_id_2");
        final List<String> matchedArtifacts = new ArrayList<>();
        matchedArtifacts.add("testSearch_artifact_id_1");
        matchedArtifacts.add("testSearch_artifact_id_2");
        final DbSearch search = new DbSearch();
        search.setModules(matchedModules);
        search.setArtifacts(matchedArtifacts);
        stubSearchResult(search);

        final String results = fetchSearchResults();
        assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
    }

    @Test
    public void getNullSearchResult() {
        // an empty DbSearch serializes both fields as null
        stubSearchResult(new DbSearch());

        final String results = fetchSearchResults();
        assertEquals("{\"modules\":null,\"artifacts\":null}", results);
    }

    @Test
    public void getModulesSearchResult() {
        final DbSearch search = new DbSearch();
        final List<String> matchedModules = new ArrayList<>();
        matchedModules.add("testSearch_id_1");
        matchedModules.add("testSearch_id_2");
        search.setModules(matchedModules);
        stubSearchResult(search);

        final String results = fetchSearchResults();
        assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":null}", results);
    }

    @Test
    public void getArtifactsSearchResult() {
        final DbSearch search = new DbSearch();
        final List<String> matchedArtifacts = new ArrayList<>();
        matchedArtifacts.add("testSearch_artifact_id_1");
        matchedArtifacts.add("testSearch_artifact_id_2");
        search.setArtifacts(matchedArtifacts);
        stubSearchResult(search);

        final String results = fetchSearchResults();
        assertEquals("{\"modules\":null,\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
    }
}
apache-2.0
uschindler/elasticsearch
client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
199205
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client.documentation; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningGetResultsIT; import org.elasticsearch.client.MachineLearningIT; import org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.PageParams; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; import org.elasticsearch.client.ml.DeleteCalendarEventRequest; import org.elasticsearch.client.ml.DeleteCalendarJobRequest; import org.elasticsearch.client.ml.DeleteCalendarRequest; import 
org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.DeleteDatafeedRequest; import org.elasticsearch.client.ml.DeleteExpiredDataRequest; import org.elasticsearch.client.ml.DeleteExpiredDataResponse; import org.elasticsearch.client.ml.DeleteFilterRequest; import org.elasticsearch.client.ml.DeleteForecastRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; import org.elasticsearch.client.ml.DeleteTrainedModelRequest; import org.elasticsearch.client.ml.EstimateModelMemoryRequest; import org.elasticsearch.client.ml.EstimateModelMemoryResponse; import org.elasticsearch.client.ml.EvaluateDataFrameRequest; import org.elasticsearch.client.ml.EvaluateDataFrameResponse; import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.FindFileStructureRequest; import org.elasticsearch.client.ml.FindFileStructureResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; import org.elasticsearch.client.ml.GetCalendarEventsRequest; import org.elasticsearch.client.ml.GetCalendarEventsResponse; import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCalendarsResponse; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; import 
org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetDatafeedResponse; import org.elasticsearch.client.ml.GetDatafeedStatsRequest; import org.elasticsearch.client.ml.GetDatafeedStatsResponse; import org.elasticsearch.client.ml.GetFiltersRequest; import org.elasticsearch.client.ml.GetFiltersResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobResponse; import org.elasticsearch.client.ml.GetJobStatsRequest; import org.elasticsearch.client.ml.GetJobStatsResponse; import org.elasticsearch.client.ml.GetModelSnapshotsRequest; import org.elasticsearch.client.ml.GetModelSnapshotsResponse; import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetOverallBucketsResponse; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.GetRecordsResponse; import org.elasticsearch.client.ml.GetTrainedModelsRequest; import org.elasticsearch.client.ml.GetTrainedModelsResponse; import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; import org.elasticsearch.client.ml.MlInfoRequest; import org.elasticsearch.client.ml.MlInfoResponse; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.OpenJobResponse; import org.elasticsearch.client.ml.PostCalendarEventRequest; import org.elasticsearch.client.ml.PostCalendarEventResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.PreviewDatafeedRequest; import org.elasticsearch.client.ml.PreviewDatafeedResponse; import org.elasticsearch.client.ml.PutCalendarJobRequest; import 
org.elasticsearch.client.ml.PutCalendarRequest; import org.elasticsearch.client.ml.PutCalendarResponse; import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.PutDatafeedRequest; import org.elasticsearch.client.ml.PutDatafeedResponse; import org.elasticsearch.client.ml.PutFilterRequest; import org.elasticsearch.client.ml.PutFilterResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; import org.elasticsearch.client.ml.PutTrainedModelRequest; import org.elasticsearch.client.ml.PutTrainedModelResponse; import org.elasticsearch.client.ml.RevertModelSnapshotRequest; import org.elasticsearch.client.ml.RevertModelSnapshotResponse; import org.elasticsearch.client.ml.SetUpgradeModeRequest; import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.StartDatafeedRequest; import org.elasticsearch.client.ml.StartDatafeedResponse; import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse; import org.elasticsearch.client.ml.StopDatafeedRequest; import org.elasticsearch.client.ml.StopDatafeedResponse; import org.elasticsearch.client.ml.UpdateDatafeedRequest; import org.elasticsearch.client.ml.UpdateFilterRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; import org.elasticsearch.client.ml.UpdateModelSnapshotResponse; import org.elasticsearch.client.ml.calendars.Calendar; import org.elasticsearch.client.ml.calendars.ScheduledEvent; import org.elasticsearch.client.ml.calendars.ScheduledEventTests; import org.elasticsearch.client.ml.datafeed.ChunkingConfig; import org.elasticsearch.client.ml.datafeed.DatafeedConfig; import 
org.elasticsearch.client.ml.datafeed.DatafeedStats; import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig; import org.elasticsearch.client.ml.dataframe.Classification; import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats; import org.elasticsearch.client.ml.dataframe.OutlierDetection; import org.elasticsearch.client.ml.dataframe.QueryConfig; import org.elasticsearch.client.ml.dataframe.Regression; import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.ActualClass; import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.PredictedClass; import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric; import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric.ConfusionMatrix; import 
org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric; import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric; import org.elasticsearch.client.ml.dataframe.explain.FieldSelection; import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation; import org.elasticsearch.client.ml.filestructurefinder.FileStructure; import org.elasticsearch.client.ml.inference.InferenceToXContentCompressor; import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.client.ml.inference.TrainedModelConfig; import org.elasticsearch.client.ml.inference.TrainedModelDefinition; import org.elasticsearch.client.ml.inference.TrainedModelDefinitionTests; import org.elasticsearch.client.ml.inference.TrainedModelInput; import org.elasticsearch.client.ml.inference.TrainedModelStats; import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; import org.elasticsearch.client.ml.job.config.AnalysisConfig; import org.elasticsearch.client.ml.job.config.AnalysisLimits; import org.elasticsearch.client.ml.job.config.DataDescription; import org.elasticsearch.client.ml.job.config.DetectionRule; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.client.ml.job.config.MlFilter; import org.elasticsearch.client.ml.job.config.ModelPlotConfig; import org.elasticsearch.client.ml.job.config.Operator; import org.elasticsearch.client.ml.job.config.RuleCondition; import org.elasticsearch.client.ml.job.process.DataCounts; import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.client.ml.job.results.AnomalyRecord; import org.elasticsearch.client.ml.job.results.Bucket; import org.elasticsearch.client.ml.job.results.CategoryDefinition; import 
org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
import org.junit.After;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;

/**
 * Documentation integration tests for the machine learning APIs of the high level REST client.
 * NOTE(review): the {@code // tag::}/{@code // end::} line comments appear to delimit snippets
 * that are extracted into the published docs — keep them (and the code between them) intact
 * when editing; the numbered {@code // <n>} comments are the docs' callout markers.
 */
public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {

    /** Clears ML state via {@link MlTestStateCleaner} after each test so cases stay independent. */
    @After
    public void cleanUp() throws IOException {
        new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
    }

    /**
     * Demonstrates the put-job API: building a detector, analysis config and data description,
     * then executing the request synchronously and asynchronously.
     */
    public
    void testCreateJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        // tag::put-job-detector
        Detector.Builder detectorBuilder = new Detector.Builder()
            .setFunction("sum") // <1>
            .setFieldName("total") // <2>
            .setDetectorDescription("Sum of total"); // <3>
        // end::put-job-detector
        // tag::put-job-analysis-config
        List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
        AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
            .setBucketSpan(TimeValue.timeValueMinutes(10)); // <3>
        // end::put-job-analysis-config
        // tag::put-job-data-description
        DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
            .setTimeField("timestamp"); // <1>
        // end::put-job-data-description
        {
            String id = "job_1";
            // tag::put-job-config
            Job.Builder jobBuilder = new Job.Builder(id) // <1>
                .setAnalysisConfig(analysisConfigBuilder) // <2>
                .setDataDescription(dataDescriptionBuilder) // <3>
                .setDescription("Total sum of requests"); // <4>
            // end::put-job-config
            // tag::put-job-request
            PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
            // end::put-job-request
            // tag::put-job-execute
            PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
            // end::put-job-execute
            // tag::put-job-response
            Date createTime = response.getResponse().getCreateTime(); // <1>
            // end::put-job-response
            assertThat(createTime.getTime(), greaterThan(0L));
        }
        {
            String id = "job_2";
            Job.Builder jobBuilder = new Job.Builder(id)
                .setAnalysisConfig(analysisConfigBuilder)
                .setDataDescription(dataDescriptionBuilder)
                .setDescription("Total sum of requests");
            PutJobRequest request = new PutJobRequest(jobBuilder.build());
            // tag::put-job-execute-listener
            ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
                @Override
                public void onResponse(PutJobResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-job-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-job-execute-async
            client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the get-job API with wildcard expansion, sync and async execution. */
    public void testGetJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("get-machine-learning-job1");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job2");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        {
            // tag::get-job-request
            GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
            request.setAllowNoJobs(true); // <2>
            // end::get-job-request
            // tag::get-job-execute
            GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
            // end::get-job-execute
            // tag::get-job-response
            long numberOfJobs = response.count(); // <1>
            List<Job> jobs = response.jobs(); // <2>
            // end::get-job-response
            assertEquals(2, response.count());
            assertThat(response.jobs(), hasSize(2));
            assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()),
                containsInAnyOrder(job.getId(), secondJob.getId()));
        }
        {
            GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*");
            // tag::get-job-execute-listener
            ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() {
                @Override
                public void onResponse(GetJobResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-job-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new
            LatchedActionListener<>(listener, latch);
            // tag::get-job-execute-async
            client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the delete-job API, including the force and wait-for-completion options. */
    public void testDeleteJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String jobId = "my-first-machine-learning-job";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        {
            //tag::delete-job-request
            DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1>
            //end::delete-job-request
            //tag::delete-job-request-force
            deleteJobRequest.setForce(false); // <1>
            //end::delete-job-request-force
            //tag::delete-job-request-wait-for-completion
            deleteJobRequest.setWaitForCompletion(true); // <1>
            //end::delete-job-request-wait-for-completion
            //tag::delete-job-execute
            DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
            //end::delete-job-execute
            //tag::delete-job-response
            Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1>
            TaskId task = deleteJobResponse.getTask(); // <2>
            //end::delete-job-response
            assertTrue(isAcknowledged);
            assertNull(task);
        }
        {
            //tag::delete-job-execute-listener
            ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
                @Override
                public void onResponse(DeleteJobResponse deleteJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-job-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            DeleteJobRequest deleteJobRequest = new
            DeleteJobRequest("my-second-machine-learning-job");
            // tag::delete-job-execute-async
            client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the open-job API with a timeout option, sync and async execution. */
    public void testOpenJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("opening-my-first-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        Job secondJob = MachineLearningIT.buildJob("opening-my-second-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        {
            // tag::open-job-request
            OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
            openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
            // end::open-job-request
            // tag::open-job-execute
            OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
            // end::open-job-execute
            // tag::open-job-response
            boolean isOpened = openJobResponse.isOpened(); // <1>
            String node = openJobResponse.getNode(); // <2>
            // end::open-job-response
            assertThat(node, notNullValue());
        }
        {
            // tag::open-job-execute-listener
            ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
                @Override
                public void onResponse(OpenJobResponse openJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::open-job-execute-listener
            OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-second-machine-learning-job");
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::open-job-execute-async
            client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::open-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the close-job API with force/allow-no-jobs/timeout options, sync and async. */
    public void testCloseJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            Job job = MachineLearningIT.buildJob("closing-my-first-machine-learning-job");
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
            // tag::close-job-request
            CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
            closeJobRequest.setForce(false); // <2>
            closeJobRequest.setAllowNoJobs(true); // <3>
            closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
            // end::close-job-request
            // tag::close-job-execute
            CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
            // end::close-job-execute
            // tag::close-job-response
            boolean isClosed = closeJobResponse.isClosed(); // <1>
            // end::close-job-response
        }
        {
            Job job = MachineLearningIT.buildJob("closing-my-second-machine-learning-job");
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
            // tag::close-job-execute-listener
            ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
                @Override
                public void onResponse(CloseJobResponse closeJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::close-job-execute-listener
            CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job");
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::close-job-execute-async
            client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::close-job-execute-async
            assertTrue(latch.await(30L,
                TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the update-job API covering every JobUpdate.Builder option. */
    public void testUpdateJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-update-job";
        Job tempJob = MachineLearningIT.buildJob(jobId);
        Job job = new Job.Builder(tempJob)
            .setAnalysisConfig(new AnalysisConfig.Builder(tempJob.getAnalysisConfig())
                .setCategorizationFieldName("categorization-field")
                .setDetector(0, new Detector.Builder().setFieldName("total")
                    .setFunction("sum")
                    .setPartitionFieldName("mlcategory")
                    .setDetectorDescription(randomAlphaOfLength(10))
                    .build()))
            .build();
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        {
            List<DetectionRule> detectionRules = Arrays.asList(
                new DetectionRule.Builder(Arrays.asList(RuleCondition.createTime(Operator.GT, 100L))).build());
            Map<String, Object> customSettings = new HashMap<>();
            customSettings.put("custom-setting-1", "custom-value");
            // tag::update-job-detector-options
            JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
                "detector description", // <2>
                detectionRules); // <3>
            // end::update-job-detector-options
            // tag::update-job-options
            JobUpdate update = new JobUpdate.Builder(jobId) // <1>
                .setDescription("My description") // <2>
                .setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
                .setBackgroundPersistInterval(TimeValue.timeValueHours(3)) // <4>
                .setCategorizationFilters(Arrays.asList("categorization-filter")) // <5>
                .setDetectorUpdates(Arrays.asList(detectorUpdate)) // <6>
                .setGroups(Arrays.asList("job-group-1")) // <7>
                .setResultsRetentionDays(10L) // <8>
                .setModelPlotConfig(new ModelPlotConfig(true, null, true)) // <9>
                .setModelSnapshotRetentionDays(7L) // <10>
                .setCustomSettings(customSettings) // <11>
                .setRenormalizationWindowDays(3L) // <12>
                .build();
            // end::update-job-options
            // tag::update-job-request
            UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
            // end::update-job-request
            // tag::update-job-execute
            PutJobResponse updateJobResponse =
                client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
            // end::update-job-execute
            // tag::update-job-response
            Job updatedJob = updateJobResponse.getResponse(); // <1>
            // end::update-job-response
            assertEquals(update.getDescription(), updatedJob.getDescription());
        }
        {
            // tag::update-job-execute-listener
            ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
                @Override
                public void onResponse(PutJobResponse updateJobResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::update-job-execute-listener
            UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build());
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::update-job-execute-async
            client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::update-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the put-datafeed API and each DatafeedConfig.Builder option. */
    public void testPutDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // We need to create a job for the datafeed request to be valid
            String jobId = "put-datafeed-job-1";
            Job job = MachineLearningIT.buildJob(jobId);
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            String id = "datafeed-1";
            // tag::put-datafeed-config
            DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder(id, jobId) // <1>
                .setIndices("index_1", "index_2"); // <2>
            // end::put-datafeed-config
            AggregatorFactories.Builder aggs = AggregatorFactories.builder();
            // tag::put-datafeed-config-set-aggregations
            datafeedBuilder.setAggregations(aggs); // <1>
            // end::put-datafeed-config-set-aggregations
            // Clearing aggregation to avoid complex validation rules
            datafeedBuilder.setAggregations((String) null);
            // tag::put-datafeed-config-set-chunking-config
            datafeedBuilder.setChunkingConfig(ChunkingConfig.newAuto()); // <1>
            // end::put-datafeed-config-set-chunking-config
            // tag::put-datafeed-config-set-frequency
            datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); // <1>
            // end::put-datafeed-config-set-frequency
            // tag::put-datafeed-config-set-query
            datafeedBuilder.setQuery(QueryBuilders.matchAllQuery()); // <1>
            // end::put-datafeed-config-set-query
            // tag::put-datafeed-config-set-query-delay
            datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
            // end::put-datafeed-config-set-query-delay
            // tag::put-datafeed-config-set-delayed-data-check-config
            datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig
                .enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1))); // <1>
            // end::put-datafeed-config-set-delayed-data-check-config
            // no need to accidentally trip internal validations due to job bucket size
            datafeedBuilder.setDelayedDataCheckConfig(null);
            List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
            // tag::put-datafeed-config-set-script-fields
            datafeedBuilder.setScriptFields(scriptFields); // <1>
            // end::put-datafeed-config-set-script-fields
            // tag::put-datafeed-config-set-scroll-size
            datafeedBuilder.setScrollSize(1000); // <1>
            // end::put-datafeed-config-set-scroll-size
            // tag::put-datafeed-request
            PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build()); // <1>
            // end::put-datafeed-request
            // tag::put-datafeed-execute
            PutDatafeedResponse response = client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
            // end::put-datafeed-execute
            // tag::put-datafeed-response
            DatafeedConfig datafeed = response.getResponse(); // <1>
            // end::put-datafeed-response
            assertThat(datafeed.getId(), equalTo("datafeed-1"));
        }
        {
            // We need to create a job for the datafeed request to be valid
            String jobId = "put-datafeed-job-2";
            Job job = MachineLearningIT.buildJob(jobId);
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            String id = "datafeed-2";
            DatafeedConfig datafeed = new DatafeedConfig.Builder(id, jobId).setIndices("index_1", "index_2").build();
            PutDatafeedRequest request = new PutDatafeedRequest(datafeed);
            // tag::put-datafeed-execute-listener
            ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
                @Override
                public void onResponse(PutDatafeedResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-datafeed-execute-async
            client.machineLearning().putDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the update-datafeed API and each DatafeedUpdate.Builder option. */
    public void testUpdateDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("update-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            AggregatorFactories.Builder aggs = AggregatorFactories.builder();
            List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
            // tag::update-datafeed-config
            DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder(datafeedId) // <1>
                .setAggregations(aggs) // <2>
                .setIndices("index_1", "index_2") // <3>
                .setChunkingConfig(ChunkingConfig.newAuto()) // <4>
                .setFrequency(TimeValue.timeValueSeconds(30)) // <5>
                .setQuery(QueryBuilders.matchAllQuery()) // <6>
                .setQueryDelay(TimeValue.timeValueMinutes(1)) // <7>
                .setScriptFields(scriptFields) // <8>
                .setScrollSize(1000); // <9>
            // end::update-datafeed-config
            // Clearing aggregation to avoid complex validation rules
            datafeedUpdateBuilder.setAggregations((String) null);
            // tag::update-datafeed-request
            UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdateBuilder.build()); // <1>
            // end::update-datafeed-request
            // tag::update-datafeed-execute
            PutDatafeedResponse response = client.machineLearning().updateDatafeed(request, RequestOptions.DEFAULT);
            // end::update-datafeed-execute
            // tag::update-datafeed-response
            DatafeedConfig updatedDatafeed = response.getResponse(); // <1>
            // end::update-datafeed-response
            assertThat(updatedDatafeed.getId(), equalTo(datafeedId));
        }
        {
            DatafeedUpdate datafeedUpdate = new DatafeedUpdate.Builder(datafeedId).setIndices("index_1", "index_2").build();
            UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdate);
            // tag::update-datafeed-execute-listener
            ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
                @Override
                public void onResponse(PutDatafeedResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::update-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::update-datafeed-execute-async
            client.machineLearning().updateDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::update-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the get-datafeed API, sync and async execution. */
    public void testGetDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("get-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
        client.machineLearning().putDatafeed(new
            PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::get-datafeed-request
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
            request.setAllowNoDatafeeds(true); // <2>
            // end::get-datafeed-request
            // tag::get-datafeed-execute
            GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
            // end::get-datafeed-execute
            // tag::get-datafeed-response
            long numberOfDatafeeds = response.count(); // <1>
            List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
            // end::get-datafeed-response
            assertEquals(1, numberOfDatafeeds);
            assertEquals(1, datafeeds.size());
        }
        {
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);
            // tag::get-datafeed-execute-listener
            ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
                @Override
                public void onResponse(GetDatafeedResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-datafeed-execute-async
            client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the delete-datafeed API, sync and async execution. */
    public void testDeleteDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-delete-datafeed-job";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = "test-delete-datafeed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::delete-datafeed-request
            DeleteDatafeedRequest deleteDatafeedRequest = new
            DeleteDatafeedRequest(datafeedId);
            deleteDatafeedRequest.setForce(false); // <1>
            // end::delete-datafeed-request
            // tag::delete-datafeed-execute
            AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
                deleteDatafeedRequest, RequestOptions.DEFAULT);
            // end::delete-datafeed-execute
            // tag::delete-datafeed-response
            boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
            // end::delete-datafeed-response
        }
        // Recreate datafeed to allow second deletion
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::delete-datafeed-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
            // tag::delete-datafeed-execute-async
            client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    /** Demonstrates the preview-datafeed API, sync and async execution. */
    public void testPreviewDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("preview-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        String indexName = "preview_data_2";
        createIndex(indexName);
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
            .setIndices(indexName)
            .build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::preview-datafeed-request
PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1> // end::preview-datafeed-request // tag::preview-datafeed-execute PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT); // end::preview-datafeed-execute // tag::preview-datafeed-response BytesReference rawPreview = response.getPreview(); // <1> List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2> // end::preview-datafeed-response assertTrue(semiParsedPreview.isEmpty()); } { PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // tag::preview-datafeed-execute-listener ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() { @Override public void onResponse(PreviewDatafeedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::preview-datafeed-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::preview-datafeed-execute-async client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::preview-datafeed-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testStartDatafeed() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("start-datafeed-job"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); String datafeedId = job.getId() + "-feed"; String indexName = "start_data_2"; createIndex(indexName); DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); { // tag::start-datafeed-request 
StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1> // end::start-datafeed-request // tag::start-datafeed-request-options request.setEnd("2018-08-21T00:00:00Z"); // <1> request.setStart("2018-08-20T00:00:00Z"); // <2> request.setTimeout(TimeValue.timeValueMinutes(10)); // <3> // end::start-datafeed-request-options // tag::start-datafeed-execute StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT); // end::start-datafeed-execute // tag::start-datafeed-response boolean started = response.isStarted(); // <1> String node = response.getNode(); // <2> // end::start-datafeed-response assertTrue(started); assertThat(node, notNullValue()); } { StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // tag::start-datafeed-execute-listener ActionListener<StartDatafeedResponse> listener = new ActionListener<StartDatafeedResponse>() { @Override public void onResponse(StartDatafeedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::start-datafeed-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::start-datafeed-execute-async client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::start-datafeed-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testStopDatafeed() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::stop-datafeed-request StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1> // end::stop-datafeed-request request = StopDatafeedRequest.stopAllDatafeedsRequest(); // tag::stop-datafeed-request-options request.setAllowNoDatafeeds(true); // <1> request.setForce(true); // <2> request.setTimeout(TimeValue.timeValueMinutes(10)); // <3> // 
end::stop-datafeed-request-options // tag::stop-datafeed-execute StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT); // end::stop-datafeed-execute // tag::stop-datafeed-response boolean stopped = response.isStopped(); // <1> // end::stop-datafeed-response assertTrue(stopped); } { StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest(); // tag::stop-datafeed-execute-listener ActionListener<StopDatafeedResponse> listener = new ActionListener<StopDatafeedResponse>() { @Override public void onResponse(StopDatafeedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::stop-datafeed-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::stop-datafeed-execute-async client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::stop-datafeed-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetDatafeedStats() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats1"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); Job secondJob = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats2"); client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT); String datafeedId1 = job.getId() + "-feed"; String indexName = "datafeed_stats_data_2"; createIndex(indexName); DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId()) .setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT); String datafeedId2 = secondJob.getId() + "-feed"; DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId()) 
.setIndices(indexName) .build(); client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT); { //tag::get-datafeed-stats-request GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("get-machine-learning-datafeed-stats1-feed", "get-machine-learning-datafeed*"); // <1> request.setAllowNoDatafeeds(true); // <2> //end::get-datafeed-stats-request //tag::get-datafeed-stats-execute GetDatafeedStatsResponse response = client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT); //end::get-datafeed-stats-execute //tag::get-datafeed-stats-response long numberOfDatafeedStats = response.count(); // <1> List<DatafeedStats> datafeedStats = response.datafeedStats(); // <2> //end::get-datafeed-stats-response assertEquals(2, response.count()); assertThat(response.datafeedStats(), hasSize(2)); assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()), containsInAnyOrder(datafeed.getId(), secondDatafeed.getId())); } { GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("*"); // tag::get-datafeed-stats-execute-listener ActionListener<GetDatafeedStatsResponse> listener = new ActionListener<GetDatafeedStatsResponse>() { @Override public void onResponse(GetDatafeedStatsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-datafeed-stats-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-datafeed-stats-execute-async client.machineLearning().getDatafeedStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-datafeed-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String jobId = "test-get-buckets"; Job job = 
MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a bucket IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," + "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); { // tag::get-buckets-request GetBucketsRequest request = new GetBucketsRequest(jobId); // <1> // end::get-buckets-request // tag::get-buckets-timestamp request.setTimestamp("2018-08-17T00:00:00Z"); // <1> // end::get-buckets-timestamp // Set timestamp to null as it is incompatible with other args request.setTimestamp(null); // tag::get-buckets-anomaly-score request.setAnomalyScore(75.0); // <1> // end::get-buckets-anomaly-score // tag::get-buckets-desc request.setDescending(true); // <1> // end::get-buckets-desc // tag::get-buckets-end request.setEnd("2018-08-21T00:00:00Z"); // <1> // end::get-buckets-end // tag::get-buckets-exclude-interim request.setExcludeInterim(true); // <1> // end::get-buckets-exclude-interim // tag::get-buckets-expand request.setExpand(true); // <1> // end::get-buckets-expand // tag::get-buckets-page request.setPageParams(new PageParams(100, 200)); // <1> // end::get-buckets-page // Set page params back to null so the response contains the bucket we indexed request.setPageParams(null); // tag::get-buckets-sort request.setSort("anomaly_score"); // <1> // end::get-buckets-sort // tag::get-buckets-start request.setStart("2018-08-01T00:00:00Z"); // <1> // end::get-buckets-start // tag::get-buckets-execute GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT); // end::get-buckets-execute // tag::get-buckets-response long count = response.count(); // <1> List<Bucket> 
buckets = response.buckets(); // <2> // end::get-buckets-response assertEquals(1, buckets.size()); } { GetBucketsRequest request = new GetBucketsRequest(jobId); // tag::get-buckets-execute-listener ActionListener<GetBucketsResponse> listener = new ActionListener<GetBucketsResponse>() { @Override public void onResponse(GetBucketsResponse getBucketsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-buckets-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-buckets-execute-async client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-buckets-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testFlushJob() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job"); client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT); { // tag::flush-job-request FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1> // end::flush-job-request // tag::flush-job-request-options flushJobRequest.setCalcInterim(true); // <1> flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2> flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3> flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4> flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5> // end::flush-job-request-options // 
tag::flush-job-execute FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT); // end::flush-job-execute // tag::flush-job-response boolean isFlushed = flushJobResponse.isFlushed(); // <1> Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2> // end::flush-job-response } { // tag::flush-job-execute-listener ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() { @Override public void onResponse(FlushJobResponse FlushJobResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::flush-job-execute-listener FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job"); // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::flush-job-execute-async client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1> // end::flush-job-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteForecast() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("deleting-forecast-for-job"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); for(int i = 0; i < 30; i++) { Map<String, Object> hashMap = new HashMap<>(); hashMap.put("total", randomInt(1000)); hashMap.put("timestamp", (i+1)*1000); builder.addDoc(hashMap); } PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder); client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT); client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT); ForecastJobResponse 
forecastJobResponse = client.machineLearning(). forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT); String forecastId = forecastJobResponse.getForecastId(); GetRequest request = new GetRequest(".ml-anomalies-" + job.getId()); request.id(job.getId() + "_model_forecast_request_stats_" + forecastId); assertBusy(() -> { GetResponse getResponse = highLevelClient().get(request, RequestOptions.DEFAULT); assertTrue(getResponse.isExists()); assertTrue(getResponse.getSourceAsString().contains("finished")); }, 30, TimeUnit.SECONDS); { // tag::delete-forecast-request DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1> // end::delete-forecast-request // tag::delete-forecast-request-options deleteForecastRequest.setForecastIds(forecastId); // <1> deleteForecastRequest.timeout("30s"); // <2> deleteForecastRequest.setAllowNoForecasts(true); // <3> // end::delete-forecast-request-options // tag::delete-forecast-execute AcknowledgedResponse deleteForecastResponse = client.machineLearning().deleteForecast(deleteForecastRequest, RequestOptions.DEFAULT); // end::delete-forecast-execute // tag::delete-forecast-response boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1> // end::delete-forecast-response } { // tag::delete-forecast-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse DeleteForecastResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-forecast-execute-listener DeleteForecastRequest deleteForecastRequest = DeleteForecastRequest.deleteAllForecasts(job.getId()); deleteForecastRequest.setAllowNoForecasts(true); // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-forecast-execute-async 
client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1> // end::delete-forecast-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetJobStats() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2"); client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT); { // tag::get-job-stats-request GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1> request.setAllowNoJobs(true); // <2> // end::get-job-stats-request // tag::get-job-stats-execute GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT); // end::get-job-stats-execute // tag::get-job-stats-response long numberOfJobStats = response.count(); // <1> List<JobStats> jobStats = response.jobStats(); // <2> // end::get-job-stats-response assertEquals(2, response.count()); assertThat(response.jobStats(), hasSize(2)); assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), containsInAnyOrder(job.getId(), secondJob.getId())); } { GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // tag::get-job-stats-execute-listener ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() { @Override public void onResponse(GetJobStatsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-job-stats-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // 
tag::get-job-stats-execute-async client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-job-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testForecastJob() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); for(int i = 0; i < 30; i++) { Map<String, Object> hashMap = new HashMap<>(); hashMap.put("total", randomInt(1000)); hashMap.put("timestamp", (i+1)*1000); builder.addDoc(hashMap); } PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder); client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT); client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT); { // tag::forecast-job-request ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1> // end::forecast-job-request // tag::forecast-job-request-options forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1> forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2> forecastJobRequest.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // <3> // end::forecast-job-request-options // tag::forecast-job-execute ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT); // end::forecast-job-execute // tag::forecast-job-response boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1> String forecastId = forecastJobResponse.getForecastId(); // <2> // end::forecast-job-response assertTrue(isAcknowledged); assertNotNull(forecastId); } { // 
tag::forecast-job-execute-listener ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() { @Override public void onResponse(ForecastJobResponse forecastJobResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::forecast-job-execute-listener ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::forecast-job-execute-async client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1> // end::forecast-job-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetOverallBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String jobId1 = "test-get-overall-buckets-1"; String jobId2 = "test-get-overall-buckets-2"; Job job1 = MachineLearningGetResultsIT.buildJob(jobId1); Job job2 = MachineLearningGetResultsIT.buildJob(jobId2); client.machineLearning().putJob(new PutJobRequest(job1), RequestOptions.DEFAULT); client.machineLearning().putJob(new PutJobRequest(job2), RequestOptions.DEFAULT); // Let us index some buckets BulkRequest bulkRequest = new BulkRequest(); bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); { IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," + "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON); bulkRequest.add(indexRequest); } { IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," + "\"bucket_span\": 
3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON); bulkRequest.add(indexRequest); } client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::get-overall-buckets-request GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1> // end::get-overall-buckets-request // tag::get-overall-buckets-bucket-span request.setBucketSpan(TimeValue.timeValueHours(24)); // <1> // end::get-overall-buckets-bucket-span // tag::get-overall-buckets-end request.setEnd("2018-08-21T00:00:00Z"); // <1> // end::get-overall-buckets-end // tag::get-overall-buckets-exclude-interim request.setExcludeInterim(true); // <1> // end::get-overall-buckets-exclude-interim // tag::get-overall-buckets-overall-score request.setOverallScore(75.0); // <1> // end::get-overall-buckets-overall-score // tag::get-overall-buckets-start request.setStart("2018-08-01T00:00:00Z"); // <1> // end::get-overall-buckets-start // tag::get-overall-buckets-top-n request.setTopN(2); // <1> // end::get-overall-buckets-top-n // tag::get-overall-buckets-execute GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT); // end::get-overall-buckets-execute // tag::get-overall-buckets-response long count = response.count(); // <1> List<OverallBucket> overallBuckets = response.overallBuckets(); // <2> // end::get-overall-buckets-response assertEquals(1, overallBuckets.size()); assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001))); } { GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // tag::get-overall-buckets-execute-listener ActionListener<GetOverallBucketsResponse> listener = new ActionListener<GetOverallBucketsResponse>() { @Override public void onResponse(GetOverallBucketsResponse getOverallBucketsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-overall-buckets-execute-listener // Replace the empty listener by a blocking 
listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-overall-buckets-execute-async client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-overall-buckets-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetRecords() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String jobId = "test-get-records"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a record IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-get-records\", \"result_type\":\"record\", \"timestamp\": 1533081600000," + "\"bucket_span\": 600,\"is_interim\": false, \"record_score\": 80.0}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); { // tag::get-records-request GetRecordsRequest request = new GetRecordsRequest(jobId); // <1> // end::get-records-request // tag::get-records-desc request.setDescending(true); // <1> // end::get-records-desc // tag::get-records-end request.setEnd("2018-08-21T00:00:00Z"); // <1> // end::get-records-end // tag::get-records-exclude-interim request.setExcludeInterim(true); // <1> // end::get-records-exclude-interim // tag::get-records-page request.setPageParams(new PageParams(100, 200)); // <1> // end::get-records-page // Set page params back to null so the response contains the record we indexed request.setPageParams(null); // tag::get-records-record-score request.setRecordScore(75.0); // <1> // end::get-records-record-score // tag::get-records-sort request.setSort("probability"); // <1> // end::get-records-sort // tag::get-records-start request.setStart("2018-08-01T00:00:00Z"); // <1> // end::get-records-start // 
tag::get-records-execute GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT); // end::get-records-execute // tag::get-records-response long count = response.count(); // <1> List<AnomalyRecord> records = response.records(); // <2> // end::get-records-response assertEquals(1, records.size()); } { GetRecordsRequest request = new GetRecordsRequest(jobId); // tag::get-records-execute-listener ActionListener<GetRecordsResponse> listener = new ActionListener<GetRecordsResponse>() { @Override public void onResponse(GetRecordsResponse getRecordsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-records-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-records-execute-async client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-records-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPostData() throws Exception { RestHighLevelClient client = highLevelClient(); Job job = MachineLearningIT.buildJob("test-post-data"); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); { // tag::post-data-request PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1> Map<String, Object> mapData = new HashMap<>(); mapData.put("total", 109); jsonBuilder.addDoc(mapData); // <2> jsonBuilder.addDoc("{\"total\":1000}"); // <3> PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4> // end::post-data-request // tag::post-data-request-options postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1> postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2> // end::post-data-request-options 
postDataRequest.setResetEnd(null); postDataRequest.setResetStart(null); // tag::post-data-execute PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT); // end::post-data-execute // tag::post-data-response DataCounts dataCounts = postDataResponse.getDataCounts(); // <1> // end::post-data-response assertEquals(2, dataCounts.getInputRecordCount()); } { // tag::post-data-execute-listener ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() { @Override public void onResponse(PostDataResponse postDataResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::post-data-execute-listener PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); Map<String, Object> mapData = new HashMap<>(); mapData.put("total", 109); jsonBuilder.addDoc(mapData); PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <1> // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::post-data-execute-async client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1> // end::post-data-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testFindFileStructure() throws Exception { RestHighLevelClient client = highLevelClient(); Path anInterestingFile = createTempFile(); String contents = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\"," + "\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\"," + "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n" + "{\"logger\":\"controller\",\"timestamp\":1478261151445," + "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\"," + 
"\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n"; Files.write(anInterestingFile, Collections.singleton(contents), StandardCharsets.UTF_8); { // tag::find-file-structure-request FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest(); // <1> findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile)); // <2> // end::find-file-structure-request // tag::find-file-structure-request-options findFileStructureRequest.setLinesToSample(500); // <1> findFileStructureRequest.setExplain(true); // <2> // end::find-file-structure-request-options // tag::find-file-structure-execute FindFileStructureResponse findFileStructureResponse = client.machineLearning().findFileStructure(findFileStructureRequest, RequestOptions.DEFAULT); // end::find-file-structure-execute // tag::find-file-structure-response FileStructure structure = findFileStructureResponse.getFileStructure(); // <1> // end::find-file-structure-response assertEquals(2, structure.getNumLinesAnalyzed()); } { // tag::find-file-structure-execute-listener ActionListener<FindFileStructureResponse> listener = new ActionListener<FindFileStructureResponse>() { @Override public void onResponse(FindFileStructureResponse findFileStructureResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::find-file-structure-execute-listener FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest(); findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile)); // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::find-file-structure-execute-async client.machineLearning().findFileStructureAsync(findFileStructureRequest, RequestOptions.DEFAULT, listener); // <1> // end::find-file-structure-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void 
testGetInfluencers() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String jobId = "test-get-influencers"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a record IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-get-influencers\", \"result_type\":\"influencer\", \"timestamp\": 1533081600000," + "\"bucket_span\": 600,\"is_interim\": false, \"influencer_score\": 80.0, \"influencer_field_name\": \"my_influencer\"," + "\"influencer_field_value\":\"foo\"}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); { // tag::get-influencers-request GetInfluencersRequest request = new GetInfluencersRequest(jobId); // <1> // end::get-influencers-request // tag::get-influencers-desc request.setDescending(true); // <1> // end::get-influencers-desc // tag::get-influencers-end request.setEnd("2018-08-21T00:00:00Z"); // <1> // end::get-influencers-end // tag::get-influencers-exclude-interim request.setExcludeInterim(true); // <1> // end::get-influencers-exclude-interim // tag::get-influencers-influencer-score request.setInfluencerScore(75.0); // <1> // end::get-influencers-influencer-score // tag::get-influencers-page request.setPageParams(new PageParams(100, 200)); // <1> // end::get-influencers-page // Set page params back to null so the response contains the influencer we indexed request.setPageParams(null); // tag::get-influencers-sort request.setSort("probability"); // <1> // end::get-influencers-sort // tag::get-influencers-start request.setStart("2018-08-01T00:00:00Z"); // <1> // end::get-influencers-start // tag::get-influencers-execute GetInfluencersResponse response = client.machineLearning().getInfluencers(request, RequestOptions.DEFAULT); // end::get-influencers-execute // 
tag::get-influencers-response long count = response.count(); // <1> List<Influencer> influencers = response.influencers(); // <2> // end::get-influencers-response assertEquals(1, influencers.size()); } { GetInfluencersRequest request = new GetInfluencersRequest(jobId); // tag::get-influencers-execute-listener ActionListener<GetInfluencersResponse> listener = new ActionListener<GetInfluencersResponse>() { @Override public void onResponse(GetInfluencersResponse getInfluencersResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-influencers-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-influencers-execute-async client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-influencers-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetCategories() throws IOException, InterruptedException { /* Docs-snippet test for the get-categories API: indexes one category definition document, then runs the tagged sync and async snippets. Tagged regions are published docs content — keep verbatim. */ RestHighLevelClient client = highLevelClient(); String jobId = "test-get-categories"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a category IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," + " \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); { // tag::get-categories-request GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1> // end::get-categories-request // tag::get-categories-category-id request.setCategoryId(1L); // <1> // end::get-categories-category-id // tag::get-categories-page request.setPageParams(new
PageParams(100, 200)); // <1> // end::get-categories-page // Set page params back to null so the response contains the category we indexed request.setPageParams(null); // tag::get-categories-execute GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT); // end::get-categories-execute // tag::get-categories-response long count = response.count(); // <1> List<CategoryDefinition> categories = response.categories(); // <2> // end::get-categories-response assertEquals(1, categories.size()); } { GetCategoriesRequest request = new GetCategoriesRequest(jobId); // tag::get-categories-execute-listener ActionListener<GetCategoriesResponse> listener = new ActionListener<GetCategoriesResponse>() { @Override public void onResponse(GetCategoriesResponse getcategoriesResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-categories-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-categories-execute-async client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-categories-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteExpiredData() throws IOException, InterruptedException { /* Docs-snippet test for the delete-expired-data API. The job is built but not put to the cluster here — the API targets expired results cluster-wide (jobId only names the fixture). */ RestHighLevelClient client = highLevelClient(); String jobId = "test-delete-expired-data"; MachineLearningIT.buildJob(jobId); { // tag::delete-expired-data-request DeleteExpiredDataRequest request = new DeleteExpiredDataRequest( // <1> null, // <2> 1000.0f, // <3> TimeValue.timeValueHours(12) // <4> ); // end::delete-expired-data-request // tag::delete-expired-data-execute DeleteExpiredDataResponse response = client.machineLearning().deleteExpiredData(request, RequestOptions.DEFAULT); // end::delete-expired-data-execute // tag::delete-expired-data-response boolean deleted = 
response.getDeleted(); // <1> // end::delete-expired-data-response assertTrue(deleted); } { // tag::delete-expired-data-execute-listener ActionListener<DeleteExpiredDataResponse> listener = new ActionListener<DeleteExpiredDataResponse>() { @Override public void onResponse(DeleteExpiredDataResponse deleteExpiredDataResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-expired-data-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); DeleteExpiredDataRequest deleteExpiredDataRequest = new DeleteExpiredDataRequest(); // tag::delete-expired-data-execute-async client.machineLearning().deleteExpiredDataAsync(deleteExpiredDataRequest, RequestOptions.DEFAULT, listener); // <1> // end::delete-expired-data-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteModelSnapshot() throws IOException, InterruptedException { /* Docs-snippet test for delete-model-snapshot: the same snapshot document is indexed once per snippet block below (the first block deletes it, so it must be re-indexed for the async block). */ RestHighLevelClient client = highLevelClient(); String jobId = "test-delete-model-snapshot"; String snapshotId = "1541587919"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a snapshot IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " + "\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" + "\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " + "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," + "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, 
" + "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," + "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON); { client.index(indexRequest, RequestOptions.DEFAULT); // tag::delete-model-snapshot-request DeleteModelSnapshotRequest request = new DeleteModelSnapshotRequest(jobId, snapshotId); // <1> // end::delete-model-snapshot-request // tag::delete-model-snapshot-execute AcknowledgedResponse response = client.machineLearning().deleteModelSnapshot(request, RequestOptions.DEFAULT); // end::delete-model-snapshot-execute // tag::delete-model-snapshot-response boolean isAcknowledged = response.isAcknowledged(); // <1> // end::delete-model-snapshot-response assertTrue(isAcknowledged); } { client.index(indexRequest, RequestOptions.DEFAULT); // tag::delete-model-snapshot-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-model-snapshot-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, "1541587919"); // tag::delete-model-snapshot-execute-async client.machineLearning().deleteModelSnapshotAsync(deleteModelSnapshotRequest, RequestOptions.DEFAULT, listener); // <1> // end::delete-model-snapshot-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetModelSnapshots() throws IOException, InterruptedException { /* Docs-snippet test for get-model-snapshots: one snapshot document is indexed below, then the tagged option and execute snippets run against it (sync and async). */ RestHighLevelClient client = highLevelClient(); String jobId = "test-get-model-snapshots"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a snapshot 
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared"); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-get-model-snapshots\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " + "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" + "\"job_id\":\"test-get-model-snapshots\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " + "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," + "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " + "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," + "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); /* Snapshot doc is searchable immediately (IMMEDIATE refresh policy above). */ { // tag::get-model-snapshots-request GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId); // <1> // end::get-model-snapshots-request // tag::get-model-snapshots-snapshot-id request.setSnapshotId("1541587919"); // <1> // end::get-model-snapshots-snapshot-id // Set snapshot id to null as it is incompatible with other args request.setSnapshotId(null); // tag::get-model-snapshots-desc request.setDesc(true); // <1> // end::get-model-snapshots-desc // tag::get-model-snapshots-end request.setEnd("2018-11-07T21:00:00Z"); // <1> // end::get-model-snapshots-end // tag::get-model-snapshots-page request.setPageParams(new PageParams(100, 200)); // <1> // end::get-model-snapshots-page // Set page params back to null so the response contains the snapshot we indexed request.setPageParams(null); // tag::get-model-snapshots-sort request.setSort("latest_result_time_stamp"); // <1> // end::get-model-snapshots-sort // tag::get-model-snapshots-start request.setStart("2018-11-07T00:00:00Z"); // <1> // end::get-model-snapshots-start // 
tag::get-model-snapshots-execute GetModelSnapshotsResponse response = client.machineLearning().getModelSnapshots(request, RequestOptions.DEFAULT); // end::get-model-snapshots-execute // tag::get-model-snapshots-response long count = response.count(); // <1> List<ModelSnapshot> modelSnapshots = response.snapshots(); // <2> // end::get-model-snapshots-response assertEquals(1, modelSnapshots.size()); } { GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId); // tag::get-model-snapshots-execute-listener ActionListener<GetModelSnapshotsResponse> listener = new ActionListener<GetModelSnapshotsResponse>() { @Override public void onResponse(GetModelSnapshotsResponse getModelSnapshotsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-model-snapshots-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-model-snapshots-execute-async client.machineLearning().getModelSnapshotsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-model-snapshots-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testRevertModelSnapshot() throws IOException, InterruptedException { /* Docs-snippet test for revert-model-snapshot: indexes a snapshot (including quantiles state) under its canonical document id, then reverts to it via the tagged snippets. */ RestHighLevelClient client = highLevelClient(); String jobId = "test-revert-model-snapshot"; String snapshotId = "1541587919"; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a snapshot String documentId = jobId + "_model_snapshot_" + snapshotId; IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " + 
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" + "\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " + "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," + "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " + "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," + "\"latest_result_time_stamp\":1519930800000, \"retain\":false, " + "\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " + "\"quantile_state\":\"state\"}}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); /* Snapshot doc indexed and immediately searchable. */ { // tag::revert-model-snapshot-request RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // <1> // end::revert-model-snapshot-request // tag::revert-model-snapshot-delete-intervening-results request.setDeleteInterveningResults(true); // <1> // end::revert-model-snapshot-delete-intervening-results // tag::revert-model-snapshot-execute RevertModelSnapshotResponse response = client.machineLearning().revertModelSnapshot(request, RequestOptions.DEFAULT); // end::revert-model-snapshot-execute // tag::revert-model-snapshot-response ModelSnapshot modelSnapshot = response.getModel(); // <1> // end::revert-model-snapshot-response assertEquals(snapshotId, modelSnapshot.getSnapshotId()); assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000", modelSnapshot.getDescription()); assertEquals(51722, modelSnapshot.getModelSizeStats().getModelBytes()); } { RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // tag::revert-model-snapshot-execute-listener ActionListener<RevertModelSnapshotResponse> listener = new ActionListener<RevertModelSnapshotResponse>() { @Override public void onResponse(RevertModelSnapshotResponse revertModelSnapshotResponse) { // <1> } @Override 
public void onFailure(Exception e) { // <2> } }; // end::revert-model-snapshot-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::revert-model-snapshot-execute-async client.machineLearning().revertModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::revert-model-snapshot-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testUpdateModelSnapshot() throws IOException, InterruptedException { /* Docs-snippet test for update-model-snapshot: indexes a snapshot under its canonical document id, then updates its description and retain flag via the tagged snippets. */ RestHighLevelClient client = highLevelClient(); String jobId = "test-update-model-snapshot"; String snapshotId = "1541587919"; String documentId = jobId + "_model_snapshot_" + snapshotId; Job job = MachineLearningIT.buildJob(jobId); client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); // Let us index a snapshot IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); indexRequest.source("{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, " + "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " + "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" + "\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " + "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," + "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " + "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," + "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON); client.index(indexRequest, RequestOptions.DEFAULT); { // tag::update-model-snapshot-request UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId); 
// <1> // end::update-model-snapshot-request // tag::update-model-snapshot-description request.setDescription("My Snapshot"); // <1> // end::update-model-snapshot-description // tag::update-model-snapshot-retain request.setRetain(true); // <1> // end::update-model-snapshot-retain // tag::update-model-snapshot-execute UpdateModelSnapshotResponse response = client.machineLearning().updateModelSnapshot(request, RequestOptions.DEFAULT); // end::update-model-snapshot-execute // tag::update-model-snapshot-response boolean acknowledged = response.getAcknowledged(); // <1> ModelSnapshot modelSnapshot = response.getModel(); // <2> // end::update-model-snapshot-response assertTrue(acknowledged); assertEquals("My Snapshot", modelSnapshot.getDescription()); } { UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId); // tag::update-model-snapshot-execute-listener ActionListener<UpdateModelSnapshotResponse> listener = new ActionListener<UpdateModelSnapshotResponse>() { @Override public void onResponse(UpdateModelSnapshotResponse updateModelSnapshotResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::update-model-snapshot-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::update-model-snapshot-execute-async client.machineLearning().updateModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::update-model-snapshot-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPutCalendar() throws IOException, InterruptedException { /* Docs-snippet test for put-calendar: creates calendar "public_holidays" and reuses the same request for the sync and async tagged snippets. */ RestHighLevelClient client = highLevelClient(); // tag::put-calendar-request Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest request = new PutCalendarRequest(calendar); // <1> // end::put-calendar-request // 
tag::put-calendar-execute PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT); // end::put-calendar-execute // tag::put-calendar-response Calendar newCalendar = response.getCalendar(); // <1> // end::put-calendar-response assertThat(newCalendar.getId(), equalTo("public_holidays")); // tag::put-calendar-execute-listener ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() { @Override public void onResponse(PutCalendarResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-calendar-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-calendar-execute-async client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-calendar-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } public void testPutCalendarJob() throws IOException, InterruptedException { /* Docs-snippet test for put-calendar-job: creates calendar "holidays" with one job, then attaches further job ids and a job group via the tagged snippets. */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); { // tag::put-calendar-job-request PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", // <1> "job_2", "job_group_1"); // <2> // end::put-calendar-job-request // tag::put-calendar-job-execute PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT); // end::put-calendar-job-execute // tag::put-calendar-job-response Calendar updatedCalendar = response.getCalendar(); // <1> // end::put-calendar-job-response assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_1", "job_2", "job_group_1")); } { PutCalendarJobRequest request = 
new PutCalendarJobRequest("holidays", "job_4"); // tag::put-calendar-job-execute-listener ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() { @Override public void onResponse(PutCalendarResponse putCalendarsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-calendar-job-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-calendar-job-execute-async client.machineLearning().putCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-calendar-job-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteCalendarJob() throws IOException, InterruptedException { /* Docs-snippet test for delete-calendar-job: calendar starts with three job ids; the tagged snippets detach them, leaving only "job_2" after the first block. */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Arrays.asList("job_1", "job_group_1", "job_2"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); { // tag::delete-calendar-job-request DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", // <1> "job_1", "job_group_1"); // <2> // end::delete-calendar-job-request // tag::delete-calendar-job-execute PutCalendarResponse response = client.machineLearning().deleteCalendarJob(request, RequestOptions.DEFAULT); // end::delete-calendar-job-execute // tag::delete-calendar-job-response Calendar updatedCalendar = response.getCalendar(); // <1> // end::delete-calendar-job-response assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_2")); } { DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", "job_2"); // tag::delete-calendar-job-execute-listener ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() { @Override public void 
onResponse(PutCalendarResponse deleteCalendarsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-calendar-job-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-calendar-job-execute-async client.machineLearning().deleteCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-calendar-job-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetCalendar() throws IOException, InterruptedException { /* Docs-snippet test for get-calendars: a single calendar is created up front, then the tagged request/option/execute snippets run (sync and async). */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); { // tag::get-calendars-request GetCalendarsRequest request = new GetCalendarsRequest(); // <1> // end::get-calendars-request // tag::get-calendars-id request.setCalendarId("holidays"); // <1> // end::get-calendars-id // tag::get-calendars-page request.setPageParams(new PageParams(10, 20)); // <1> // end::get-calendars-page // reset page params request.setPageParams(null); // tag::get-calendars-execute GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT); // end::get-calendars-execute // tag::get-calendars-response long count = response.count(); // <1> List<Calendar> calendars = response.calendars(); // <2> // end::get-calendars-response assertEquals(1, calendars.size()); } { GetCalendarsRequest request = new GetCalendarsRequest("holidays"); // tag::get-calendars-execute-listener ActionListener<GetCalendarsResponse> listener = new ActionListener<GetCalendarsResponse>() { @Override public void onResponse(GetCalendarsResponse getCalendarsResponse) { // <1> } @Override public void 
onFailure(Exception e) { // <2> } }; // end::get-calendars-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-calendars-execute-async client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-calendars-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteCalendar() throws IOException, InterruptedException { /* Docs-snippet test for delete-calendar. The async snippet reuses the same request after the sync call has already deleted the calendar; the listener ignores the outcome and the test only awaits the latch. */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest putCalendarRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putCalendarRequest, RequestOptions.DEFAULT); // tag::delete-calendar-request DeleteCalendarRequest request = new DeleteCalendarRequest("holidays"); // <1> // end::delete-calendar-request // tag::delete-calendar-execute AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT); // end::delete-calendar-execute // tag::delete-calendar-response boolean isAcknowledged = response.isAcknowledged(); // <1> // end::delete-calendar-response assertTrue(isAcknowledged); // tag::delete-calendar-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-calendar-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-calendar-execute-async client.machineLearning().deleteCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-calendar-execute-async 
assertTrue(latch.await(30L, TimeUnit.SECONDS)); } public void testGetCalendarEvent() throws IOException, InterruptedException { /* Docs-snippet test for get-calendar-events: posts one scheduled event to calendar "holidays", then runs the tagged option and execute snippets (sync and async). */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null)); client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT); { // tag::get-calendar-events-request GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // <1> // end::get-calendar-events-request // tag::get-calendar-events-page request.setPageParams(new PageParams(10, 20)); // <1> // end::get-calendar-events-page // tag::get-calendar-events-start request.setStart("2018-08-01T00:00:00Z"); // <1> // end::get-calendar-events-start // tag::get-calendar-events-end request.setEnd("2018-08-02T00:00:00Z"); // <1> // end::get-calendar-events-end // tag::get-calendar-events-jobid request.setJobId("job_1"); // <1> // end::get-calendar-events-jobid // reset params request.setPageParams(null); request.setJobId(null); request.setStart(null); request.setEnd(null); // tag::get-calendar-events-execute GetCalendarEventsResponse response = client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT); // end::get-calendar-events-execute // tag::get-calendar-events-response long count = response.count(); // <1> List<ScheduledEvent> scheduledEvents = response.events(); // <2> // end::get-calendar-events-response assertEquals(1, scheduledEvents.size()); } { GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // tag::get-calendar-events-execute-listener ActionListener<GetCalendarEventsResponse> listener = new 
ActionListener<GetCalendarEventsResponse>() { @Override public void onResponse(GetCalendarEventsResponse getCalendarsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-calendar-events-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-calendar-events-execute-async client.machineLearning().getCalendarEventsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-calendar-events-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPostCalendarEvent() throws IOException, InterruptedException { /* Docs-snippet test for post-calendar-event: the sync block posts an event bound to the calendar id; the async block posts a random test instance. */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); { List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null)); // tag::post-calendar-event-request PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", // <1> events); // <2> // end::post-calendar-event-request // tag::post-calendar-event-execute PostCalendarEventResponse response = client.machineLearning().postCalendarEvent(request, RequestOptions.DEFAULT); // end::post-calendar-event-execute // tag::post-calendar-event-response List<ScheduledEvent> scheduledEvents = response.getScheduledEvents(); // <1> // end::post-calendar-event-response assertEquals(1, scheduledEvents.size()); } { List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance()); PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events); // <1> // tag::post-calendar-event-execute-listener ActionListener<PostCalendarEventResponse> listener = new 
ActionListener<PostCalendarEventResponse>() { @Override public void onResponse(PostCalendarEventResponse postCalendarsResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::post-calendar-event-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::post-calendar-event-execute-async client.machineLearning().postCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::post-calendar-event-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteCalendarEvent() throws IOException, InterruptedException { /* Docs-snippet test for delete-calendar-event: posts two events, looks up their server-generated ids, and deletes one per snippet block. The tagged request shows a placeholder "EventId"; the real id is substituted just below it, outside the tagged region. */ RestHighLevelClient client = highLevelClient(); Calendar calendar = new Calendar("holidays", Arrays.asList("job_1", "job_group_1", "job_2"), "A calendar for public holidays"); PutCalendarRequest putRequest = new PutCalendarRequest(calendar); client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT); List<ScheduledEvent> events = Arrays.asList(ScheduledEventTests.testInstance(calendar.getId(), null), ScheduledEventTests.testInstance(calendar.getId(), null)); client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT); GetCalendarEventsResponse getCalendarEventsResponse = client.machineLearning().getCalendarEvents(new GetCalendarEventsRequest("holidays"), RequestOptions.DEFAULT); { // tag::delete-calendar-event-request DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays", // <1> "EventId"); // <2> // end::delete-calendar-event-request request = new DeleteCalendarEventRequest("holidays", getCalendarEventsResponse.events().get(0).getEventId()); // tag::delete-calendar-event-execute AcknowledgedResponse response = client.machineLearning().deleteCalendarEvent(request, RequestOptions.DEFAULT); // end::delete-calendar-event-execute // 
tag::delete-calendar-event-response boolean acknowledged = response.isAcknowledged(); // <1> // end::delete-calendar-event-response assertThat(acknowledged, is(true)); } { DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays", getCalendarEventsResponse.events().get(1).getEventId()); // tag::delete-calendar-event-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse deleteCalendarEventResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-calendar-event-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-calendar-event-execute-async client.machineLearning().deleteCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-calendar-event-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetDataFrameAnalytics() throws Exception { /* Docs-snippet test for get-data-frame-analytics: requires the shared DF_ANALYTICS_CONFIG's source index to exist before the config can be put. */ createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); { // tag::get-data-frame-analytics-request GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config"); // <1> // end::get-data-frame-analytics-request // tag::get-data-frame-analytics-execute GetDataFrameAnalyticsResponse response = client.machineLearning().getDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::get-data-frame-analytics-execute // tag::get-data-frame-analytics-response List<DataFrameAnalyticsConfig> configs = response.getAnalytics(); // end::get-data-frame-analytics-response assertThat(configs, hasSize(1)); } { GetDataFrameAnalyticsRequest request = new 
GetDataFrameAnalyticsRequest("my-analytics-config"); // tag::get-data-frame-analytics-execute-listener ActionListener<GetDataFrameAnalyticsResponse> listener = new ActionListener<>() { @Override public void onResponse(GetDataFrameAnalyticsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-data-frame-analytics-execute-async client.machineLearning().getDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-data-frame-analytics-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetDataFrameAnalyticsStats() throws Exception { createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); { // tag::get-data-frame-analytics-stats-request GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // <1> // end::get-data-frame-analytics-stats-request // tag::get-data-frame-analytics-stats-execute GetDataFrameAnalyticsStatsResponse response = client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT); // end::get-data-frame-analytics-stats-execute // tag::get-data-frame-analytics-stats-response List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats(); // end::get-data-frame-analytics-stats-response assertThat(stats, hasSize(1)); } { GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // tag::get-data-frame-analytics-stats-execute-listener ActionListener<GetDataFrameAnalyticsStatsResponse> listener = new ActionListener<>() { @Override public void 
onResponse(GetDataFrameAnalyticsStatsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-data-frame-analytics-stats-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-data-frame-analytics-stats-execute-async client.machineLearning().getDataFrameAnalyticsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-data-frame-analytics-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPutDataFrameAnalytics() throws Exception { createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); { // tag::put-data-frame-analytics-query-config QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder()); // end::put-data-frame-analytics-query-config // tag::put-data-frame-analytics-source-config DataFrameAnalyticsSource sourceConfig = DataFrameAnalyticsSource.builder() // <1> .setIndex("put-test-source-index") // <2> .setQueryConfig(queryConfig) // <3> .setSourceFiltering(new FetchSourceContext(true, new String[] { "included_field_1", "included_field_2" }, new String[] { "excluded_field" })) // <4> .build(); // end::put-data-frame-analytics-source-config // tag::put-data-frame-analytics-dest-config DataFrameAnalyticsDest destConfig = DataFrameAnalyticsDest.builder() // <1> .setIndex("put-test-dest-index") // <2> .build(); // end::put-data-frame-analytics-dest-config // tag::put-data-frame-analytics-outlier-detection-default DataFrameAnalysis outlierDetection = OutlierDetection.createDefault(); // <1> // end::put-data-frame-analytics-outlier-detection-default // tag::put-data-frame-analytics-outlier-detection-customized DataFrameAnalysis outlierDetectionCustomized = OutlierDetection.builder() // <1> .setMethod(OutlierDetection.Method.DISTANCE_KNN) // <2> .setNNeighbors(5) // <3> 
.setFeatureInfluenceThreshold(0.1) // <4> .setComputeFeatureInfluence(true) // <5> .setOutlierFraction(0.05) // <6> .setStandardizationEnabled(true) // <7> .build(); // end::put-data-frame-analytics-outlier-detection-customized // tag::put-data-frame-analytics-classification DataFrameAnalysis classification = Classification.builder("my_dependent_variable") // <1> .setLambda(1.0) // <2> .setGamma(5.5) // <3> .setEta(5.5) // <4> .setMaxTrees(50) // <5> .setFeatureBagFraction(0.4) // <6> .setNumTopFeatureImportanceValues(3) // <7> .setPredictionFieldName("my_prediction_field_name") // <8> .setTrainingPercent(50.0) // <9> .setRandomizeSeed(1234L) // <10> .setClassAssignmentObjective(Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) // <11> .setNumTopClasses(1) // <12> .build(); // end::put-data-frame-analytics-classification // tag::put-data-frame-analytics-regression DataFrameAnalysis regression = org.elasticsearch.client.ml.dataframe.Regression.builder("my_dependent_variable") // <1> .setLambda(1.0) // <2> .setGamma(5.5) // <3> .setEta(5.5) // <4> .setMaxTrees(50) // <5> .setFeatureBagFraction(0.4) // <6> .setNumTopFeatureImportanceValues(3) // <7> .setPredictionFieldName("my_prediction_field_name") // <8> .setTrainingPercent(50.0) // <9> .setRandomizeSeed(1234L) // <10> .setLossFunction(Regression.LossFunction.MSE) // <11> .setLossFunctionParameter(1.0) // <12> .build(); // end::put-data-frame-analytics-regression // tag::put-data-frame-analytics-analyzed-fields FetchSourceContext analyzedFields = new FetchSourceContext( true, new String[] { "included_field_1", "included_field_2" }, new String[] { "excluded_field" }); // end::put-data-frame-analytics-analyzed-fields // tag::put-data-frame-analytics-config DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder() .setId("my-analytics-config") // <1> .setSource(sourceConfig) // <2> .setDest(destConfig) // <3> .setAnalysis(outlierDetection) // <4> .setAnalyzedFields(analyzedFields) // <5> 
.setModelMemoryLimit(new ByteSizeValue(5, ByteSizeUnit.MB)) // <6> .setDescription("this is an example description") // <7> .build(); // end::put-data-frame-analytics-config // tag::put-data-frame-analytics-request PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config); // <1> // end::put-data-frame-analytics-request // tag::put-data-frame-analytics-execute PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::put-data-frame-analytics-execute // tag::put-data-frame-analytics-response DataFrameAnalyticsConfig createdConfig = response.getConfig(); // end::put-data-frame-analytics-response assertThat(createdConfig.getId(), equalTo("my-analytics-config")); } { PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG); // tag::put-data-frame-analytics-execute-listener ActionListener<PutDataFrameAnalyticsResponse> listener = new ActionListener<>() { @Override public void onResponse(PutDataFrameAnalyticsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-data-frame-analytics-execute-async client.machineLearning().putDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-data-frame-analytics-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteDataFrameAnalytics() throws Exception { createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); { // tag::delete-data-frame-analytics-request DeleteDataFrameAnalyticsRequest request = new 
DeleteDataFrameAnalyticsRequest("my-analytics-config"); // <1> // end::delete-data-frame-analytics-request //tag::delete-data-frame-analytics-request-options request.setForce(false); // <1> request.setTimeout(TimeValue.timeValueMinutes(1)); // <2> //end::delete-data-frame-analytics-request-options // tag::delete-data-frame-analytics-execute AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::delete-data-frame-analytics-execute // tag::delete-data-frame-analytics-response boolean acknowledged = response.isAcknowledged(); // end::delete-data-frame-analytics-response assertThat(acknowledged, is(true)); } client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); { DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config"); // tag::delete-data-frame-analytics-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-data-frame-analytics-execute-async client.machineLearning().deleteDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-data-frame-analytics-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testStartDataFrameAnalytics() throws Exception { createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]); highLevelClient().index( new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT); 
RestHighLevelClient client = highLevelClient(); client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT); { // tag::start-data-frame-analytics-request StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // <1> // end::start-data-frame-analytics-request // tag::start-data-frame-analytics-execute StartDataFrameAnalyticsResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::start-data-frame-analytics-execute // tag::start-data-frame-analytics-response boolean acknowledged = response.isAcknowledged(); String node = response.getNode(); // <1> // end::start-data-frame-analytics-response assertThat(acknowledged, is(true)); assertThat(node, notNullValue()); } assertBusy( () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)), 30, TimeUnit.SECONDS); { StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // tag::start-data-frame-analytics-execute-listener ActionListener<StartDataFrameAnalyticsResponse> listener = new ActionListener<>() { @Override public void onResponse(StartDataFrameAnalyticsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::start-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::start-data-frame-analytics-execute-async client.machineLearning().startDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::start-data-frame-analytics-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } assertBusy( () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)), 30, TimeUnit.SECONDS); } public void 
testStopDataFrameAnalytics() throws Exception {
    createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
    highLevelClient().index(
        new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
        RequestOptions.DEFAULT);
    RestHighLevelClient client = highLevelClient();
    client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
    {
        // tag::stop-data-frame-analytics-request
        StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config"); // <1>
        request.setForce(false); // <2>
        // end::stop-data-frame-analytics-request

        // tag::stop-data-frame-analytics-execute
        StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
        // end::stop-data-frame-analytics-execute

        // tag::stop-data-frame-analytics-response
        boolean acknowledged = response.isStopped();
        // end::stop-data-frame-analytics-response

        assertThat(acknowledged, is(true));
    }

    // Ensure the analytics task is fully stopped before exercising the async variant.
    assertBusy(
        () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
        30, TimeUnit.SECONDS);
    {
        StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config");

        // tag::stop-data-frame-analytics-execute-listener
        ActionListener<StopDataFrameAnalyticsResponse> listener = new ActionListener<>() {
            @Override
            public void onResponse(StopDataFrameAnalyticsResponse response) {
                // <1>
            }

            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::stop-data-frame-analytics-execute-listener

        // Replace the empty listener by a blocking listener in test
        CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::stop-data-frame-analytics-execute-async
        client.machineLearning().stopDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::stop-data-frame-analytics-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    assertBusy(
        () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
        30, TimeUnit.SECONDS);
}

/**
 * Documentation-snippet test for evaluate-data-frame with binary soft
 * classification: indexes 10 labelled docs with prediction probabilities,
 * then evaluates precision/recall/confusion-matrix/AUC-ROC metrics.
 */
public void testEvaluateDataFrame() throws Exception {
    String indexName = "evaluate-test-index";
    CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName)
        .mapping(XContentFactory.jsonBuilder().startObject()
            .startObject("properties")
                .startObject("label")
                    .field("type", "keyword")
                .endObject()
                .startObject("p")
                    .field("type", "double")
                .endObject()
            .endObject()
        .endObject());
    BulkRequest bulkRequest = new BulkRequest(indexName)
        .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.1)) // #0
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.2)) // #1
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.3)) // #2
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.4)) // #3
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.7)) // #4
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.2)) // #5
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.3)) // #6
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.4)) // #7
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.8)) // #8
        .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.9)); // #9
    RestHighLevelClient client = highLevelClient();
    client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
    client.bulk(bulkRequest, RequestOptions.DEFAULT);
    {
        // tag::evaluate-data-frame-evaluation-softclassification
        Evaluation
evaluation = new BinarySoftClassification( // <1> "label", // <2> "p", // <3> // Evaluation metrics // <4> PrecisionMetric.at(0.4, 0.5, 0.6), // <5> RecallMetric.at(0.5, 0.7), // <6> ConfusionMatrixMetric.at(0.5), // <7> AucRocMetric.withCurve()); // <8> // end::evaluate-data-frame-evaluation-softclassification // tag::evaluate-data-frame-request EvaluateDataFrameRequest request = new EvaluateDataFrameRequest( // <1> indexName, // <2> new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), // <3> evaluation); // <4> // end::evaluate-data-frame-request // tag::evaluate-data-frame-execute EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // end::evaluate-data-frame-execute // tag::evaluate-data-frame-response List<EvaluationMetric.Result> metrics = response.getMetrics(); // <1> // end::evaluate-data-frame-response // tag::evaluate-data-frame-results-softclassification PrecisionMetric.Result precisionResult = response.getMetricByName(PrecisionMetric.NAME); // <1> double precision = precisionResult.getScoreByThreshold("0.4"); // <2> ConfusionMatrixMetric.Result confusionMatrixResult = response.getMetricByName(ConfusionMatrixMetric.NAME); // <3> ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5"); // <4> // end::evaluate-data-frame-results-softclassification assertThat( metrics.stream().map(EvaluationMetric.Result::getMetricName).collect(Collectors.toList()), containsInAnyOrder(PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME, AucRocMetric.NAME)); assertThat(precision, closeTo(0.6, 1e-9)); assertThat(confusionMatrix.getTruePositives(), equalTo(2L)); // docs #8 and #9 assertThat(confusionMatrix.getFalsePositives(), equalTo(1L)); // doc #4 assertThat(confusionMatrix.getTrueNegatives(), equalTo(4L)); // docs #0, #1, #2 and #3 assertThat(confusionMatrix.getFalseNegatives(), equalTo(3L)); // docs #5, #6 and #7 } { EvaluateDataFrameRequest request = new 
EvaluateDataFrameRequest( indexName, new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), new BinarySoftClassification( "label", "p", PrecisionMetric.at(0.4, 0.5, 0.6), RecallMetric.at(0.5, 0.7), ConfusionMatrixMetric.at(0.5), AucRocMetric.withCurve())); // tag::evaluate-data-frame-execute-listener ActionListener<EvaluateDataFrameResponse> listener = new ActionListener<>() { @Override public void onResponse(EvaluateDataFrameResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::evaluate-data-frame-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::evaluate-data-frame-execute-async client.machineLearning().evaluateDataFrameAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::evaluate-data-frame-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testEvaluateDataFrame_Classification() throws Exception { String indexName = "evaluate-classification-test-index"; CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName) .mapping(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("actual_class") .field("type", "keyword") .endObject() .startObject("predicted_class") .field("type", "keyword") .endObject() .endObject() .endObject()); BulkRequest bulkRequest = new BulkRequest(indexName) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #0 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #1 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #2 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "dog")) // #3 .add(new IndexRequest().source(XContentType.JSON, 
"actual_class", "cat", "predicted_class", "fox")) // #4 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "cat")) // #5 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #6 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #7 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #8 .add(new IndexRequest().source(XContentType.JSON, "actual_class", "ant", "predicted_class", "cat")); // #9 RestHighLevelClient client = highLevelClient(); client.indices().create(createIndexRequest, RequestOptions.DEFAULT); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::evaluate-data-frame-evaluation-classification Evaluation evaluation = new org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification( // <1> "actual_class", // <2> "predicted_class", // <3> // Evaluation metrics // <4> new AccuracyMetric(), // <5> new org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric(), // <6> new org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric(), // <7> new MulticlassConfusionMatrixMetric(3)); // <8> // end::evaluate-data-frame-evaluation-classification EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation); EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // tag::evaluate-data-frame-results-classification AccuracyMetric.Result accuracyResult = response.getMetricByName(AccuracyMetric.NAME); // <1> double accuracy = accuracyResult.getOverallAccuracy(); // <2> org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.Result precisionResult = response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME); // <3> double precision = 
precisionResult.getAvgPrecision(); // <4> org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.Result recallResult = response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME); // <5> double recall = recallResult.getAvgRecall(); // <6> MulticlassConfusionMatrixMetric.Result multiclassConfusionMatrix = response.getMetricByName(MulticlassConfusionMatrixMetric.NAME); // <7> List<ActualClass> confusionMatrix = multiclassConfusionMatrix.getConfusionMatrix(); // <8> long otherClassesCount = multiclassConfusionMatrix.getOtherActualClassCount(); // <9> // end::evaluate-data-frame-results-classification assertThat(accuracyResult.getMetricName(), equalTo(AccuracyMetric.NAME)); assertThat(accuracy, equalTo(0.6)); assertThat( precisionResult.getMetricName(), equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME)); assertThat(precision, equalTo(0.675)); assertThat( recallResult.getMetricName(), equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME)); assertThat(recall, equalTo(0.45)); assertThat(multiclassConfusionMatrix.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME)); assertThat( confusionMatrix, equalTo( List.of( new ActualClass( "ant", 1L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 0L)), 0L), new ActualClass( "cat", 5L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 3L), new PredictedClass("dog", 1L)), 1L), new ActualClass( "dog", 4L, List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 3L)), 0L)))); assertThat(otherClassesCount, equalTo(0L)); } } public void testEvaluateDataFrame_Regression() throws Exception { String indexName = "evaluate-classification-test-index"; CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName) .mapping(XContentFactory.jsonBuilder().startObject() 
.startObject("properties") .startObject("actual_value") .field("type", "double") .endObject() .startObject("predicted_value") .field("type", "double") .endObject() .endObject() .endObject()); BulkRequest bulkRequest = new BulkRequest(indexName) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 1.0)) // #0 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 0.9)) // #1 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.0, "predicted_value", 2.0)) // #2 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.5, "predicted_value", 1.4)) // #3 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.2, "predicted_value", 1.3)) // #4 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.7, "predicted_value", 2.0)) // #5 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.1, "predicted_value", 2.1)) // #6 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.7)) // #7 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 0.8, "predicted_value", 1.0)) // #8 .add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.4)); // #9 RestHighLevelClient client = highLevelClient(); client.indices().create(createIndexRequest, RequestOptions.DEFAULT); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::evaluate-data-frame-evaluation-regression Evaluation evaluation = new org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression( // <1> "actual_value", // <2> "predicted_value", // <3> // Evaluation metrics // <4> new MeanSquaredErrorMetric(), // <5> new RSquaredMetric()); // <6> // end::evaluate-data-frame-evaluation-regression EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation); EvaluateDataFrameResponse response = 
client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT); // tag::evaluate-data-frame-results-regression MeanSquaredErrorMetric.Result meanSquaredErrorResult = response.getMetricByName(MeanSquaredErrorMetric.NAME); // <1> double meanSquaredError = meanSquaredErrorResult.getError(); // <2> RSquaredMetric.Result rSquaredResult = response.getMetricByName(RSquaredMetric.NAME); // <3> double rSquared = rSquaredResult.getValue(); // <4> // end::evaluate-data-frame-results-regression assertThat(meanSquaredError, closeTo(0.021, 1e-3)); assertThat(rSquared, closeTo(0.941, 1e-3)); } } public void testExplainDataFrameAnalytics() throws Exception { createIndex("explain-df-test-source-index"); BulkRequest bulkRequest = new BulkRequest("explain-df-test-source-index") .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); for (int i = 0; i < 10; ++i) { bulkRequest.add(new IndexRequest().source(XContentType.JSON, "timestamp", 123456789L, "total", 10L)); } RestHighLevelClient client = highLevelClient(); client.bulk(bulkRequest, RequestOptions.DEFAULT); { // tag::explain-data-frame-analytics-id-request ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest("existing_job_id"); // <1> // end::explain-data-frame-analytics-id-request // tag::explain-data-frame-analytics-config-request DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder() .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build()) .setAnalysis(OutlierDetection.createDefault()) .build(); request = new ExplainDataFrameAnalyticsRequest(config); // <1> // end::explain-data-frame-analytics-config-request // tag::explain-data-frame-analytics-execute ExplainDataFrameAnalyticsResponse response = client.machineLearning().explainDataFrameAnalytics(request, RequestOptions.DEFAULT); // end::explain-data-frame-analytics-execute // tag::explain-data-frame-analytics-response List<FieldSelection> fieldSelection = 
response.getFieldSelection(); // <1> MemoryEstimation memoryEstimation = response.getMemoryEstimation(); // <2> // end::explain-data-frame-analytics-response assertThat(fieldSelection.size(), equalTo(2)); assertThat(fieldSelection.stream().map(FieldSelection::getName).collect(Collectors.toList()), contains("timestamp", "total")); ByteSizeValue expectedMemoryWithoutDisk = memoryEstimation.getExpectedMemoryWithoutDisk(); // <1> ByteSizeValue expectedMemoryWithDisk = memoryEstimation.getExpectedMemoryWithDisk(); // <2> // We are pretty liberal here as this test does not aim at verifying concrete numbers but rather end-to-end user workflow. ByteSizeValue lowerBound = new ByteSizeValue(1, ByteSizeUnit.KB); ByteSizeValue upperBound = new ByteSizeValue(1, ByteSizeUnit.GB); assertThat(expectedMemoryWithoutDisk, allOf(greaterThan(lowerBound), lessThan(upperBound))); assertThat(expectedMemoryWithDisk, allOf(greaterThan(lowerBound), lessThan(upperBound))); } { DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder() .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build()) .setAnalysis(OutlierDetection.createDefault()) .build(); ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest(config); // tag::explain-data-frame-analytics-execute-listener ActionListener<ExplainDataFrameAnalyticsResponse> listener = new ActionListener<>() { @Override public void onResponse(ExplainDataFrameAnalyticsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::explain-data-frame-analytics-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::explain-data-frame-analytics-execute-async client.machineLearning().explainDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::explain-data-frame-analytics-execute-async 
assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetTrainedModels() throws Exception { putTrainedModel("my-trained-model"); RestHighLevelClient client = highLevelClient(); { // tag::get-trained-models-request GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model") // <1> .setPageParams(new PageParams(0, 1)) // <2> .setIncludeDefinition(false) // <3> .setDecompressDefinition(false) // <4> .setAllowNoMatch(true) // <5> .setTags("regression") // <6> .setForExport(false); // <7> // end::get-trained-models-request request.setTags((List<String>)null); // tag::get-trained-models-execute GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT); // end::get-trained-models-execute // tag::get-trained-models-response List<TrainedModelConfig> models = response.getTrainedModels(); // end::get-trained-models-response assertThat(models, hasSize(1)); } { GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model"); // tag::get-trained-models-execute-listener ActionListener<GetTrainedModelsResponse> listener = new ActionListener<>() { @Override public void onResponse(GetTrainedModelsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-trained-models-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-trained-models-execute-async client.machineLearning().getTrainedModelsAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-trained-models-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testPutTrainedModel() throws Exception { TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build(); // tag::put-trained-model-config TrainedModelConfig trainedModelConfig = 
TrainedModelConfig.builder() .setDefinition(definition) // <1> .setCompressedDefinition(InferenceToXContentCompressor.deflate(definition)) // <2> .setModelId("my-new-trained-model") // <3> .setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) // <4> .setDescription("test model") // <5> .setMetadata(new HashMap<>()) // <6> .setTags("my_regression_models") // <7> .setInferenceConfig(new RegressionConfig("value", 0)) // <8> .build(); // end::put-trained-model-config trainedModelConfig = TrainedModelConfig.builder() .setDefinition(definition) .setInferenceConfig(new RegressionConfig(null, null)) .setModelId("my-new-trained-model") .setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) .setDescription("test model") .setMetadata(new HashMap<>()) .setTags("my_regression_models") .build(); RestHighLevelClient client = highLevelClient(); { // tag::put-trained-model-request PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // <1> // end::put-trained-model-request // tag::put-trained-model-execute PutTrainedModelResponse response = client.machineLearning().putTrainedModel(request, RequestOptions.DEFAULT); // end::put-trained-model-execute // tag::put-trained-model-response TrainedModelConfig model = response.getResponse(); // end::put-trained-model-response assertThat(model.getModelId(), equalTo(trainedModelConfig.getModelId())); highLevelClient().machineLearning() .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT); } { PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // tag::put-trained-model-execute-listener ActionListener<PutTrainedModelResponse> listener = new ActionListener<>() { @Override public void onResponse(PutTrainedModelResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-trained-model-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = 
new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-trained-model-execute-async client.machineLearning().putTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-trained-model-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); highLevelClient().machineLearning() .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT); } } public void testGetTrainedModelsStats() throws Exception { putTrainedModel("my-trained-model"); RestHighLevelClient client = highLevelClient(); { // tag::get-trained-models-stats-request GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model") // <1> .setPageParams(new PageParams(0, 1)) // <2> .setAllowNoMatch(true); // <3> // end::get-trained-models-stats-request // tag::get-trained-models-stats-execute GetTrainedModelsStatsResponse response = client.machineLearning().getTrainedModelsStats(request, RequestOptions.DEFAULT); // end::get-trained-models-stats-execute // tag::get-trained-models-stats-response List<TrainedModelStats> models = response.getTrainedModelStats(); // end::get-trained-models-stats-response assertThat(models, hasSize(1)); } { GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model"); // tag::get-trained-models-stats-execute-listener ActionListener<GetTrainedModelsStatsResponse> listener = new ActionListener<>() { @Override public void onResponse(GetTrainedModelsStatsResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-trained-models-stats-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-trained-models-stats-execute-async client.machineLearning() .getTrainedModelsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1> // 
end::get-trained-models-stats-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteTrainedModel() throws Exception { RestHighLevelClient client = highLevelClient(); { putTrainedModel("my-trained-model"); // tag::delete-trained-model-request DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // <1> // end::delete-trained-model-request // tag::delete-trained-model-execute AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT); // end::delete-trained-model-execute // tag::delete-trained-model-response boolean deleted = response.isAcknowledged(); // end::delete-trained-model-response assertThat(deleted, is(true)); } { putTrainedModel("my-trained-model"); DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // tag::delete-trained-model-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-trained-model-execute-listener // Replace the empty listener by a blocking listener in test CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-trained-model-execute-async client.machineLearning().deleteTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::delete-trained-model-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testCreateFilter() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::put-filter-config MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") // <1> .setDescription("A list of safe domains") // <2> .setItems("*.google.com", "wikipedia.org"); // <3> // end::put-filter-config // tag::put-filter-request PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); 
// <1> // end::put-filter-request // tag::put-filter-execute PutFilterResponse response = client.machineLearning().putFilter(request, RequestOptions.DEFAULT); // end::put-filter-execute // tag::put-filter-response MlFilter createdFilter = response.getResponse(); // <1> // end::put-filter-response assertThat(createdFilter.getId(), equalTo("my_safe_domains")); } { MlFilter.Builder filterBuilder = MlFilter.builder("safe_domains_async") .setDescription("A list of safe domains") .setItems("*.google.com", "wikipedia.org"); PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); // tag::put-filter-execute-listener ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() { @Override public void onResponse(PutFilterResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::put-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::put-filter-execute-async client.machineLearning().putFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::put-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetFilters() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String filterId = "get-filter-doc-test"; MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org"); client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT); { // tag::get-filters-request GetFiltersRequest request = new GetFiltersRequest(); // <1> // end::get-filters-request // tag::get-filters-filter-id request.setFilterId("get-filter-doc-test"); // <1> // end::get-filters-filter-id // tag::get-filters-page-params request.setFrom(100); // <1> request.setSize(200); // <2> // 
end::get-filters-page-params request.setFrom(null); request.setSize(null); // tag::get-filters-execute GetFiltersResponse response = client.machineLearning().getFilter(request, RequestOptions.DEFAULT); // end::get-filters-execute // tag::get-filters-response long count = response.count(); // <1> List<MlFilter> filters = response.filters(); // <2> // end::get-filters-response assertEquals(1, filters.size()); } { GetFiltersRequest request = new GetFiltersRequest(); request.setFilterId(filterId); // tag::get-filters-execute-listener ActionListener<GetFiltersResponse> listener = new ActionListener<GetFiltersResponse>() { @Override public void onResponse(GetFiltersResponse getfiltersResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-filters-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-filters-execute-async client.machineLearning().getFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-filters-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testUpdateFilter() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); String filterId = "update-filter-doc-test"; MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org"); client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT); { // tag::update-filter-request UpdateFilterRequest request = new UpdateFilterRequest(filterId); // <1> // end::update-filter-request // tag::update-filter-description request.setDescription("my new description"); // <1> // end::update-filter-description // tag::update-filter-add-items request.setAddItems(Arrays.asList("*.bing.com", "*.elastic.co")); // <1> // end::update-filter-add-items // 
tag::update-filter-remove-items request.setRemoveItems(Arrays.asList("*.google.com")); // <1> // end::update-filter-remove-items // tag::update-filter-execute PutFilterResponse response = client.machineLearning().updateFilter(request, RequestOptions.DEFAULT); // end::update-filter-execute // tag::update-filter-response MlFilter updatedFilter = response.getResponse(); // <1> // end::update-filter-response assertEquals(request.getDescription(), updatedFilter.getDescription()); } { UpdateFilterRequest request = new UpdateFilterRequest(filterId); // tag::update-filter-execute-listener ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() { @Override public void onResponse(PutFilterResponse putFilterResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::update-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::update-filter-execute-async client.machineLearning().updateFilterAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::update-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testDeleteFilter() throws Exception { RestHighLevelClient client = highLevelClient(); String filterId = createFilter(client); { // tag::delete-filter-request DeleteFilterRequest request = new DeleteFilterRequest(filterId); // <1> // end::delete-filter-request // tag::delete-filter-execute AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT); // end::delete-filter-execute // tag::delete-filter-response boolean isAcknowledged = response.isAcknowledged(); // <1> // end::delete-filter-response assertTrue(isAcknowledged); } filterId = createFilter(client); { DeleteFilterRequest request = new DeleteFilterRequest(filterId); // tag::delete-filter-execute-listener 
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::delete-filter-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::delete-filter-execute-async client.machineLearning().deleteFilterAsync(request, RequestOptions.DEFAULT, listener); //<1> // end::delete-filter-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testGetMlInfo() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::get-ml-info-request MlInfoRequest request = new MlInfoRequest(); // <1> // end::get-ml-info-request // tag::get-ml-info-execute MlInfoResponse response = client.machineLearning() .getMlInfo(request, RequestOptions.DEFAULT); // end::get-ml-info-execute // tag::get-ml-info-response final Map<String, Object> info = response.getInfo();// <1> // end::get-ml-info-response assertTrue(info.containsKey("defaults")); assertTrue(info.containsKey("limits")); } { MlInfoRequest request = new MlInfoRequest(); // tag::get-ml-info-execute-listener ActionListener<MlInfoResponse> listener = new ActionListener<MlInfoResponse>() { @Override public void onResponse(MlInfoResponse response) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::get-ml-info-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::get-ml-info-execute-async client.machineLearning() .getMlInfoAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-ml-info-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testSetUpgradeMode() throws Exception { RestHighLevelClient 
client = highLevelClient(); { // tag::set-upgrade-mode-request SetUpgradeModeRequest request = new SetUpgradeModeRequest(true); // <1> request.setTimeout(TimeValue.timeValueMinutes(10)); // <2> // end::set-upgrade-mode-request // Set to false so that the cluster setting does not have to be unset at the end of the test. request.setEnabled(false); // tag::set-upgrade-mode-execute AcknowledgedResponse acknowledgedResponse = client.machineLearning().setUpgradeMode(request, RequestOptions.DEFAULT); // end::set-upgrade-mode-execute // tag::set-upgrade-mode-response boolean acknowledged = acknowledgedResponse.isAcknowledged(); // <1> // end::set-upgrade-mode-response assertThat(acknowledged, is(true)); } { SetUpgradeModeRequest request = new SetUpgradeModeRequest(false); // tag::set-upgrade-mode-execute-listener ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::set-upgrade-mode-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::set-upgrade-mode-execute-async client.machineLearning() .setUpgradeModeAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::set-upgrade-mode-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } public void testEstimateModelMemory() throws Exception { RestHighLevelClient client = highLevelClient(); { // tag::estimate-model-memory-request Detector.Builder detectorBuilder = new Detector.Builder() .setFunction("count") .setPartitionFieldName("status"); AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build())) .setBucketSpan(TimeValue.timeValueMinutes(10)) .setInfluencers(Collections.singletonList("src_ip")); 
EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfigBuilder.build()); // <1> request.setOverallCardinality(Collections.singletonMap("status", 50L)); // <2> request.setMaxBucketCardinality(Collections.singletonMap("src_ip", 30L)); // <3> // end::estimate-model-memory-request // tag::estimate-model-memory-execute EstimateModelMemoryResponse estimateModelMemoryResponse = client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT); // end::estimate-model-memory-execute // tag::estimate-model-memory-response ByteSizeValue modelMemoryEstimate = estimateModelMemoryResponse.getModelMemoryEstimate(); // <1> long estimateInBytes = modelMemoryEstimate.getBytes(); // end::estimate-model-memory-response assertThat(estimateInBytes, greaterThan(10000000L)); } { AnalysisConfig analysisConfig = AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())).build(); EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig); // tag::estimate-model-memory-execute-listener ActionListener<EstimateModelMemoryResponse> listener = new ActionListener<EstimateModelMemoryResponse>() { @Override public void onResponse(EstimateModelMemoryResponse estimateModelMemoryResponse) { // <1> } @Override public void onFailure(Exception e) { // <2> } }; // end::estimate-model-memory-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); // tag::estimate-model-memory-execute-async client.machineLearning() .estimateModelMemoryAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::estimate-model-memory-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } private String createFilter(RestHighLevelClient client) throws IOException { MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") .setDescription("A list of safe domains") 
.setItems("*.google.com", "wikipedia.org"); PutFilterRequest putFilterRequest = new PutFilterRequest(filterBuilder.build()); PutFilterResponse putFilterResponse = client.machineLearning().putFilter(putFilterRequest, RequestOptions.DEFAULT); MlFilter createdFilter = putFilterResponse.getResponse(); assertThat(createdFilter.getId(), equalTo("my_safe_domains")); return createdFilter.getId(); } private void createIndex(String indexName) throws IOException { CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject() .startObject("properties") .startObject("timestamp") .field("type", "date") .endObject() .startObject("total") .field("type", "long") .endObject() .endObject() .endObject()); highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); } private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException { GetDataFrameAnalyticsStatsResponse statsResponse = highLevelClient().machineLearning().getDataFrameAnalyticsStats( new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT); assertThat(statsResponse.getAnalyticsStats(), hasSize(1)); DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0); return stats.getState(); } private void putTrainedModel(String modelId) throws IOException { TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build(); TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder() .setDefinition(definition) .setModelId(modelId) .setInferenceConfig(new RegressionConfig("value", 0)) .setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3", "col4"))) .setDescription("test model") .build(); highLevelClient().machineLearning().putTrainedModel(new PutTrainedModelRequest(trainedModelConfig), RequestOptions.DEFAULT); } @Override protected NamedXContentRegistry xContentRegistry() { return new NamedXContentRegistry(new 
MlInferenceNamedXContentProvider().getNamedXContentParsers()); } private static final DataFrameAnalyticsConfig DF_ANALYTICS_CONFIG = DataFrameAnalyticsConfig.builder() .setId("my-analytics-config") .setSource(DataFrameAnalyticsSource.builder() .setIndex("put-test-source-index") .build()) .setDest(DataFrameAnalyticsDest.builder() .setIndex("put-test-dest-index") .build()) .setAnalysis(OutlierDetection.createDefault()) .build(); }
apache-2.0
tmarciniak/cfmtp
mtp-core/src/main/java/org/tmarciniak/mtp/model/TradeMessage.java
4895
package org.tmarciniak.mtp.model;

import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import java.util.Objects;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;

import org.apache.commons.lang3.builder.ToStringBuilder;

/**
 * JPA entity representing a single trade message (a currency trade placed by a
 * user).
 *
 * <p>NOTE(review): despite the original "Immutable class" comment, this entity
 * exposes setters and is mutable, as JPA requires. Because {@link #equals} and
 * {@link #hashCode} include the mutable {@code id} and other fields, instances
 * should not be used as hash-map keys before they are fully populated and
 * persisted.
 *
 * @author tomasz.marciniak
 */
@Entity
@Table
public final class TradeMessage implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Database-generated primary key, assigned from {@code trade_message_id_seq}. */
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "trade_message_id_seq")
    @SequenceGenerator(name = "trade_message_id_seq", sequenceName = "trade_message_id_seq", allocationSize = 1)
    private long id;

    private String userId;
    private String currencyFrom;
    private String currencyTo;
    private BigDecimal amountBuy;
    private BigDecimal amountSell;
    private BigDecimal rate;
    private Date timePlaced;
    private String originatingCountry;

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getCurrencyFrom() {
        return currencyFrom;
    }

    public void setCurrencyFrom(String currencyFrom) {
        this.currencyFrom = currencyFrom;
    }

    public String getCurrencyTo() {
        return currencyTo;
    }

    public void setCurrencyTo(String currencyTo) {
        this.currencyTo = currencyTo;
    }

    public BigDecimal getAmountBuy() {
        return amountBuy;
    }

    public void setAmountBuy(BigDecimal amountBuy) {
        this.amountBuy = amountBuy;
    }

    public BigDecimal getAmountSell() {
        return amountSell;
    }

    public void setAmountSell(BigDecimal amountSell) {
        this.amountSell = amountSell;
    }

    public BigDecimal getRate() {
        return rate;
    }

    public void setRate(BigDecimal rate) {
        this.rate = rate;
    }

    /**
     * Returns a defensive copy of the placement time so callers cannot mutate
     * the entity's internal state, or {@code null} when no time has been set.
     *
     * <p>Fix: the original dereferenced {@code timePlaced} unconditionally and
     * threw {@link NullPointerException} on an unset field.
     */
    public Date getTimePlaced() {
        return timePlaced == null ? null : new Date(timePlaced.getTime());
    }

    /**
     * Stores a defensive copy of the supplied date.
     *
     * <p>Fix: the original stored the caller's reference directly, so a caller
     * mutating its {@code Date} afterwards would silently change this entity —
     * inconsistent with the defensive copy made by {@link #getTimePlaced()}.
     */
    public void setTimePlaced(Date timePlaced) {
        this.timePlaced = timePlaced == null ? null : new Date(timePlaced.getTime());
    }

    public String getOriginatingCountry() {
        return originatingCountry;
    }

    public void setOriginatingCountry(String originatingCountry) {
        this.originatingCountry = originatingCountry;
    }

    /**
     * Hash over the same field set the original hand-rolled implementation
     * used; equal objects (per {@link #equals}) always produce equal hashes.
     */
    @Override
    public int hashCode() {
        return Objects.hash(amountBuy, amountSell, currencyFrom, currencyTo, id,
                originatingCountry, rate, timePlaced, userId);
    }

    /**
     * Field-by-field equality over all persistent state, equivalent to the
     * original null-safe comparison chain.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        TradeMessage other = (TradeMessage) obj;
        return id == other.id
                && Objects.equals(amountBuy, other.amountBuy)
                && Objects.equals(amountSell, other.amountSell)
                && Objects.equals(currencyFrom, other.currencyFrom)
                && Objects.equals(currencyTo, other.currencyTo)
                && Objects.equals(originatingCountry, other.originatingCountry)
                && Objects.equals(rate, other.rate)
                && Objects.equals(timePlaced, other.timePlaced)
                && Objects.equals(userId, other.userId);
    }

    /** Reflective dump of all fields; intended for logging/debugging only. */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }
}
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-polly/src/main/java/com/amazonaws/services/polly/AbstractAmazonPollyAsync.java
4021
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.polly;

import javax.annotation.Generated;

import com.amazonaws.services.polly.model.*;

/**
 * Abstract implementation of {@code AmazonPollyAsync}. Convenient method forms pass through to the corresponding
 * overload that takes a request object and an {@code AsyncHandler}, which throws an
 * {@code UnsupportedOperationException}.
 *
 * <p>Code-generated stub: each operation appears as a pair — a single-argument convenience overload that delegates
 * with a {@code null} handler, and a two-argument overload that unconditionally throws. Subclasses override only the
 * two-argument forms they actually support.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonPollyAsync extends AbstractAmazonPolly implements AmazonPollyAsync {

    protected AbstractAmazonPollyAsync() {
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request) {
        return deleteLexiconAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteLexiconRequest, DeleteLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request) {
        return describeVoicesAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeVoicesRequest, DescribeVoicesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request) {
        return getLexiconAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<GetLexiconRequest, GetLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request) {
        return listLexiconsAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request,
            com.amazonaws.handlers.AsyncHandler<ListLexiconsRequest, ListLexiconsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request) {
        return putLexiconAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<PutLexiconRequest, PutLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    /** Convenience overload: delegates to the handler variant with no callback. */
    @Override
    public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request) {
        return synthesizeSpeechAsync(request, null);
    }

    /** Not implemented in this abstract base; always throws {@code UnsupportedOperationException}. */
    @Override
    public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request,
            com.amazonaws.handlers.AsyncHandler<SynthesizeSpeechRequest, SynthesizeSpeechResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

}
apache-2.0
tonybeltramelli/Ubiquitous-Media-Sharing-Surface
dk.itu.pervasive.mobile.android/src/dk/itu/pervasive/mobile/data/DataManager.java
2863
package dk.itu.pervasive.mobile.data;

import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;

import dk.itu.pervasive.mobile.R;

import java.io.FileOutputStream;

/**
 * Singleton that caches the user's preference values (username, surface
 * address, sticker id) and offers small content/file helpers bound to the
 * current {@link Activity} context.
 *
 * <p>NOTE(review): the lazy singleton is not thread-safe; acceptable if only
 * ever touched from the UI thread — confirm against callers.
 *
 * @author Tony Beltramelli www.tonybeltramelli.com
 */
public class DataManager
{
    public static final String PREF_KEY_SAVE = "save";
    public static final String PREF_KEY_USERNAME = "username";
    public static final String PREF_KEY_SURFACE_ADDRESS = "surfaceAddress";
    public static final String PREF_KEY_STICKER_ID = "stickerID";

    private static DataManager _instance = null;

    private Activity _context;
    private String _username = "";
    private String _surfaceAddress = "";
    private String _stickerID = "";

    private DataManager()
    {
    }

    /** Lazily creates and returns the process-wide instance. */
    public static DataManager getInstance()
    {
        if (_instance == null)
        {
            _instance = new DataManager();
        }
        return _instance;
    }

    /**
     * Reloads the cached fields from the default shared preferences.
     * NOTE(review): despite the name, this method reads preferences into
     * memory rather than persisting anything; kept as-is for API stability.
     */
    public void saveData()
    {
        _username = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_USERNAME,
                _context.getResources().getString(R.string.preference_user_name_default));
        _surfaceAddress = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_SURFACE_ADDRESS,
                _context.getResources().getString(R.string.preference_surface_address_default));
        _stickerID = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_STICKER_ID,
                _context.getResources().getString(R.string.preference_sticker_id_default));

        Log.wtf("save data", _username + ", " + _surfaceAddress + ", " + _stickerID);
    }

    /**
     * Resolves a media-store {@link Uri} to a filesystem path, or {@code null}
     * when the content provider returns no cursor.
     *
     * <p>Fix: the original never closed the {@link Cursor} (resource leak) and
     * NPE'd when {@code query()} returned {@code null}.
     */
    public String getPathFromUri(Uri uri)
    {
        String[] projection = {MediaStore.Images.Media.DATA};
        Cursor cursor = _context.getContentResolver().query(uri, projection, null, null, null);
        if (cursor == null)
        {
            // query() may return null (e.g. provider unavailable); the original crashed here.
            return null;
        }
        try
        {
            int columnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
            // NOTE(review): moveToFirst()'s result is ignored, as in the original;
            // an empty result set would still fail on getString — confirm callers
            // only pass uris known to resolve.
            cursor.moveToFirst();
            return cursor.getString(columnIndex);
        } finally
        {
            cursor.close(); // always release the cursor, even on exceptions
        }
    }

    /**
     * Writes {@code bytes} to a private application file named
     * {@code imageName}; errors are logged and swallowed (best-effort, as in
     * the original).
     *
     * <p>Fix: the stream is now closed in a {@code finally} block, so it no
     * longer leaks when {@code write} throws.
     */
    public void saveImage(String imageName, byte[] bytes)
    {
        FileOutputStream fos = null;
        try
        {
            fos = _context.openFileOutput(imageName, Context.MODE_PRIVATE);
            fos.write(bytes);
        } catch (Exception e)
        {
            e.printStackTrace();
        } finally
        {
            if (fos != null)
            {
                try
                {
                    fos.close();
                } catch (Exception ignored)
                {
                    // best-effort close; nothing useful to do on failure
                }
            }
        }
    }

    /** Shows a short toast on the UI thread, safe to call from any thread. */
    public void displayMessage(final String message)
    {
        _context.runOnUiThread(new Runnable()
        {
            public void run()
            {
                Toast.makeText(_context, message, Toast.LENGTH_SHORT).show();
            }
        });
    }

    public String getUsername()
    {
        return _username;
    }

    public String getSurfaceAddress()
    {
        return _surfaceAddress;
    }

    public String getStickerID()
    {
        return _stickerID;
    }

    /** Binds the manager to an activity and immediately refreshes the cached preferences. */
    public void setContext(Activity context)
    {
        _context = context;
        saveData();
    }

    public Context getContext()
    {
        return _context;
    }
}
apache-2.0
jpodeszwik/mifos
application/src/main/java/org/mifos/accounts/struts/actionforms/AccountApplyPaymentActionForm.java
15538
/*
 * Copyright (c) 2005-2011 Grameen Foundation USA
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * See also http://www.apache.org/licenses/LICENSE-2.0.html for an
 * explanation of the license and how it is applied.
 */

package org.mifos.accounts.struts.actionforms;

import static org.mifos.framework.util.helpers.DateUtils.dateFallsBeforeDate;
import static org.mifos.framework.util.helpers.DateUtils.getDateAsSentFromBrowser;

import java.sql.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Locale;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.apache.commons.lang.StringUtils;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.joda.time.LocalDate;
import org.mifos.accounts.servicefacade.AccountTypeDto;
import org.mifos.accounts.util.helpers.AccountConstants;
import org.mifos.application.admin.servicefacade.InvalidDateException;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.config.AccountingRules;
import org.mifos.framework.business.util.helpers.MethodNameConstants;
import org.mifos.framework.struts.actionforms.BaseActionForm;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.DoubleConversionResult;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.security.login.util.helpers.LoginConstants;
import org.mifos.security.util.ActivityMapper;
import org.mifos.security.util.UserContext;

/**
 * Struts form backing the "apply payment" flow for an account.
 *
 * <p>Collects the transaction and receipt dates (each as separate day/month/year
 * request fields), the payment amount, the mode of payment and optional
 * account-transfer details, and performs server-side validation when the
 * {@code preview} method is invoked. Helpers such as
 * {@code compileDateString}, {@code getLocalizedMessage},
 * {@code validateAmount(String, MifosCurrency, ...)} and {@code addError}
 * are inherited from {@link BaseActionForm}.
 */
public class AccountApplyPaymentActionForm extends BaseActionForm {

    private String input;

    // Transaction date as the three separate fields posted by the browser.
    private String transactionDateDD;
    private String transactionDateMM;
    private String transactionDateYY;

    private String amount;

    private Short currencyId;

    private String receiptId;

    // Receipt date as the three separate fields posted by the browser.
    private String receiptDateDD;
    private String receiptDateMM;
    private String receiptDateYY;

    /*
     * Among other things, this field holds the PaymentTypes value for disbursements.
     */
    private String paymentTypeId;

    private String waiverInterest;

    private String globalAccountNum;

    private String accountId;

    private String prdOfferingName;

    // When true (the default), a zero amount is rejected by validateAmount().
    private boolean amountCannotBeZero = true;

    // Most recent payment on the account; transactions may not pre-date it.
    private java.util.Date lastPaymentDate;

    private String accountForTransfer;

    // Payment-type id that designates a transfer; compared against paymentTypeId.
    private Short transferPaymentTypeId;

    public boolean amountCannotBeZero() {
        return this.amountCannotBeZero;
    }

    public void setAmountCannotBeZero(boolean amountCannotBeZero) {
        this.amountCannotBeZero = amountCannotBeZero;
    }

    public String getPrdOfferingName() {
        return prdOfferingName;
    }

    public void setPrdOfferingName(String prdOfferingName) {
        this.prdOfferingName = prdOfferingName;
    }

    public String getAmount() {
        return amount;
    }

    public void setAmount(String amount) {
        this.amount = amount;
    }

    public String getInput() {
        return input;
    }

    /**
     * Runs the full validation suite, but only for the {@code preview} method;
     * all other methods pass through with no errors. Any errors found are also
     * stashed on the request (with the method name) for redisplay.
     */
    @Override
    public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) {
        String methodCalled = request.getParameter(MethodNameConstants.METHOD);
        ActionErrors errors = new ActionErrors();
        if (methodCalled != null && methodCalled.equals("preview")) {
            validateTransfer(errors);
            validateTransactionDate(errors);
            validatePaymentType(errors);
            validateReceiptDate(errors);
            String accountType = (String) request.getSession().getAttribute(Constants.ACCOUNT_TYPE);
            validateAccountType(errors, accountType);
            validateAmount(errors);
            validateModeOfPaymentSecurity(request, errors);
        }
        if (!errors.isEmpty()) {
            request.setAttribute(Globals.ERROR_KEY, errors);
            request.setAttribute("methodCalled", methodCalled);
        }
        return errors;
    }

    /**
     * Rejects the restricted mode of payment when the current user lacks the
     * mode-of-payment security activity.
     */
    private void validateModeOfPaymentSecurity(HttpServletRequest request, ActionErrors errors) {
        UserContext userContext = (UserContext) SessionUtils.getAttribute(Constants.USER_CONTEXT_KEY, request.getSession());
        // FIX: literal-first comparison — paymentTypeId may be null when the field was
        // not posted (validatePaymentType adds an error but does not abort validation),
        // and the original getPaymentTypeId().equals("4") threw an NPE in that case.
        // NOTE(review): "4" is a magic payment-type id — confirm its meaning against the
        // PaymentTypes configuration and replace with a named constant if possible.
        if ("4".equals(getPaymentTypeId()) && !ActivityMapper.getInstance().isModeOfPaymentSecurity(userContext)) {
            errors.add(AccountConstants.LOAN_TRANSFER_PERMISSION, new ActionMessage(AccountConstants.LOAN_TRANSFER_PERMISSION,
                    getLocalizedMessage("accounts.mode_of_payment_permission")));
        }
    }

    /**
     * When the selected mode of payment is the transfer payment type, a target
     * account must be supplied.
     */
    private void validateTransfer(ActionErrors errors) {
        // FIX: compare with String.valueOf(...) on the left — paymentTypeId may be null
        // when nothing was posted; the original paymentTypeId.equals(...) threw an NPE.
        if (String.valueOf(transferPaymentTypeId).equals(paymentTypeId) && StringUtils.isBlank(accountForTransfer)) {
            errors.add(AccountConstants.NO_ACCOUNT_FOR_TRANSFER, new ActionMessage(AccountConstants.NO_ACCOUNT_FOR_TRANSFER));
        }
    }

    /** For loan accounts only, the amount is a mandatory field. */
    private void validateAccountType(ActionErrors errors, String accountType) {
        if (accountType != null && accountType.equals(AccountTypeDto.LOAN_ACCOUNT.name())) {
            if (getAmount() == null || getAmount().equals("")) {
                errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
                        getLocalizedMessage("accounts.amt")));
            }
        }
    }

    /** The mode of payment is always mandatory. */
    private void validatePaymentType(ActionErrors errors) {
        if (StringUtils.isEmpty(getPaymentTypeId())) {
            errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
                    getLocalizedMessage("accounts.mode_of_payment")));
        }
    }

    /** The receipt date is optional, but must be a valid, non-future date when given. */
    private void validateReceiptDate(ActionErrors errors) {
        if (getReceiptDate() != null && !getReceiptDate().equals("")) {
            ActionErrors validationErrors = validateDate(getReceiptDate(), getLocalizedMessage("accounts.receiptdate"));
            if (null != validationErrors && !validationErrors.isEmpty()) {
                errors.add(validationErrors);
            }
        }
    }

    /**
     * The transaction date is mandatory, must be valid and non-future, and may
     * not fall before the last payment on the account.
     */
    private void validateTransactionDate(ActionErrors errors) {
        String fieldName = "accounts.date_of_trxn";
        ActionErrors validationErrors = validateDate(getTransactionDate(), getLocalizedMessage(fieldName));
        if (null != validationErrors && !validationErrors.isEmpty()) {
            errors.add(validationErrors);
        }
        if (null != getTransactionDate()) {
            validationErrors = validatePaymentDate(getTransactionDate(), getLocalizedMessage(fieldName));
            if (validationErrors != null && !validationErrors.isEmpty()) {
                errors.add(validationErrors);
            }
        }
    }

    /**
     * Checks that the transaction date does not fall before the last payment date.
     * Exposed for testing.
     *
     * @return errors found, or null when the date is acceptable (an unparseable
     *         date also returns errors without a message — validateDate() has
     *         already reported it).
     */
    public ActionErrors validatePaymentDate(String transactionDate, String fieldName) {
        ActionErrors errors = null;
        try {
            if (lastPaymentDate != null && dateFallsBeforeDate(getDateAsSentFromBrowser(transactionDate), lastPaymentDate)) {
                errors = new ActionErrors();
                errors.add(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT, new ActionMessage(
                        AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT, fieldName));
            }
        } catch (InvalidDateException ide) {
            errors = new ActionErrors();
            // dont add a message, since it was already added in validateDate()
        }
        return errors;
    }

    /**
     * Validates that a date string is present, parseable, and not in the future.
     *
     * @return errors found, or null when the date is acceptable.
     */
    protected ActionErrors validateDate(String date, String fieldName) {
        ActionErrors errors = null;
        java.sql.Date sqlDate = null;
        if (date != null && !date.equals("")) {
            try {
                sqlDate = getDateAsSentFromBrowser(date);
                // whichDirection() > 0 means the date lies in the future.
                if (DateUtils.whichDirection(sqlDate) > 0) {
                    errors = new ActionErrors();
                    errors.add(AccountConstants.ERROR_FUTUREDATE, new ActionMessage(AccountConstants.ERROR_FUTUREDATE,
                            fieldName));
                }
            } catch (InvalidDateException ide) {
                errors = new ActionErrors();
                errors.add(AccountConstants.ERROR_INVALIDDATE, new ActionMessage(AccountConstants.ERROR_INVALIDDATE,
                        fieldName));
            }
        } else {
            errors = new ActionErrors();
            errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY, fieldName));
        }
        return errors;
    }

    /**
     * @return the locale of the logged-in user, or null when no user context is
     *         present on the session.
     */
    protected Locale getUserLocale(HttpServletRequest request) {
        Locale locale = null;
        HttpSession session = request.getSession();
        if (session != null) {
            UserContext userContext = (UserContext) session.getAttribute(LoginConstants.USERCONTEXT);
            if (null != userContext) {
                locale = userContext.getCurrentLocale();
            }
        }
        return locale;
    }

    /**
     * Validates the amount (conversion and range); when amountCannotBeZero is
     * set, a non-positive amount is also rejected.
     */
    protected void validateAmount(ActionErrors errors) {
        MifosCurrency currency = null;
        if (getCurrencyId() != null && AccountingRules.isMultiCurrencyEnabled()) {
            currency = AccountingRules.getCurrencyByCurrencyId(getCurrencyId());
        }
        DoubleConversionResult conversionResult = validateAmount(getAmount(), currency, AccountConstants.ACCOUNT_AMOUNT,
                errors, "");
        if (amountCannotBeZero() && conversionResult.getErrors().size() == 0
                && !(conversionResult.getDoubleValue() > 0.0)) {
            addError(errors, AccountConstants.ACCOUNT_AMOUNT, AccountConstants.ERRORS_MUST_BE_GREATER_THAN_ZERO,
                    getLocalizedMessage(AccountConstants.ACCOUNT_AMOUNT));
        }
    }

    public void setInput(String input) {
        this.input = input;
    }

    public String getPaymentTypeId() {
        return paymentTypeId;
    }

    public void setPaymentTypeId(String paymentTypeId) {
        this.paymentTypeId = paymentTypeId;
    }

    /** @return the receipt date recomposed from its DD/MM/YY parts. */
    public String getReceiptDate() {
        return compileDateString(receiptDateDD, receiptDateMM, receiptDateYY);
    }

    /** Splits a browser-format date string into the DD/MM/YY receipt fields. */
    public void setReceiptDate(String receiptDate) throws InvalidDateException {
        if (StringUtils.isBlank(receiptDate)) {
            receiptDateDD = null;
            receiptDateMM = null;
            receiptDateYY = null;
        } else {
            Calendar cal = new GregorianCalendar();
            java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
            cal.setTime(date);
            receiptDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
            receiptDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
            receiptDateYY = Integer.toString(cal.get(Calendar.YEAR));
        }
    }

    public String getReceiptId() {
        return receiptId;
    }

    public void setReceiptId(String receiptId) {
        this.receiptId = receiptId;
    }

    /** @return the transaction date recomposed from its DD/MM/YY parts. */
    public String getTransactionDate() {
        return compileDateString(transactionDateDD, transactionDateMM, transactionDateYY);
    }

    /** Splits a browser-format date string into the DD/MM/YY transaction fields. */
    public void setTransactionDate(String receiptDate) throws InvalidDateException {
        if (StringUtils.isBlank(receiptDate)) {
            transactionDateDD = null;
            transactionDateMM = null;
            transactionDateYY = null;
        } else {
            Calendar cal = new GregorianCalendar();
            java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
            cal.setTime(date);
            transactionDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
            transactionDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
            transactionDateYY = Integer.toString(cal.get(Calendar.YEAR));
        }
    }

    public String getAccountId() {
        return accountId;
    }

    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    public String getGlobalAccountNum() {
        return globalAccountNum;
    }

    public void setGlobalAccountNum(String globalAccountNum) {
        this.globalAccountNum = globalAccountNum;
    }

    /** Resets the payment-entry fields (amount, payment type, receipt data). */
    protected void clear() throws InvalidDateException {
        this.amount = null;
        this.paymentTypeId = null;
        setReceiptDate(null);
        this.receiptId = null;
    }

    public String getReceiptDateDD() {
        return receiptDateDD;
    }

    public void setReceiptDateDD(String receiptDateDD) {
        this.receiptDateDD = receiptDateDD;
    }

    public String getReceiptDateMM() {
        return receiptDateMM;
    }

    public void setReceiptDateMM(String receiptDateMM) {
        this.receiptDateMM = receiptDateMM;
    }

    public String getReceiptDateYY() {
        return receiptDateYY;
    }

    public void setReceiptDateYY(String receiptDateYY) {
        this.receiptDateYY = receiptDateYY;
    }

    public String getTransactionDateDD() {
        return transactionDateDD;
    }

    public void setTransactionDateDD(String transactionDateDD) {
        this.transactionDateDD = transactionDateDD;
    }

    public String getTransactionDateMM() {
        return transactionDateMM;
    }

    public void setTransactionDateMM(String transactionDateMM) {
        this.transactionDateMM = transactionDateMM;
    }

    public String getTransactionDateYY() {
        return transactionDateYY;
    }

    public void setTransactionDateYY(String transactionDateYY) {
        this.transactionDateYY = transactionDateYY;
    }

    public Short getCurrencyId() {
        return this.currencyId;
    }

    public void setCurrencyId(Short currencyId) {
        this.currencyId = currencyId;
    }

    public String getWaiverInterest() {
        return waiverInterest;
    }

    public void setWaiverInterest(String waiverInterest) {
        this.waiverInterest = waiverInterest;
    }

    /** @return the receipt date as a Joda LocalDate, or null when not set. */
    public LocalDate getReceiptDateAsLocalDate() throws InvalidDateException {
        Date receiptDateStr = getDateAsSentFromBrowser(getReceiptDate());
        return (receiptDateStr != null) ? new LocalDate(receiptDateStr.getTime()) : null;
    }

    /** @return the transaction date as a Joda LocalDate. */
    public LocalDate getTrxnDateAsLocalDate() throws InvalidDateException {
        return new LocalDate(getTrxnDate().getTime());
    }

    /** @return the transaction date parsed from the browser-format string. */
    public Date getTrxnDate() throws InvalidDateException {
        return getDateAsSentFromBrowser(getTransactionDate());
    }

    public void setLastPaymentDate(java.util.Date lastPaymentDate) {
        this.lastPaymentDate = lastPaymentDate;
    }

    public String getAccountForTransfer() {
        return accountForTransfer;
    }

    public void setAccountForTransfer(String accountForTransfer) {
        this.accountForTransfer = accountForTransfer;
    }

    public Short getTransferPaymentTypeId() {
        return transferPaymentTypeId;
    }

    public void setTransferPaymentTypeId(Short transferPaymentTypeId) {
        this.transferPaymentTypeId = transferPaymentTypeId;
    }
}
apache-2.0
hzbarcea/activemq-nob
activemq-nob-filestore/src/main/java/org/apache/activemq/nob/filestore/uuiddir/UUIDDirectoryStoreFilenameDecoder.java
2083
package org.apache.activemq.nob.filestore.uuiddir;

import org.apache.activemq.nob.filestore.BrokerFilenameDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.UUID;

/**
 * Decoder of filenames in a UUID-based filesystem store of broker configuration files. This store only supports
 * broker IDs in the form of UUIDs.
 *
 * Created by art on 2/19/15.
 */
public class UUIDDirectoryStoreFilenameDecoder implements BrokerFilenameDecoder {

    public static final String XBEAN_FILE_PATH_SUFFIX = "-xbean.xml";

    // Single class-level logger constant. The original kept a non-final instance copy
    // of a static default logger with no way to inject a replacement, so the extra
    // field was dropped in favor of the conventional static-final form.
    private static final Logger LOG = LoggerFactory.getLogger(UUIDDirectoryStoreFilenameDecoder.class);

    /**
     * Decode the pathname as a UUID if it is a regular file (i.e. not a directory) and return the UUID.
     *
     * @param brokerPath path to the candidate broker.
     * @return the canonical string form of the broker UUID, or null when the path is a
     *         directory (or does not exist) or its filename is not a parseable UUID.
     */
    @Override
    public String extractIdFromFilename(File brokerPath) {
        String result = null;

        if (!brokerPath.isDirectory()) {
            try {
                // UUID.fromString() either returns a UUID or throws IllegalArgumentException —
                // it never returns null, so the original post-parse null check was dead code.
                // toString() yields the canonical lower-case form regardless of input casing.
                result = UUID.fromString(brokerPath.getName()).toString();
            } catch (IllegalArgumentException illegalArgExc) {
                LOG.debug("invalid UUID {}", brokerPath.getName());
            }
        }

        return result;
    }

    /**
     * Locate the path to the xbean configuration file for the broker at the given path. This method validates the
     * broker path as it must to determine the broker ID.
     *
     * @param brokerPath path to the broker.
     * @return path to the xbean configuration file, even if it does not exist; null when
     *         brokerPath is not a valid broker file (see extractIdFromFilename).
     */
    @Override
    public File getBrokerXbeanFile(File brokerPath) {
        File result = null;

        String brokerId = this.extractIdFromFilename(brokerPath);
        if (brokerId != null) {
            // Sibling file named "<uuid>-xbean.xml", derived from the broker path itself.
            result = new File(brokerPath.getPath() + XBEAN_FILE_PATH_SUFFIX);
        }

        return result;
    }
}
apache-2.0