repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
ernestp/consulo
platform/xdebugger-impl/src/com/intellij/xdebugger/impl/breakpoints/XExpressionImpl.java
3535
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.xdebugger.impl.breakpoints; import com.intellij.lang.Language; import com.intellij.xdebugger.XExpression; import com.intellij.xdebugger.evaluation.EvaluationMode; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** * @author egor */ public class XExpressionImpl implements XExpression { public static final XExpression EMPTY_EXPRESSION = fromText("", EvaluationMode.EXPRESSION); public static final XExpression EMPTY_CODE_FRAGMENT = fromText("", EvaluationMode.CODE_FRAGMENT); @NotNull private final String myExpression; private final Language myLanguage; private final String myCustomInfo; private final EvaluationMode myMode; public XExpressionImpl(@NotNull String expression, Language language, String customInfo) { this(expression, language, customInfo, EvaluationMode.EXPRESSION); } public XExpressionImpl(@NotNull String expression, Language language, String customInfo, EvaluationMode mode) { myExpression = expression; myLanguage = language; myCustomInfo = customInfo; myMode = mode; } @NotNull @Override public String getExpression() { return myExpression; } @Override public Language getLanguage() { return myLanguage; } @Override public String getCustomInfo() { return myCustomInfo; } @Override public EvaluationMode getMode() { return myMode; } @Nullable public static XExpressionImpl fromText(@Nullable String text) { return text != null ? 
new XExpressionImpl(text, null, null, EvaluationMode.EXPRESSION) : null; } @Nullable public static XExpressionImpl fromText(@Nullable String text, EvaluationMode mode) { return text != null ? new XExpressionImpl(text, null, null, mode) : null; } public static XExpressionImpl changeMode(XExpression expression, EvaluationMode mode) { return new XExpressionImpl(expression.getExpression(), expression.getLanguage(), expression.getCustomInfo(), mode); } @Override public String toString() { return myExpression; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; XExpressionImpl that = (XExpressionImpl)o; if (myCustomInfo != null ? !myCustomInfo.equals(that.myCustomInfo) : that.myCustomInfo != null) return false; if (!myExpression.equals(that.myExpression)) return false; if (myLanguage != null ? !myLanguage.equals(that.myLanguage) : that.myLanguage != null) return false; if (myMode != that.myMode) return false; return true; } @Override public int hashCode() { int result = myExpression.hashCode(); result = 31 * result + (myLanguage != null ? myLanguage.hashCode() : 0); result = 31 * result + (myCustomInfo != null ? myCustomInfo.hashCode() : 0); result = 31 * result + (myMode != null ? myMode.hashCode() : 0); return result; } }
apache-2.0
zuesgooogle/game-server
src/main/java/com/simplegame/server/bus/client/io/action/BusOutAction.java
640
package com.simplegame.server.bus.client.io.action;

import javax.annotation.Resource;

import com.simplegame.core.action.annotation.ActionMapping;
import com.simplegame.core.action.annotation.ActionWorker;
import com.simplegame.core.message.Message;
import com.simplegame.server.bus.client.io.command.ClientIoCommands;
import com.simplegame.server.bus.client.io.service.IIoService;

/**
 * Action handler for client-disconnect ("role out") messages on the bus.
 *
 * <p>Registered via {@code @ActionWorker}; the single mapping forwards the
 * role id carried by the incoming {@link Message} to the IO service.
 */
@ActionWorker
public class BusOutAction {

    @Resource
    private IIoService ioService;

    /**
     * Handles {@link ClientIoCommands#ROLE_OUT}: notifies the IO service that
     * the role identified by the message has gone offline.
     *
     * @param message the bus message; only its role id is consumed
     */
    @ActionMapping(mapping = ClientIoCommands.ROLE_OUT)
    public void roleOut(Message message) {
        String roleId = message.getRoleId();
        ioService.roleOut(roleId);
    }
}
apache-2.0
badlogicmanpreet/aura
aura-impl/src/main/java/org/auraframework/impl/system/DefinitionImpl.java
11852
/*
 * Copyright (C) 2013 salesforce.com, inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.auraframework.impl.system;

import static org.auraframework.instance.AuraValueProviderType.LABEL;

import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.auraframework.Aura;
import org.auraframework.builder.DefBuilder;
import org.auraframework.def.DefDescriptor;
import org.auraframework.def.Definition;
import org.auraframework.def.DefinitionAccess;
import org.auraframework.expression.PropertyReference;
import org.auraframework.impl.DefinitionAccessImpl;
import org.auraframework.instance.GlobalValueProvider;
import org.auraframework.system.Location;
import org.auraframework.system.SubDefDescriptor;
import org.auraframework.throwable.AuraExceptionInfo;
import org.auraframework.throwable.quickfix.InvalidDefinitionException;
import org.auraframework.throwable.quickfix.QuickFixException;
import org.auraframework.util.json.Serialization;
import org.auraframework.util.json.Serialization.ReferenceScope;
import org.auraframework.util.json.Serialization.ReferenceType;
import org.auraframework.util.text.Hash;

import com.google.common.collect.Maps;

/**
 * The implementation for a definition.
 *
 * <p>Base class for concrete definition types: holds the descriptor, source location,
 * sub-definitions, API version, description, access, own-hash, and any parse error that was
 * deferred from build time to be thrown from {@link #validateDefinition()}.
 *
 * <p>All state except the {@code valid} flag is final and set at construction (usually from a
 * {@link RefBuilderImpl}).
 */
@Serialization(referenceType = ReferenceType.IDENTITY, referenceScope = ReferenceScope.REQUEST)
public abstract class DefinitionImpl<T extends Definition> implements Definition, Serializable {

    private static final long serialVersionUID = 5836732915093913670L;

    protected final DefDescriptor<T> descriptor;
    protected final Location location;
    protected final Map<SubDefDescriptor<?, T>, Definition> subDefs;
    protected final String apiVersion;
    protected final String description;
    // Error captured at parse/build time; surfaced later by validateDefinition().
    private final QuickFixException parseError;
    private final String ownHash;
    private final DefinitionAccess access;
    // Set true by markValid() once validation has completed.
    private boolean valid;

    /** Minimal constructor: descriptor + location only; all optional state is null. */
    protected DefinitionImpl(DefDescriptor<T> descriptor, Location location) {
        this(descriptor, location, null, null, null, null, null, null);
    }

    /** Builder constructor: copies all accumulated state out of the builder. */
    protected DefinitionImpl(RefBuilderImpl<T, ?> builder) {
        this(builder.getDescriptor(), builder.getLocation(), builder.subDefs, builder.apiVersion,
                builder.description, builder.getAccess(), builder.getOwnHash(), builder.getParseError());
    }

    /**
     * Full package-private constructor. If {@code access} is null, a default access is derived
     * from the descriptor's namespace (or null when there is no descriptor).
     */
    DefinitionImpl(DefDescriptor<T> descriptor, Location location,
            Map<SubDefDescriptor<?, T>, Definition> subDefs, String apiVersion, String description,
            DefinitionAccess access, String ownHash, QuickFixException parseError) {
        this.descriptor = descriptor;
        this.location = location;
        this.subDefs = subDefs;
        this.apiVersion = apiVersion;
        this.description = description;
        this.ownHash = ownHash;
        this.parseError = parseError;
        this.access = access == null
                ? DefinitionAccessImpl.defaultAccess(descriptor != null ? descriptor.getNamespace() : null)
                : access;
    }

    /**
     * @see Definition#getDescriptor()
     */
    @Override
    public DefDescriptor<T> getDescriptor() {
        return descriptor;
    }

    /**
     * @see Definition#getLocation()
     */
    @Override
    public Location getLocation() {
        return location;
    }

    @Override
    public DefinitionAccess getAccess() {
        return access;
    }

    /**
     * @see Definition#getName()
     */
    @Override
    public String getName() {
        // Fall back to the concrete class name when no descriptor was supplied.
        return descriptor == null ? getClass().getName() : descriptor.getName();
    }

    @Override
    public String getOwnHash() {
        return ownHash;
    }

    /**
     * No-op in the base class; subclasses override to contribute their dependencies.
     *
     * @throws QuickFixException
     * @see Definition#appendDependencies(java.util.Set)
     */
    @Override
    public void appendDependencies(Set<DefDescriptor<?>> dependencies) {
    }

    /**
     * No-op in the base class; subclasses with inheritance override this.
     *
     * @throws QuickFixException
     * @see Definition#appendSupers(java.util.Set)
     */
    @Override
    public void appendSupers(Set<DefDescriptor<?>> dependencies) throws QuickFixException {
    }

    /**
     * Base validation: re-throws any deferred parse error, then requires a descriptor.
     *
     * @throws QuickFixException
     * @see Definition#validateDefinition()
     */
    @Override
    public void validateDefinition() throws QuickFixException {
        if (parseError != null) {
            throw parseError;
        }
        if (descriptor == null) {
            throw new InvalidDefinitionException("No descriptor", location);
        }
    }

    @Override
    public void markValid() {
        this.valid = true;
    }

    @Override
    public boolean isValid() {
        return this.valid;
    }

    /**
     * No-op in the base class; subclasses override to validate cross-definition references.
     *
     * @throws QuickFixException
     * @see Definition#validateReferences()
     */
    @Override
    public void validateReferences() throws QuickFixException {
    }

    @Override
    public String toString() {
        // getDescriptor is not always non-null (though is should be). Avoid
        // throwing a null pointer
        // exception when someone asks for a string representation.
        if (getDescriptor() != null) {
            return getDescriptor().toString();
        } else {
            return "INVALID[" + this.location + "]: " + this.description;
        }
    }

    /** Looks up a sub-definition by its sub-descriptor; null when none are registered. */
    @SuppressWarnings("unchecked")
    @Override
    public <D extends Definition> D getSubDefinition(SubDefDescriptor<D, ?> sddesc) {
        if (subDefs == null) {
            return null;
        }
        return (D) subDefs.get(sddesc);
    }

    /** Convenience builder where the built type and the declared type coincide. */
    public abstract static class BuilderImpl<T extends Definition> extends RefBuilderImpl<T, T> {
        protected BuilderImpl(Class<T> defClass) {
            super(defClass);
        }
    };

    /**
     * Mutable builder for {@link DefinitionImpl} subclasses. Accumulates descriptor, location,
     * sub-defs, hash and parse errors; errors raised while setting the descriptor are captured
     * via {@link #setParseError(Throwable)} rather than thrown, so validation happens later.
     */
    public abstract static class RefBuilderImpl<T extends Definition, A extends Definition>
            implements DefBuilder<T, A> {
        // Once locked (lockDescriptor), setDescriptor calls silently become no-ops.
        private boolean descriptorLocked;
        public DefDescriptor<T> descriptor;
        public Location location;
        public Map<SubDefDescriptor<?, T>, Definition> subDefs;
        private final Class<T> defClass;
        public String apiVersion;
        public String description;
        public Hash hash;
        public String ownHash;
        private QuickFixException parseError;
        private DefinitionAccess access;

        protected RefBuilderImpl(Class<T> defClass) {
            this.defClass = defClass;
            //this.ownHash = String.valueOf(System.currentTimeMillis());
        }

        public RefBuilderImpl<T, A> setAccess(DefinitionAccess access) {
            this.access = access;
            return this;
        }

        public DefinitionAccess getAccess() {
            return access;
        }

        @Override
        public RefBuilderImpl<T, A> setLocation(String fileName, int line, int column, long lastModified) {
            location = new Location(fileName, line, column, lastModified);
            return this;
        }

        @Override
        public RefBuilderImpl<T, A> setLocation(String fileName, long lastModified) {
            location = new Location(fileName, lastModified);
            return this;
        }

        @Override
        public RefBuilderImpl<T, A> setLocation(Location location) {
            this.location = location;
            return this;
        }

        public Location getLocation() {
            return this.location;
        }

        /** Registers a sub-definition, lazily creating the map on first use. */
        public RefBuilderImpl<T, A> addSubDef(SubDefDescriptor<?, T> sddesc, Definition inner) {
            if (this.subDefs == null) {
                this.subDefs = Maps.newHashMap();
            }
            this.subDefs.put(sddesc, inner);
            return this;
        }

        /** Sets the descriptor and prevents any later setDescriptor call from replacing it. */
        public RefBuilderImpl<T, A> lockDescriptor(DefDescriptor<T> desc) {
            this.descriptorLocked = true;
            this.descriptor = desc;
            return this;
        }

        @Override
        public RefBuilderImpl<T, A> setDescriptor(String qualifiedName) {
            try {
                return this.setDescriptor(DefDescriptorImpl.getInstance(qualifiedName, defClass));
            } catch (Exception e) {
                // Bad qualified name: record as a parse error instead of throwing.
                setParseError(e);
                return this;
            }
        }

        @Override
        public RefBuilderImpl<T, A> setDescriptor(DefDescriptor<T> desc) {
            if (!this.descriptorLocked) {
                this.descriptor = desc;
            }
            return this;
        }

        @Override
        public DefDescriptor<T> getDescriptor() {
            return descriptor;
        }

        @Override
        public RefBuilderImpl<T, A> setAPIVersion(String apiVersion) {
            this.apiVersion = apiVersion;
            return this;
        }

        @Override
        public RefBuilderImpl<T, A> setDescription(String description) {
            this.description = description;
            return this;
        }

        @Override
        public RefBuilderImpl<T,A> setOwnHash(Hash hash) {
            // NOTE(review): a non-null Hash clears any previously set ownHash string, so the
            // string is recomputed from this Hash in getOwnHash() — presumably intentional;
            // confirm against callers before relying on it.
            if (hash != null) {
                this.ownHash = null;
            }
            this.hash = hash;
            return this;
        }

        @Override
        public RefBuilderImpl<T,A> setOwnHash(String ownHash) {
            this.ownHash = ownHash;
            return this;
        }

        private String getOwnHash() {
            //
            // Try to make sure that we have a hash string.
            //
            if (ownHash == null && hash != null && hash.isSet()) {
                ownHash = hash.toString();
            }
            return ownHash;
        }

        /**
         * Records the first parse error only (subsequent calls are ignored). Non-QuickFix
         * causes are wrapped in an InvalidDefinitionException, reusing the cause's location
         * when it carries AuraExceptionInfo.
         */
        @Override
        public void setParseError(Throwable cause) {
            if (this.parseError != null) {
                return;
            }
            if (cause instanceof QuickFixException) {
                this.parseError = (QuickFixException)cause;
            } else {
                Location location = null;
                if (cause instanceof AuraExceptionInfo) {
                    AuraExceptionInfo aei = (AuraExceptionInfo)cause;
                    location = aei.getLocation();
                }
                this.parseError = new InvalidDefinitionException(cause.getMessage(), location, cause);
            }
        }

        @Override
        public QuickFixException getParseError() {
            return parseError;
        }
    }

    @Override
    public void retrieveLabels() throws QuickFixException {
    }

    /**
     * A utility routine to get the full set of labels out of a set of property references.
     *
     * This is used everywhere that we parse javascript to get property references and want to
     * process them. But can be applied to literally anything.
     *
     * @param props the collection of properties to scan.
     */
    protected void retrieveLabels(Collection<PropertyReference> props) throws QuickFixException {
        if (props != null && !props.isEmpty()) {
            GlobalValueProvider labelProvider =
                    Aura.getContextService().getCurrentContext().getGlobalProviders().get(LABEL.getPrefix());
            for (PropertyReference e : props) {
                // Only $Label.* references are validated and resolved here.
                if (e.getRoot().equals(LABEL.getPrefix())) {
                    labelProvider.validate(e.getStem());
                    labelProvider.getValue(e.getStem());
                }
            }
        }
    }

    @Override
    public String getAPIVersion() {
        return apiVersion;
    }

    @Override
    public String getDescription() {
        return description;
    }
}
apache-2.0
robjcaskey/Unofficial-Coffee-Mud-Upstream
com/planet_ink/fakedb/ResultSet.java
30543
package com.planet_ink.fakedb; import java.io.InputStream; import java.io.Reader; import java.sql.NClob; import java.sql.RowId; import java.sql.SQLException; import java.sql.SQLXML; import java.util.Map; /* Copyright 2001 Thomas Neumann Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ @SuppressWarnings("unchecked") class ResultSet implements java.sql.ResultSet { private Statement statement; private Backend.Relation relation; private java.util.Iterator iter; private int currentRow=0; private int conditionIndex; private String conditionValue; private boolean eq=true; private boolean lt=false; private boolean gt=false; private final String[] values; private final boolean[] nullIndicators; private boolean nullFlag = false; ResultSet(Statement s, Backend.Relation r, int ci, String cv, String comp) { statement=s; relation=r; conditionIndex=ci; conditionValue=cv; comp=comp.trim(); eq=(comp.indexOf("=")>=0); lt=(comp.indexOf("<")>=0); gt=(comp.indexOf(">")>=0) ; currentRow=0; values=new String[r.attributes.length]; nullIndicators=new boolean[values.length]; if ((ci<0)&&(cv!=null)) { iter=r.index.keySet().iterator(); } else { iter=r.index.values().iterator(); } } public java.sql.Statement getStatement() throws java.sql.SQLException { return statement; } public static boolean isNumber(String s) { if(s==null) return false; s=s.trim(); if(s.length()==0) return false; if((s.length()>1)&&(s.startsWith("-"))) s=s.substring(1); for(int i=0;i<s.length();i++) if("0123456789.,".indexOf(s.charAt(i))<0) return false; 
return true; } public static double s_double(String DOUBLE) { double sdouble=0; try{ sdouble=Double.parseDouble(DOUBLE); } catch(Exception e){ return 0;} return sdouble; } public static long s_long(String LONG) { long slong=0; try{ slong=Long.parseLong(LONG); } catch(Exception e){ return 0;} return slong; } public static boolean isDouble(String DBL) { if(DBL.length()==0) return false; if(DBL.startsWith("-")&&(DBL.length()>1)) DBL=DBL.substring(1); boolean alreadyDot=false; for(int i=0;i<DBL.length();i++) if(!Character.isDigit(DBL.charAt(i))) { if(DBL.charAt(i)=='.') { if(alreadyDot) return false; alreadyDot=true; } else return false; } return alreadyDot; } public int numCompare(String s1, String s2) { if((s1==null)||(s2==null)) return 0; if((!isNumber(s1))||(!isNumber(s2))) return 0; if(isDouble(s1)||(isDouble(s2))) { double d1=isDouble(s1)?s_double(s1):Long.valueOf(s_long(s1)).doubleValue(); double d2=isDouble(s2)?s_double(s2):Long.valueOf(s_long(s2)).doubleValue(); if(d1==d2) return 0; if(d1>d2) return 1; return -1; } long l1=s_long(s1); long l2=s_long(s2); if(l1==l2) return 0; if(l1>l2) return 1; return -1; } public boolean next() throws java.sql.SQLException { while (true) { if (!iter.hasNext()) return false; if ((conditionIndex<0)&&(conditionValue!=null)) { String key=(String)iter.next(); String subKey=key; int x=subKey.indexOf("\n"); if(x>0)subKey=subKey.substring(0,x); int nc=(lt||gt)?numCompare(subKey,conditionValue):0; int sc=(lt||gt)?subKey.compareTo(conditionValue):0; if(((eq)&&(subKey.equals(conditionValue))) ||((eq)&&(key.startsWith(conditionValue+"\n"))) ||((lt)&&(nc<0)) ||((gt)&&(nc>0)) ||((lt)&&(sc<0)) ||((gt)&&(sc>0))) { currentRow++; return relation.getRecord(nullIndicators,values,(Backend.RecordInfo)relation.index.get(key)); } continue; } if (!relation.getRecord(nullIndicators,values,(Backend.RecordInfo)iter.next())) return false; if (conditionIndex>=0) { if (nullIndicators[conditionIndex]) continue; String subKey=values[conditionIndex]; int 
nc=(lt||gt)?numCompare(subKey,conditionValue):0; int sc=(lt||gt)?subKey.compareTo(conditionValue):0; if(!(((eq)&&(subKey.equals(conditionValue))) ||((lt)&&(nc<0)) ||((gt)&&(nc>0)) ||((lt)&&(sc<0)) ||((gt)&&(sc>0)))) continue; } currentRow++; return true; } } public void close() throws java.sql.SQLException { } public boolean wasNull() throws java.sql.SQLException { return nullFlag; } public String getString(int columnIndex) throws java.sql.SQLException { if ((columnIndex<0)||(columnIndex>=nullIndicators.length)||(nullIndicators[columnIndex])) { nullFlag=true; return null; } nullFlag=false; return values[columnIndex]; } public java.sql.Array getArray(int columnIndex) throws java.sql.SQLException { //String s=getString(columnIndex); if (nullFlag) return null; throw new java.sql.SQLException(); } public java.sql.Blob getBlob(int columnIndex) throws java.sql.SQLException { //String s=getString(columnIndex); if (nullFlag) return null; throw new java.sql.SQLException(); } public java.sql.Clob getClob(int columnIndex) throws java.sql.SQLException { //String s=getString(columnIndex); if (nullFlag) return null; throw new java.sql.SQLException(); } public java.sql.Ref getRef(int columnIndex) throws java.sql.SQLException { //String s=getString(columnIndex); if (nullFlag) return null; throw new java.sql.SQLException(); } public boolean getBoolean(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if ((s!=null)&&(s.length()>0)) switch (Character.toUpperCase(s.charAt(0))) { case 'T': case 'Y': case '1': return true; } return false; } public byte getByte(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Byte.parseByte(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public short getShort(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Short.parseShort(s); } catch 
(NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public int getInt(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Integer.parseInt(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public long getLong(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Long.parseLong(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public float getFloat(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Float.parseFloat(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public double getDouble(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return 0; try { return Double.parseDouble(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public java.math.BigDecimal getBigDecimal(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return new java.math.BigDecimal(0); try { return new java.math.BigDecimal(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } /** * @deprecated */ public java.math.BigDecimal getBigDecimal(int columnIndex, int scale) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) { java.math.BigDecimal v=new java.math.BigDecimal(0); v.setScale(scale); return v; } try { java.math.BigDecimal v=new java.math.BigDecimal(s); v.setScale(scale); return v; } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public byte[] getBytes(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; try { return s.getBytes(); } catch (NumberFormatException 
e) { throw new java.sql.SQLException(e.getMessage()); } } public java.sql.Date getDate(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; try { return java.sql.Date.valueOf(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public java.sql.Time getTime(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; try { return java.sql.Time.valueOf(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public java.sql.Timestamp getTimestamp(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; try { return java.sql.Timestamp.valueOf(s); } catch (NumberFormatException e) { throw new java.sql.SQLException(e.getMessage()); } } public java.io.InputStream getAsciiStream(int columnIndex) throws java.sql.SQLException { return getBinaryStream(columnIndex); } /** * @deprecated */ public java.io.InputStream getUnicodeStream(int columnIndex) throws java.sql.SQLException { return getBinaryStream(columnIndex); } public java.io.InputStream getBinaryStream(int columnIndex) throws java.sql.SQLException { byte b[] = getBytes(columnIndex); if (nullFlag) return null; return new java.io.ByteArrayInputStream(b); } public java.io.Reader getCharacterStream(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; return new java.io.CharArrayReader(s.toCharArray()); } public Object getObject(int columnIndex) throws java.sql.SQLException { return getString(columnIndex); } public java.net.URL getURL(int columnIndex) throws java.sql.SQLException { String s=getString(columnIndex); if (nullFlag) return null; try { return new java.net.URL(s); } catch (java.net.MalformedURLException e) { throw new java.sql.SQLException(e.getMessage()); } } public int findColumn(String columnName) throws java.sql.SQLException { 
return relation.findAttribute(columnName); } public String getString(String columnName) throws java.sql.SQLException { return getString(findColumn(columnName)); } public java.sql.Array getArray(String columnName) throws java.sql.SQLException { return getArray(findColumn(columnName)); } public java.sql.Blob getBlob(String columnName) throws java.sql.SQLException { return getBlob(findColumn(columnName)); } public java.sql.Clob getClob(String columnName) throws java.sql.SQLException { return getClob(findColumn(columnName)); } public java.sql.Ref getRef(String columnName) throws java.sql.SQLException { return getRef(findColumn(columnName)); } public boolean getBoolean(String columnName) throws java.sql.SQLException { return getBoolean(findColumn(columnName)); } public byte getByte(String columnName) throws java.sql.SQLException { return getByte(findColumn(columnName)); } public short getShort(String columnName) throws java.sql.SQLException { return getShort(findColumn(columnName)); } public int getInt(String columnName) throws java.sql.SQLException { return getInt(findColumn(columnName)); } public long getLong(String columnName) throws java.sql.SQLException { return getLong(findColumn(columnName)); } public float getFloat(String columnName) throws java.sql.SQLException { return getFloat(findColumn(columnName)); } public double getDouble(String columnName) throws java.sql.SQLException { return getDouble(findColumn(columnName)); } public java.math.BigDecimal getBigDecimal(String columnName) throws java.sql.SQLException { return getBigDecimal(findColumn(columnName)); } /** * @deprecated */ public java.math.BigDecimal getBigDecimal(String columnName, int scale) throws java.sql.SQLException { return getBigDecimal(findColumn(columnName), scale); } public byte[] getBytes(String columnName) throws java.sql.SQLException { return getBytes(findColumn(columnName)); } public java.sql.Date getDate(String columnName) throws java.sql.SQLException { return 
getDate(findColumn(columnName)); } public java.sql.Date getDate(int columnName,java.util.Calendar c) throws java.sql.SQLException { return getDate(columnName); } public java.sql.Date getDate(String columnName,java.util.Calendar c) throws java.sql.SQLException { return getDate(findColumn(columnName)); } public java.sql.Time getTime(String columnName) throws java.sql.SQLException { return getTime(findColumn(columnName)); } public java.sql.Time getTime(int columnName,java.util.Calendar c) throws java.sql.SQLException { return getTime(columnName); } public java.sql.Time getTime(String columnName,java.util.Calendar c) throws java.sql.SQLException { return getTime(findColumn(columnName)); } public java.sql.Timestamp getTimestamp(String columnName) throws java.sql.SQLException { return getTimestamp(findColumn(columnName)); } public java.sql.Timestamp getTimestamp(int columnName,java.util.Calendar c) throws java.sql.SQLException { return getTimestamp(columnName); } public java.sql.Timestamp getTimestamp(String columnName,java.util.Calendar c) throws java.sql.SQLException { return getTimestamp(findColumn(columnName)); } public java.io.Reader getCharacterStream(String columnName) throws java.sql.SQLException { return getCharacterStream(findColumn(columnName)); } public java.io.InputStream getAsciiStream(String columnName) throws java.sql.SQLException { return getAsciiStream(findColumn(columnName)); } /** * @deprecated */ public java.io.InputStream getUnicodeStream(String columnName) throws java.sql.SQLException { return getUnicodeStream(findColumn(columnName)); } public java.io.InputStream getBinaryStream(String columnName) throws java.sql.SQLException { return getBinaryStream(findColumn(columnName)); } public java.net.URL getURL(String columnName) throws java.sql.SQLException { return getURL(findColumn(columnName)); } public Object getObject(String columnName) throws java.sql.SQLException { return getObject(findColumn(columnName)); } public java.sql.SQLWarning getWarnings() 
throws java.sql.SQLException { return null; } public void clearWarnings() throws java.sql.SQLException { } public String getCursorName() throws java.sql.SQLException { throw new java.sql.SQLException("Positioned Update not supported.", "S1C00"); } public java.sql.ResultSetMetaData getMetaData() throws java.sql.SQLException { return null; } public void updateArray(int columnIndex,java.sql.Array x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateArray(String columnName,java.sql.Array x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateAsciiStream(int columnIndex,java.io.InputStream x,int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateAsciiStream(String columnName,java.io.InputStream x, int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBigDecimal(int columnIndex,java.math.BigDecimal x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBigDecimal(String columnName,java.math.BigDecimal x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBinaryStream(int columnIndex,java.io.InputStream x,int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBinaryStream(String columnName,java.io.InputStream x, int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBlob(int columnIndex,java.sql.Blob x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBlob(String columnName,java.sql.Blob x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBoolean(int columnIndex,boolean x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBoolean(String columnName,boolean x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateByte(int 
columnIndex,byte x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateByte(String columnName,byte x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBytes(int columnIndex,byte[] x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateBytes(String columnName,byte[] x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateCharacterStream(int columnIndex,java.io.Reader x,int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateCharacterStream(String columnName,java.io.Reader reader, int length) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateClob(int columnIndex,java.sql.Clob x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateClob(String columnName,java.sql.Clob x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateDate(int columnIndex,java.sql.Date x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateDate(String columnName,java.sql.Date x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateDouble(int columnIndex,double x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateDouble(String columnName,double x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateFloat(int columnIndex,float x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateFloat(String columnName,float x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateInt(int columnIndex,int x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateInt(String columnName,int x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateLong(int columnIndex,long x) 
throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateLong(String columnName,long x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateNull(int columnIndex) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateNull(String columnName) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateObject(int columnIndex,Object x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateObject(int columnIndex,Object x,int scale) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateObject(String columnName,Object x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateObject(String columnName,Object x,int scale) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateRef(int columnIndex,java.sql.Ref x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateRef(String columnName,java.sql.Ref x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateShort(int columnIndex,short x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateShort(String columnName,short x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateString(int columnIndex,String x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateString(String columnName,String x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateTime(int columnIndex,java.sql.Time x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateTime(String columnName,java.sql.Time x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateTimestamp(int 
columnIndex,java.sql.Timestamp x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void updateTimestamp(String columnName,java.sql.Timestamp x) throws java.sql.SQLException { throw new java.sql.SQLException(); } public void deleteRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void moveToInsertRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void moveToCurrentRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void cancelRowUpdates() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void insertRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public void refreshRow() throws java.sql.SQLException { throw new java.sql.SQLException(); } public int getRow() { return currentRow; } public boolean first() { return false; } public boolean previous() { return false; } public boolean isFirst() { return false; } private boolean afterLast=false; public boolean last() { try{ while(next()); } catch(java.sql.SQLException sqle){} afterLast=true; return true; } public boolean isLast() { return false; } public void beforeFirst() throws java.sql.SQLException { if(relation==null) throw new java.sql.SQLException(); if ((conditionIndex<0)&&(conditionValue!=null)) { iter=relation.index.keySet().iterator(); } else { iter=relation.index.values().iterator(); } currentRow=0; } public boolean isBeforeFirst() { return (currentRow==0); } public void afterLast(){ last(); } public boolean isAfterLast(){return afterLast;} public boolean absolute(int i) { return true; } public boolean relative(int i) { return false; } public boolean rowDeleted() { return false; } public boolean rowInserted() { return false; } public boolean rowUpdated() { return false; } public int getConcurrency() { return 0; } public int getType() { return 0; } public void setFetchSize(int i) throws java.sql.SQLException { statement.setFetchSize(i); } public int 
getFetchSize() throws java.sql.SQLException { return statement.getFetchSize(); } public void setFetchDirection(int i) throws java.sql.SQLException { statement.setFetchDirection(i); } public int getFetchDirection() throws java.sql.SQLException { return statement.getFetchDirection(); } public int getResultSetConcurrency() throws java.sql.SQLException { return statement.getResultSetConcurrency(); } public int getResultSetType() throws java.sql.SQLException { return statement.getResultSetType(); } public int getHoldability() throws SQLException { return 0; } public Reader getNCharacterStream(int arg0) throws SQLException { return null; } public Reader getNCharacterStream(String arg0) throws SQLException { return null; } public NClob getNClob(int arg0) throws SQLException { return null; } public NClob getNClob(String arg0) throws SQLException { return null; } public String getNString(int arg0) throws SQLException { return null; } public String getNString(String arg0) throws SQLException { return null; } //public Object getObject(int arg0, Map arg1) throws SQLException { return getString(arg0); } public Object getObject(int arg0, Map<String, Class<?>> arg1) throws SQLException { return getString(arg0); } public Object getObject(String arg0, Map<String, Class<?>> arg1) throws SQLException { return getObject(findColumn(arg0),arg1); } //public Object getObject(String arg0, Map arg1) throws SQLException { return getObject(findColumn(arg0),arg1); } public RowId getRowId(int arg0) throws SQLException { return null; } public RowId getRowId(String arg0) throws SQLException { return null; } public SQLXML getSQLXML(int arg0) throws SQLException { return null; } public SQLXML getSQLXML(String arg0) throws SQLException { return null;} public boolean isClosed() throws SQLException { return false; } public void updateAsciiStream(int arg0, InputStream arg1) throws SQLException {} public void updateAsciiStream(String arg0, InputStream arg1) throws SQLException {} public void 
updateAsciiStream(int arg0, InputStream arg1, long arg2) throws SQLException {} public void updateAsciiStream(String arg0, InputStream arg1, long arg2) throws SQLException {} public void updateBinaryStream(int arg0, InputStream arg1) throws SQLException {} public void updateBinaryStream(String arg0, InputStream arg1) throws SQLException {} public void updateBinaryStream(int arg0, InputStream arg1, long arg2) throws SQLException {} public void updateBinaryStream(String arg0, InputStream arg1, long arg2) throws SQLException {} public void updateBlob(int arg0, InputStream arg1) throws SQLException {} public void updateBlob(String arg0, InputStream arg1) throws SQLException {} public void updateBlob(int arg0, InputStream arg1, long arg2) throws SQLException {} public void updateBlob(String arg0, InputStream arg1, long arg2) throws SQLException {} public void updateCharacterStream(int arg0, Reader arg1) throws SQLException {} public void updateCharacterStream(String arg0, Reader arg1) throws SQLException {} public void updateCharacterStream(int arg0, Reader arg1, long arg2) throws SQLException {} public void updateCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException {} public void updateClob(int arg0, Reader arg1) throws SQLException {} public void updateClob(String arg0, Reader arg1) throws SQLException {} public void updateClob(int arg0, Reader arg1, long arg2) throws SQLException {} public void updateClob(String arg0, Reader arg1, long arg2) throws SQLException {} public void updateNCharacterStream(int arg0, Reader arg1) throws SQLException {} public void updateNCharacterStream(String arg0, Reader arg1) throws SQLException {} public void updateNCharacterStream(int arg0, Reader arg1, long arg2) throws SQLException {} public void updateNCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException {} public void updateNClob(int arg0, NClob arg1) throws SQLException {} public void updateNClob(String arg0, NClob arg1) throws SQLException {} 
// --- Remaining updater stubs -------------------------------------------------
// This result set implementation does not support updates: every updater below
// is a deliberate no-op (earlier updaters in this class throw SQLException;
// these JDBC 4.0 additions silently ignore the call instead — NOTE(review):
// the inconsistency looks unintentional but is preserved here).

public void updateNClob(int columnIndex, Reader reader) throws SQLException {}

public void updateNClob(String columnLabel, Reader reader) throws SQLException {}

public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {}

public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {}

public void updateNString(int columnIndex, String nString) throws SQLException {}

public void updateNString(String columnLabel, String nString) throws SQLException {}

public void updateRowId(int columnIndex, RowId rowId) throws SQLException {}

public void updateRowId(String columnLabel, RowId rowId) throws SQLException {}

public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {}

public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {}

// JDBC 4.0 wrapper API: this class wraps no other implementation, so it never
// unwraps to anything.
public boolean isWrapperFor(Class<?> iface) throws SQLException {
    return false;
}

public <T> T unwrap(Class<T> iface) throws SQLException {
    return null;
}
}
apache-2.0
SmartDeveloperHub/sdh-scm-harvester
frontend/src/test/java/org/smartdeveloperhub/harvesters/scm/frontend/core/util/AbstractCappedContainerHandlerTestHelper.java
4942
/**
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   This file is part of the Smart Developer Hub Project:
 *     http://www.smartdeveloperhub.org/
 *
 *   Center for Open Middleware
 *     http://www.centeropenmiddleware.com/
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Copyright (C) 2015-2016 Center for Open Middleware.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *             http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Artifact    : org.smartdeveloperhub.harvesters.scm:scm-harvester-frontend:0.3.0
 *   Bundle      : scm-harvester.war
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 */
package org.smartdeveloperhub.harvesters.scm.frontend.core.util;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
import static org.junit.Assert.fail;

import java.io.Serializable;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;

import mockit.Expectations;
import mockit.Mocked;

import org.ldp4j.application.data.DataSet;
import org.ldp4j.application.data.Individual;
import org.ldp4j.application.data.Name;
import org.ldp4j.application.data.NamingScheme;
import org.ldp4j.application.ext.ApplicationRuntimeException;
import org.ldp4j.application.session.ContainerSnapshot;
import org.ldp4j.application.session.ResourceSnapshot;
import org.ldp4j.application.session.WriteSession;

/**
 * Shared base for tests of {@code AbstractCappedContainerHandler} subclasses.
 * Provides two reusable verifications: that {@code get(...)} returns an empty
 * dataset named after the snapshot, and that the container factory method is
 * disabled (always throws).
 */
public abstract class AbstractCappedContainerHandlerTestHelper {

    /**
     * A {@link DataSet} stub whose every operation fails loudly. It is passed to
     * {@code create(...)} purely as a placeholder: the handler under test is
     * expected to reject creation before touching the dataset, so any method
     * call here indicates a test failure.
     */
    private final class CustomDataSet implements DataSet {

        @Override
        public Iterator<Individual<?, ?>> iterator() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public Name<?> name() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public int numberOfIndividuals() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public boolean hasIndividuals() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public Collection<? extends Individual<?, ?>> individuals() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public Set<Serializable> individualIds() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public boolean hasIndividual(final Object id) {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public <T extends Serializable, S extends Individual<T, S>> S individualOfId(final T id) {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public <T extends Serializable, S extends Individual<T, S>> S individual(final T id, final Class<? extends S> clazz) {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public boolean isEmpty() {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        @Override
        public void remove(final Individual<?, ?> src) {
            throw new UnsupportedOperationException("Method should not be invoked");
        }

        // toString() is the one method that must NOT throw: JMockit/assertion
        // failure messages may render this object.
        @Override
        public String toString() {
            return "DATASET";
        }

    }

    // JMockit-injected mocks shared by the verification helpers below.
    @Mocked
    private ContainerSnapshot container;

    @Mocked
    private WriteSession session;

    @Mocked
    private ResourceSnapshot snapshot;

    /**
     * Asserts that {@code sut.get(snapshot)} produces a dataset that carries the
     * snapshot's name (same instance) and contains no individuals.
     */
    protected final void verifyGetReturnsEmptyDataset(final AbstractCappedContainerHandler sut) throws Exception {
        final Name<String> name=NamingScheme.getDefault().name("id");
        // Record the expectation: the handler must ask the snapshot for its name.
        new Expectations() {{
            AbstractCappedContainerHandlerTestHelper.this.snapshot.name();this.result=name;
        }};
        final DataSet result = sut.get(this.snapshot);
        // Cast to Object so the sameInstance matcher compares identity, not generics.
        assertThat((Object)result.name(),sameInstance((Object)name));
        assertThat(result.hasIndividuals(),equalTo(false));
    }

    /**
     * Asserts that {@code sut.create(...)} is disabled: it must throw an
     * {@link ApplicationRuntimeException} whose message (lower-cased) is
     * "&lt;name&gt; creation is not supported".
     */
    protected final void verifyFactoryMethodIsDisabled(final String name, final AbstractCappedContainerHandler sut) {
        try {
            // CustomDataSet throws on any access, proving create() bails out early.
            sut.create(this.container, new CustomDataSet(), this.session);
            fail("Factory method should be disabled");
        } catch (final ApplicationRuntimeException e) {
            assertThat(e.getMessage().toLowerCase(),equalTo(name+" creation is not supported"));
        }
    }

}
apache-2.0
cbeams-archive/spring-framework-2.5.x
src/org/springframework/beans/TypeMismatchException.java
3697
/*
 * Copyright 2002-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans;

import java.beans.PropertyChangeEvent;

import org.springframework.util.ClassUtils;

/**
 * Exception thrown on a type mismatch when trying to set a bean property.
 *
 * <p>Raw {@code Class} (not {@code Class<?>}) is used throughout: this class
 * predates generics and keeps source compatibility with older callers.
 *
 * @author Rod Johnson
 * @author Juergen Hoeller
 */
public class TypeMismatchException extends PropertyAccessException {

	/**
	 * Error code that a type mismatch error will be registered with.
	 */
	public static final String ERROR_CODE = "typeMismatch";


	// transient: the offending value may not be Serializable, so it is
	// deliberately dropped on serialization rather than failing it.
	private transient Object value;

	private Class requiredType;


	/**
	 * Create a new TypeMismatchException.
	 * @param propertyChangeEvent the PropertyChangeEvent that resulted in the problem
	 * @param requiredType the required target type
	 */
	public TypeMismatchException(PropertyChangeEvent propertyChangeEvent, Class requiredType) {
		this(propertyChangeEvent, requiredType, null);
	}

	/**
	 * Create a new TypeMismatchException.
	 * @param propertyChangeEvent the PropertyChangeEvent that resulted in the problem
	 * @param requiredType the required target type (or <code>null</code> if not known)
	 * @param cause the root cause (may be <code>null</code>)
	 */
	public TypeMismatchException(PropertyChangeEvent propertyChangeEvent, Class requiredType, Throwable cause) {
		// Message is assembled piecewise so the "to required type" and
		// "for property" fragments only appear when that information exists.
		super(propertyChangeEvent,
				"Failed to convert property value of type [" +
				ClassUtils.getDescriptiveType(propertyChangeEvent.getNewValue()) + "]" +
				(requiredType != null ?
				" to required type [" + ClassUtils.getQualifiedName(requiredType) + "]" : "") +
				(propertyChangeEvent.getPropertyName() != null ?
				" for property '" + propertyChangeEvent.getPropertyName() + "'" : ""),
				cause);
		this.value = propertyChangeEvent.getNewValue();
		this.requiredType = requiredType;
	}

	/**
	 * Create a new TypeMismatchException without PropertyChangeEvent.
	 * @param value the offending value that couldn't be converted (may be <code>null</code>)
	 * @param requiredType the required target type (or <code>null</code> if not known)
	 */
	public TypeMismatchException(Object value, Class requiredType) {
		this(value, requiredType, null);
	}

	/**
	 * Create a new TypeMismatchException without PropertyChangeEvent.
	 * @param value the offending value that couldn't be converted (may be <code>null</code>)
	 * @param requiredType the required target type (or <code>null</code> if not known)
	 * @param cause the root cause (may be <code>null</code>)
	 */
	public TypeMismatchException(Object value, Class requiredType, Throwable cause) {
		super("Failed to convert value of type [" + ClassUtils.getDescriptiveType(value) + "]" +
				(requiredType != null ?
				" to required type [" + ClassUtils.getQualifiedName(requiredType) + "]" : ""),
				cause);
		this.value = value;
		this.requiredType = requiredType;
	}


	/**
	 * Return the offending value (may be <code>null</code>).
	 * Note: <code>null</code> after deserialization because the field is transient.
	 */
	public Object getValue() {
		return this.value;
	}

	/**
	 * Return the required target type, if any.
	 */
	public Class getRequiredType() {
		return this.requiredType;
	}

	public String getErrorCode() {
		return ERROR_CODE;
	}

}
apache-2.0
SciGaP/SEAGrid-Desktop-GUI
src/main/java/org/gridchem/client/gui/filebrowser/FileBrowser.java
2787
/*Copyright (c) 2004,University of Illinois at Urbana-Champaign. All rights reserved. * * Created on Jun 14, 2006 * * Developed by: CCT, Center for Computation and Technology, * NCSA, University of Illinois at Urbana-Champaign * OSC, Ohio Supercomputing Center * TACC, Texas Advanced Computing Center * UKy, University of Kentucky * * https://www.gridchem.org/ * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal with the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom * the Software is furnished to do so, subject to the following conditions: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimers. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimers in the documentation * and/or other materials provided with the distribution. * 3. Neither the names of Chemistry and Computational Biology Group , NCSA, * University of Illinois at Urbana-Champaign, nor the names of its contributors * may be used to endorse or promote products derived from this Software without * specific prior written permission. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS WITH THE SOFTWARE. 
 */
package org.gridchem.client.gui.filebrowser;

import java.net.URI;

/**
 * Common operations exposed by file-browser implementations: navigating to a
 * path and querying/changing the current file selection.
 *
 * <p>Original author note: "This is probably unnecessary."
 *
 * @author Rion Dooley &lt; dooley [at] tacc [dot] utexas [dot] edu &gt;
 */
public interface FileBrowser {

    /**
     * Set the path displayed by the file browser.
     *
     * @param path the path to navigate the browser to
     */
    public void setPath(String path);

    /**
     * Get the path currently displayed by the file browser.
     *
     * @return the current browser path
     */
    public String getPath();

    /**
     * Select the file corresponding to the given file name.
     *
     * @param filename the name of the file to select
     */
    public void setSelected(String filename);

    /**
     * Get the name of the currently selected file.
     *
     * @return the selected file's name
     */
    public String getSelected();
}
apache-2.0
nevenr/vertx-auth
vertx-auth-jwt/src/test/java/io/vertx/ext/auth/test/jwt/DummyVerticle.java
544
package io.vertx.ext.auth.test.jwt; import io.vertx.core.AbstractVerticle; import io.vertx.ext.auth.KeyStoreOptions; import io.vertx.ext.auth.jwt.JWTAuth; import io.vertx.ext.auth.jwt.JWTAuthOptions; public class DummyVerticle extends AbstractVerticle { private static final JWTAuthOptions config = new JWTAuthOptions() .setKeyStore(new KeyStoreOptions() .setPath("keystore.jceks") .setPassword("secret")); public void start() { System.out.println(this); JWTAuth.create(vertx, config); } }
apache-2.0
Ariah-Group/Finance
af_webapp/src/main/java/org/kuali/kfs/sys/businessobject/format/ExplicitKualiDecimalFormatter.java
1889
/*
 * Copyright 2012 The Kuali Foundation.
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kfs.sys.businessobject.format;

import java.math.BigDecimal;

import org.apache.log4j.Logger;

import org.kuali.rice.core.api.util.type.KualiDecimal;
import org.kuali.rice.core.web.format.BigDecimalFormatter;

/**
 * This class is used to format explicit decimal value to BigDecimal objects:
 * the final two characters of an undecorated digit string are treated as the
 * fractional part (e.g. "12345" represents 123.45).
 */
public class ExplicitKualiDecimalFormatter extends BigDecimalFormatter {

    private static Logger LOG = Logger.getLogger(ExplicitKualiDecimalFormatter.class);

    /**
     * Converts the given String into a KualiDecimal with the final two characters
     * being behind the decimal place.
     */
    @Override
    protected Object convertToObject(String target) {
        BigDecimal value = (BigDecimal) super.convertToObject(addDecimalPoint(target));
        return new KualiDecimal(value);
    }

    /**
     * Inserts a decimal point before the final two digits of the amount.
     *
     * <p>Fixes the previous implementation, which threw
     * {@code StringIndexOutOfBoundsException} for amounts shorter than two
     * characters (e.g. "5") and produced unparseable output for signed short
     * amounts (e.g. "-5" became ".-5"). Amounts that already contain a decimal
     * point, and null/blank amounts, are returned unchanged.
     *
     * @param amount the String representing the amount
     * @return a new String with a decimal point inserted before the last two digits
     */
    private String addDecimalPoint(String amount) {
        if (amount == null || amount.trim().isEmpty() || amount.contains(".")) {
            // nothing to do: already decimal, or let the superclass handle null/blank
            return amount;
        }
        String sign = "";
        String digits = amount;
        if (digits.startsWith("-") || digits.startsWith("+")) {
            // peel off the sign so padding/splitting only sees digits
            sign = digits.substring(0, 1);
            digits = digits.substring(1);
        }
        // left-pad so there are at least two fractional digits and one integer digit
        // ("5" -> "005" -> "0.05"); numeric value is unchanged
        while (digits.length() < 3) {
            digits = "0" + digits;
        }
        int split = digits.length() - 2;
        return sign + digits.substring(0, split) + "." + digits.substring(split);
    }
}
apache-2.0
lagom/lagom
jackson/src/test/java/com/lightbend/lagom/serialization/AbstractEvent2.java
559
/*
 * Copyright (C) Lightbend Inc. <https://www.lightbend.com>
 */

package com.lightbend.lagom.serialization;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.lightbend.lagom.javadsl.immutable.ImmutableStyle;
import org.immutables.value.Value;
import org.immutables.value.Value.Parameter;

/**
 * Second schema version of a test event, used to exercise Jackson
 * serialization evolution. The Immutables processor generates the concrete
 * {@code Event2} class that Jackson deserializes into.
 */
@Value.Immutable
@ImmutableStyle
@JsonDeserialize(as = Event2.class)
public interface AbstractEvent2 extends Jsonable {

  // Schema evolution: this accessor replaces V1's field1.
  @Parameter
  String getField1V2(); // renamed from field1

  // Schema evolution: field added in V2 with no V1 counterpart.
  @Parameter
  int getField2(); // new mandatory field
}
apache-2.0
d4rken/myolib
myolib/src/main/java/eu/darken/myolib/processor/classifier/WarmUpResultClassifierEvent.java
1487
package eu.darken.myolib.processor.classifier;

import eu.darken.myolib.processor.BaseDataPacket;
import eu.darken.myolib.tools.ByteHelper;

/**
 * Classifier event carrying the outcome of a Myo warm-up cycle.
 *
 * <p>The packet payload is two uint8 values: the classifier event type
 * (validated against {@code Type.WARM_UP_RESULT}) followed by the warm-up
 * result code.
 */
public class WarmUpResultClassifierEvent extends ClassifierEvent {

    /**
     * Possible warm-up results for Myo.
     */
    public enum WarmUpResult {
        UNKNOWN((byte) 0x00),
        SUCCESS((byte) 0x01),
        FAILED_TIMEOUT((byte) 0x02);

        private final byte mValue;

        WarmUpResult(byte value) {
            mValue = value;
        }

        public byte getValue() {
            return mValue;
        }
    }

    private WarmUpResult mWarmUpResult;

    /**
     * Parses a warm-up-result event out of the given packet.
     *
     * @param packet raw classifier data packet to decode
     * @throws RuntimeException if the packet's type byte does not match this event type
     */
    public WarmUpResultClassifierEvent(BaseDataPacket packet) {
        super(packet, Type.WARM_UP_RESULT);
        ByteHelper reader = new ByteHelper(packet.getData());
        int eventType = reader.getUInt8();
        if (getType().getValue() != eventType)
            throw new RuntimeException("Incompatible BaseDataPacket:" + eventType);
        mWarmUpResult = lookupResult(reader.getUInt8());
    }

    /**
     * Maps a raw uint8 code onto the matching {@link WarmUpResult} constant.
     * Returns null when no constant matches — NOTE(review): callers appear to
     * tolerate a null result; confirm before changing this to default to UNKNOWN.
     */
    private static WarmUpResult lookupResult(int rawValue) {
        for (WarmUpResult candidate : WarmUpResult.values()) {
            if (candidate.getValue() == rawValue) {
                return candidate;
            }
        }
        return null;
    }

    public WarmUpResult getWarmUpResult() {
        return mWarmUpResult;
    }

    public void setWarmUpResult(WarmUpResult warmUpResult) {
        mWarmUpResult = warmUpResult;
    }
}
apache-2.0
wangdan/DownloadManager
downloader/src/main/java/org/aisen/download/DownloadController.java
2163
package org.aisen.download;

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

import org.aisen.download.core.DownloadInfo;
import org.aisen.download.utils.Constants;
import org.aisen.download.utils.DLogger;

import java.util.ArrayList;
import java.util.List;
import java.util.Vector;

/**
 * Used to refresh the UI: fans download events out to registered
 * {@link IDownloadSubject} observers, always on the main (UI) thread.
 * Off-main-thread publishes are marshalled through a main-looper Handler.
 *
 * Created by wangdan on 16/6/14.
 */
public final class DownloadController {

    static final String TAG = Constants.TAG + "_DownloadController";

    // All registered observers. register()/unregister() may run on any thread;
    // publishDownload() iterates a snapshot (see below) so concurrent
    // (un)registration cannot trigger a ConcurrentModificationException.
    private final Vector<IDownloadSubject> mDownloadProxy = new Vector<>();

    // Marshals publishes from background threads onto the main looper.
    private final Handler mHandler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            if (msg.what == 0) {
                DownloadMsg downloadMsg = (DownloadMsg) msg.getData().getSerializable("msg");
                publishDownload(downloadMsg);
            }
        }
    };

    DownloadController() {

    }

    /**
     * Registers an observer; null and duplicate registrations are ignored.
     */
    public synchronized void register(IDownloadSubject callback) {
        if (callback == null) {
            return;
        }
        if (!mDownloadProxy.contains(callback)) {
            mDownloadProxy.add(callback);
            DLogger.v(TAG, "register proxy[%s]", callback.toString());
        }
    }

    /**
     * Unregisters a previously registered observer; unknown/null callbacks are ignored.
     */
    public synchronized void unregister(IDownloadSubject callback) {
        if (callback == null) {
            return;
        }
        boolean removed = mDownloadProxy.remove(callback);
        if (removed) {
            DLogger.v(TAG, "unregister proxy[%s]", callback.toString());
        }
    }

    /**
     * Delivers the message to every observer on the main thread; when called
     * from any other thread the message is posted to the main-looper handler.
     */
    void publishDownload(DownloadMsg downloadMsg) {
        if (Looper.myLooper() == Looper.getMainLooper()) {
            // Fix: iterate over a snapshot instead of the live Vector. The
            // previous enhanced-for over mDownloadProxy could throw
            // ConcurrentModificationException if another thread called
            // register()/unregister() mid-publish. The snapshot also avoids
            // invoking observer callbacks while holding any lock.
            List<IDownloadSubject> snapshot = new ArrayList<>(mDownloadProxy);
            for (IDownloadSubject proxy : snapshot) {
                proxy.publish(downloadMsg);
            }
        } else {
            Message message = mHandler.obtainMessage();
            message.what = 0;
            message.getData().putSerializable("msg", downloadMsg);
            message.sendToTarget();
        }
    }

    /**
     * Convenience overload: wraps the raw DownloadInfo before publishing.
     */
    void publishDownload(DownloadInfo downloadInfo) {
        publishDownload(new DownloadMsg(downloadInfo));
    }
}
apache-2.0
t0nyren/spatedb
src/main/java/com/ricemap/spateDB/core/RTree.java
57963
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the * NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package com.ricemap.spateDB.core; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Array; import java.lang.reflect.Field; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import java.util.Queue; import java.util.Stack; import java.util.Vector; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.util.IndexedSortable; import org.apache.hadoop.util.IndexedSorter; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.PriorityQueue; import org.apache.hadoop.util.QuickSort; import com.ricemap.spateDB.io.MemoryInputStream; import com.ricemap.spateDB.io.Text2; import com.ricemap.spateDB.shape.Point3d; import 
com.ricemap.spateDB.shape.Prism; import com.ricemap.spateDB.shape.Shape; /** * An RTree loaded in bulk and never changed after that. It cannot by * dynamically manipulated by either insertion or deletion. It only works with * 2-dimensional objects (keys). * * @author tonyren, eldawy * */ public class RTree<T extends Shape> implements Writable, Iterable<T> { public static enum FIELD_TYPE{NULL, Integer, Long, Double}; /** Logger */ private static final Log LOG = LogFactory.getLog(RTree.class); /** Size of tree header on disk. Height + Degree + Number of records + isColumnar*/ public static final int TreeHeaderSize = 4 + 4 + 4 + 4; /** Size of a node. Offset of first child + dimensions (x, y, width, height) */ public static final int NodeSize = 4 + 8 * 6; /** t, x ,y */ public static final int IndexUnitSize = 8 * 3; /** An instance of T that can be used to deserialize objects from disk */ T stockObject; public boolean columnar; /** Height of the tree (number of levels) */ private int height; /** Degree of internal nodes in the tree */ private int degree; /** Total number of nodes in the tree */ private int nodeCount; /** Number of leaf nodes */ private int leafNodeCount; /** Number of non-leaf nodes */ private int nonLeafNodeCount; /** Number of elements in the tree */ private int elementCount; /** An input stream that is used to read node structure (i.e., nodes) */ private FSDataInputStream structure; /** Input stream to tree data */ private FSDataInputStream data; /** The start offset of the tree in the data stream */ private long treeStartOffset; /** * Total tree size (header + structure + data) used to read the data in the * last leaf node correctly */ private int treeSize; public RTree() { } /** * Builds the RTree given a serialized list of elements. It uses the given * stockObject to deserialize these elements and build the tree. Also writes * the created tree to the disk directly. 
* * @param elements * - serialization of elements to be written * @param offset * - index of the first element to use in the elements array * @param len * - number of bytes to user from the elements array * @param bytesAvailable * - size available (in bytes) to store the tree structures * @param dataOut * - an output to use for writing the tree to * @param fast_sort * - setting this to <code>true</code> allows the method to run * faster by materializing the offset of each element in the list * which speeds up the comparison. However, this requires an * additional 16 bytes per element. So, for each 1M elements, the * method will require an additional 16 M bytes (approximately). */ public void bulkLoadWrite(final byte[] element_bytes, final int offset, final int len, final int degree, DataOutput dataOut, final boolean fast_sort, final boolean columnarStorage) { try { columnar = columnarStorage; //TODO: the order of fields should be stable under Oracle JVM, but not guaranteed Field[] fields = stockObject.getClass().getDeclaredFields(); // Count number of elements in the given text int i_start = offset; final Text line = new Text(); while (i_start < offset + len) { int i_end = skipToEOL(element_bytes, i_start); // Extract the line without end of line character line.set(element_bytes, i_start, i_end - i_start - 1); stockObject.fromText(line); elementCount++; i_start = i_end; } LOG.info("Bulk loading an RTree with " + elementCount + " elements"); // It turns out the findBestDegree returns the best degree when the // whole // tree is loaded to memory when processed. However, as current // algorithms // process the tree while it's on disk, a higher degree should be // selected // such that a node fits one file block (assumed to be 4K). 
// final int degree = findBestDegree(bytesAvailable, elementCount); LOG.info("Writing an RTree with degree " + degree); int height = Math.max(1, (int) Math.ceil(Math.log(elementCount) / Math.log(degree))); int leafNodeCount = (int) Math.pow(degree, height - 1); if (elementCount < 2 * leafNodeCount && height > 1) { height--; leafNodeCount = (int) Math.pow(degree, height - 1); } int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1)); int nonLeafNodeCount = nodeCount - leafNodeCount; // Keep track of the offset of each element in the text final int[] offsets = new int[elementCount]; final int[] ids = new int[elementCount]; final double[] ts = fast_sort ? new double[elementCount] : null; final double[] xs = fast_sort ? new double[elementCount] : null; final double[] ys = fast_sort ? new double[elementCount] : null; //initialize columnar data output ByteArrayOutputStream index_bos = new ByteArrayOutputStream(); DataOutputStream index_dos = new DataOutputStream(index_bos); ByteArrayOutputStream[] bos = new ByteArrayOutputStream[fields.length]; DataOutputStream[] dos = new DataOutputStream[fields.length]; for (int i = 0; i < bos.length; i++){ bos[i] = new ByteArrayOutputStream(); dos[i] = new DataOutputStream(bos[i]); } i_start = offset; line.clear(); for (int i = 0; i < elementCount; i++) { offsets[i] = i_start; ids[i] = i; int i_end = skipToEOL(element_bytes, i_start); if (xs != null) { // Extract the line with end of line character line.set(element_bytes, i_start, i_end - i_start - 1); stockObject.fromText(line); // Sample center of the shape ts[i] = (stockObject.getMBR().t1 + stockObject.getMBR().t2) / 2; xs[i] = (stockObject.getMBR().x1 + stockObject.getMBR().x2) / 2; ys[i] = (stockObject.getMBR().y1 + stockObject.getMBR().y2) / 2; //build columnar storage if (stockObject instanceof Point3d){ index_dos.writeDouble(ts[i]); index_dos.writeDouble(xs[i]); index_dos.writeDouble(ys[i]); } else{ throw new RuntimeException("Indexing non-point shape with RTREE 
is not supported yet"); } for (int j = 0 ; j < fields.length; j++){ if (fields[j].getType().equals(Integer.TYPE)){ dos[j].writeInt(fields[j].getInt(stockObject)); } else if (fields[j].getType().equals(Double.TYPE)){ dos[j].writeDouble(fields[j].getDouble(stockObject)); } else if (fields[j].getType().equals(Long.TYPE)){ dos[j].writeLong(fields[j].getLong(stockObject)); } else{ continue; //throw new RuntimeException("Field type is not supported yet"); } } } i_start = i_end; } index_dos.close(); for (int i = 0; i < dos.length; i++){ dos[i].close(); } /** A struct to store information about a split */ class SplitStruct extends Prism { /** Start and end index for this split */ int index1, index2; /** Direction of this split */ byte direction; /** Index of first element on disk */ int offsetOfFirstElement; static final byte DIRECTION_T = 0; static final byte DIRECTION_X = 1; static final byte DIRECTION_Y = 2; SplitStruct(int index1, int index2, byte direction) { this.index1 = index1; this.index2 = index2; this.direction = direction; } @Override public void write(DataOutput out) throws IOException { // if (columnarStorage) out.writeInt(index1); else out.writeInt(offsetOfFirstElement); super.write(out); } void partition(Queue<SplitStruct> toBePartitioned) { IndexedSortable sortableT; IndexedSortable sortableX; IndexedSortable sortableY; if (fast_sort) { // Use materialized xs[] and ys[] to do the comparisons sortableT = new IndexedSortable() { @Override public void swap(int i, int j) { // Swap ts double tempt = ts[i]; ts[i] = ts[j]; ts[j] = tempt; // Swap xs double tempx = xs[i]; xs[i] = xs[j]; xs[j] = tempx; // Swap ys double tempY = ys[i]; ys[i] = ys[j]; ys[j] = tempY; // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { if (ts[i] < ts[j]) return -1; if (ts[i] > ts[j]) return 1; return 0; } }; sortableX = new IndexedSortable() { @Override public 
void swap(int i, int j) { // Swap ts double tempt = ts[i]; ts[i] = ts[j]; ts[j] = tempt; // Swap xs double tempx = xs[i]; xs[i] = xs[j]; xs[j] = tempx; // Swap ys double tempY = ys[i]; ys[i] = ys[j]; ys[j] = tempY; // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { if (ts[i] < ts[j]) return -1; if (xs[i] < xs[j]) return -1; if (xs[i] > xs[j]) return 1; return 0; } }; sortableY = new IndexedSortable() { @Override public void swap(int i, int j) { // Swap ts double tempt = ts[i]; ts[i] = ts[j]; ts[j] = tempt; // Swap xs double tempx = xs[i]; xs[i] = xs[j]; xs[j] = tempx; // Swap ys double tempY = ys[i]; ys[i] = ys[j]; ys[j] = tempY; // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { if (ys[i] < ys[j]) return -1; if (ys[i] > ys[j]) return 1; return 0; } }; } else { // No materialized xs and ys. 
Always deserialize objects // to compare sortableT = new IndexedSortable() { @Override public void swap(int i, int j) { // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { // Get end of line int eol = skipToEOL(element_bytes, offsets[i]); line.set(element_bytes, offsets[i], eol - offsets[i] - 1); stockObject.fromText(line); double ti = (stockObject.getMBR().t1 + stockObject .getMBR().t2) / 2; eol = skipToEOL(element_bytes, offsets[j]); line.set(element_bytes, offsets[j], eol - offsets[j] - 1); stockObject.fromText(line); double tj = (stockObject.getMBR().t1 + stockObject .getMBR().t2) / 2; if (ti < tj) return -1; if (ti > tj) return 1; return 0; } }; sortableX = new IndexedSortable() { @Override public void swap(int i, int j) { // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { // Get end of line int eol = skipToEOL(element_bytes, offsets[i]); line.set(element_bytes, offsets[i], eol - offsets[i] - 1); stockObject.fromText(line); double xi = (stockObject.getMBR().x1 + stockObject .getMBR().x2) / 2; eol = skipToEOL(element_bytes, offsets[j]); line.set(element_bytes, offsets[j], eol - offsets[j] - 1); stockObject.fromText(line); double xj = (stockObject.getMBR().x1 + stockObject .getMBR().x2) / 2; if (xi < xj) return -1; if (xi > xj) return 1; return 0; } }; sortableY = new IndexedSortable() { @Override public void swap(int i, int j) { // Swap id int tempid = offsets[i]; offsets[i] = offsets[j]; offsets[j] = tempid; tempid = ids[i]; ids[i] = ids[j]; ids[j] = tempid; } @Override public int compare(int i, int j) { int eol = skipToEOL(element_bytes, offsets[i]); line.set(element_bytes, offsets[i], eol - offsets[i] - 1); stockObject.fromText(line); double yi = (stockObject.getMBR().y1 + stockObject .getMBR().y2) / 2; 
eol = skipToEOL(element_bytes, offsets[j]); line.set(element_bytes, offsets[j], eol - offsets[j] - 1); stockObject.fromText(line); double yj = (stockObject.getMBR().y1 + stockObject .getMBR().y2) / 2; if (yi < yj) return -1; if (yi > yj) return 1; return 0; } }; } final IndexedSorter sorter = new QuickSort(); final IndexedSortable[] sortables = new IndexedSortable[3]; sortables[SplitStruct.DIRECTION_T] = sortableT; sortables[SplitStruct.DIRECTION_X] = sortableX; sortables[SplitStruct.DIRECTION_Y] = sortableY; sorter.sort(sortables[direction], index1, index2); // Partition into maxEntries partitions (equally) and // create a SplitStruct for each partition int i1 = index1; for (int iSplit = 0; iSplit < degree; iSplit++) { int i2 = index1 + (index2 - index1) * (iSplit + 1) / degree; SplitStruct newSplit; if (direction == 0){ newSplit = new SplitStruct(i1, i2, (byte) 1); } else if (direction == 1){ newSplit = new SplitStruct(i1, i2, (byte) 2); } else{ newSplit = new SplitStruct(i1, i2, (byte) 0); } toBePartitioned.add(newSplit); i1 = i2; } } } // All nodes stored in level-order traversal Vector<SplitStruct> nodes = new Vector<SplitStruct>(); final Queue<SplitStruct> toBePartitioned = new LinkedList<SplitStruct>(); toBePartitioned.add(new SplitStruct(0, elementCount, SplitStruct.DIRECTION_X)); while (!toBePartitioned.isEmpty()) { SplitStruct split = toBePartitioned.poll(); if (nodes.size() < nonLeafNodeCount) { // This is a non-leaf split.partition(toBePartitioned); } nodes.add(split); } if (nodes.size() != nodeCount) { throw new RuntimeException("Expected node count: " + nodeCount + ". Real node count: " + nodes.size()); } // Now we have our data sorted in the required order. Start building // the tree. 
// Store the offset of each leaf node in the tree FSDataOutputStream fakeOut = new FSDataOutputStream( new java.io.OutputStream() { // Null output stream @Override public void write(int b) throws IOException { // Do nothing } @Override public void write(byte[] b, int off, int len) throws IOException { // Do nothing } @Override public void write(byte[] b) throws IOException { // Do nothing } }, null, TreeHeaderSize + nodes.size() * NodeSize); for (int i_leaf = nonLeafNodeCount, i = 0; i_leaf < nodes.size(); i_leaf++) { nodes.elementAt(i_leaf).offsetOfFirstElement = (int) fakeOut .getPos(); if (i != nodes.elementAt(i_leaf).index1) throw new RuntimeException(); double t1, x1, y1, t2, x2, y2; // Initialize MBR to first object int eol = skipToEOL(element_bytes, offsets[i]); fakeOut.write(element_bytes, offsets[i], eol - offsets[i]); line.set(element_bytes, offsets[i], eol - offsets[i] - 1); stockObject.fromText(line); Prism mbr = stockObject.getMBR(); t1 = mbr.t1; x1 = mbr.x1; y1 = mbr.y1; t2 = mbr.t2; x2 = mbr.x2; y2 = mbr.y2; i++; while (i < nodes.elementAt(i_leaf).index2) { eol = skipToEOL(element_bytes, offsets[i]); fakeOut.write(element_bytes, offsets[i], eol - offsets[i]); line.set(element_bytes, offsets[i], eol - offsets[i] - 1); stockObject.fromText(line); mbr = stockObject.getMBR(); if (mbr.t1 < t1) t1 = mbr.t1; if (mbr.x1 < x1) x1 = mbr.x1; if (mbr.y1 < y1) y1 = mbr.y1; if (mbr.t2 > t2) t2 = mbr.t2; if (mbr.x2 > x2) x2 = mbr.x2; if (mbr.y2 > y2) y2 = mbr.y2; i++; } nodes.elementAt(i_leaf).set(t1, x1, y1, t2, x2, y2); } fakeOut.close(); fakeOut = null; // Calculate MBR and offsetOfFirstElement for non-leaves for (int i_node = nonLeafNodeCount - 1; i_node >= 0; i_node--) { int i_first_child = i_node * degree + 1; nodes.elementAt(i_node).offsetOfFirstElement = nodes .elementAt(i_first_child).offsetOfFirstElement; int i_child = 0; Prism mbr; mbr = nodes.elementAt(i_first_child + i_child); double t1 = mbr.t1; double x1 = mbr.x1; double y1 = mbr.y1; double t2 = 
mbr.t2; double x2 = mbr.x2; double y2 = mbr.y2; i_child++; while (i_child < degree) { mbr = nodes.elementAt(i_first_child + i_child); if (mbr.t1 < t1) t1 = mbr.t1; if (mbr.x1 < x1) x1 = mbr.x1; if (mbr.y1 < y1) y1 = mbr.y1; if (mbr.t2 > t2) t2 = mbr.t2; if (mbr.x2 > x2) x2 = mbr.x2; if (mbr.y2 > y2) y2 = mbr.y2; i_child++; } nodes.elementAt(i_node).set(t1, x1, y1, t2, x2, y2); } // Start writing the tree // write tree header (including size) // Total tree size. (== Total bytes written - 8 bytes for the size // itself) dataOut.writeInt(TreeHeaderSize + NodeSize * nodeCount + len); // Tree height dataOut.writeInt(height); // Degree dataOut.writeInt(degree); dataOut.writeInt(elementCount); //isColumnar dataOut.writeInt(columnarStorage ? 1 : 0); // write nodes for (SplitStruct node : nodes) { node.write(dataOut); } // write elements if (columnarStorage){ byte[] index_bs = index_bos.toByteArray(); byte[][] bss = new byte[bos.length][]; for (int i = 0; i < bss.length; i++){ bss[i] = bos[i].toByteArray(); } for (int element_i = 0; element_i < elementCount; element_i++) { //int eol = skipToEOL(element_bytes, offsets[element_i]); //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]); dataOut.write(index_bs, ids[element_i]*IndexUnitSize, IndexUnitSize); } for (int i = 0; i < fields.length; i++){ int fieldSize = 0; if (fields[i].getType().equals(Integer.TYPE)){ fieldSize = 4; } else if (fields[i].getType().equals(Long.TYPE)){ fieldSize = 8; } else if (fields[i].getType().equals(Double.TYPE)){ fieldSize = 8; } else{ //throw new RuntimeException("Unsupported field type: " + fields[i].getType().getName()); continue; } for (int element_i = 0; element_i < elementCount; element_i++) { //int eol = skipToEOL(element_bytes, offsets[element_i]); //dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]); dataOut.write(bss[i], ids[element_i]*fieldSize, fieldSize); } } } else{ for (int element_i = 0; element_i < elementCount; element_i++) { int 
eol = skipToEOL(element_bytes, offsets[element_i]); dataOut.write(element_bytes, offsets[element_i], eol - offsets[element_i]); } } } catch (IOException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalAccessException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Override public void write(DataOutput out) throws IOException { throw new RuntimeException("write is no longer supported. " + "Please use bulkLoadWrite to write the RTree."); } @Override public void readFields(DataInput in) throws IOException { // Tree size (Header + structure + data) treeSize = in.readInt(); if (treeSize == 0) { height = elementCount = 0; return; } // Read only the tree structure in memory while actual records remain on // disk and loaded when necessary height = in.readInt(); if (height == 0) return; degree = in.readInt(); elementCount = in.readInt(); columnar = in.readInt()==1; // Keep only tree structure in memory nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1)); int structureSize = nodeCount * NodeSize; byte[] treeStructure = new byte[structureSize]; in.readFully(treeStructure, 0, structureSize); structure = new FSDataInputStream(new MemoryInputStream(treeStructure)); if (in instanceof FSDataInputStream) { this.treeStartOffset = ((FSDataInputStream) in).getPos() - structureSize - TreeHeaderSize; this.data = (FSDataInputStream) in; } else { // Load all tree data in memory this.treeStartOffset = 0 - structureSize - TreeHeaderSize; int treeDataSize = treeSize - TreeHeaderSize - structureSize; byte[] treeData = new byte[treeDataSize]; in.readFully(treeData, 0, treeDataSize); this.data = new FSDataInputStream(new MemoryInputStream(treeData)); } nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1)); leafNodeCount = (int) Math.pow(degree, height - 1); nonLeafNodeCount = nodeCount - leafNodeCount; } /** * Reads and skips the header of the tree returning the 
total number of * bytes skipped from the stream. This is used as a preparatory function to * read all elements in the tree without the index part. * * @param in * @return - Total number of bytes read and skipped * @throws IOException */ public static int skipHeader(InputStream in) throws IOException { DataInput dataIn = in instanceof DataInput ? (DataInput) in : new DataInputStream(in); int skippedBytes = 0; /* int treeSize = */dataIn.readInt(); skippedBytes += 4; int height = dataIn.readInt(); skippedBytes += 4; if (height == 0) { // Empty tree. No results return skippedBytes; } int degree = dataIn.readInt(); skippedBytes += 4; int nodeCount = (int) ((powInt(degree, height) - 1) / (degree - 1)); /* int elementCount = */dataIn.readInt(); skippedBytes += 4; // Skip all nodes dataIn.skipBytes(nodeCount * NodeSize); skippedBytes += nodeCount * NodeSize; return skippedBytes; } /** * Returns the total size of the header (including the index) in bytes. * Assume that the input is aligned to the start offset of the tree * (header). Note that the part of the header is consumed from the given * input to be able to determine header size. * * @param in * @return * @throws IOException */ public static int getHeaderSize(DataInput in) throws IOException { int header_size = 0; /* int treeSize = */in.readInt(); header_size += 4; int height = in.readInt(); header_size += 4; if (height == 0) { // Empty tree. 
No results return header_size; } int degree = in.readInt(); header_size += 4; int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1)); /* int elementCount = */in.readInt(); header_size += 4; // Add the size of all nodes header_size += nodeCount * NodeSize; return header_size; } /** * Returns total number of elements * * @return */ public int getElementCount() { return elementCount; } /** * Returns the MBR of the root * * @return */ public Prism getMBR() { Prism mbr = null; try { // MBR of the tree is the MBR of the root node structure.seek(0); mbr = new Prism(); /* int offset = */structure.readInt(); mbr.readFields(structure); } catch (IOException e) { e.printStackTrace(); } return mbr; } /** * Reads and returns the element with the given index * * @param i * @return * @throws IOException */ public T readElement(int i) { Iterator<T> iter = iterator(); while (i-- > 0 && iter.hasNext()) { iter.next(); } return iter.next(); } public void setStockObject(T stockObject) { this.stockObject = stockObject; } /** * Create Prisms that together pack all points in sample such that each * Prism contains roughly the same number of points. In other words it tries * to balance number of points in each Prism. Works similar to the logic of * bulkLoad but does only one level of Prisms. 
* * @param samples * @param gridInfo * - Used as a hint for number of Prisms per row or column * @return */ public static Prism[] packInPrisms(GridInfo gridInfo, final Point3d[] sample) { Prism[] Prisms = new Prism[gridInfo.layers * gridInfo.columns * gridInfo.rows]; int iPrism = 0; // Sort in t direction final IndexedSortable sortableT = new IndexedSortable() { @Override public void swap(int i, int j) { Point3d temp = sample[i]; sample[i] = sample[j]; sample[j] = temp; } @Override public int compare(int i, int j) { if (sample[i].t < sample[j].t) return -1; if (sample[i].t > sample[j].t) return 1; return 0; } }; // Sort in x direction final IndexedSortable sortableX = new IndexedSortable() { @Override public void swap(int i, int j) { Point3d temp = sample[i]; sample[i] = sample[j]; sample[j] = temp; } @Override public int compare(int i, int j) { if (sample[i].x < sample[j].x) return -1; if (sample[i].x > sample[j].x) return 1; return 0; } }; // Sort in y direction final IndexedSortable sortableY = new IndexedSortable() { @Override public void swap(int i, int j) { Point3d temp = sample[i]; sample[i] = sample[j]; sample[j] = temp; } @Override public int compare(int i, int j) { if (sample[i].y < sample[j].y) return -1; if (sample[i].y > sample[j].y) return 1; return 0; } }; final QuickSort quickSort = new QuickSort(); quickSort.sort(sortableT, 0, sample.length); //tony int tindex1 = 0; double t1 = gridInfo.t1; for (int lay = 0; lay < gridInfo.layers; lay++){ int tindex2 = sample.length * (lay + 1) / gridInfo.layers; double t2 = lay == gridInfo.layers - 1 ? gridInfo.t2 : sample[tindex2 - 1].t; quickSort.sort(sortableX, tindex1, tindex2); int xindex1 = tindex1; double x1 = gridInfo.x1; for (int col = 0; col < gridInfo.columns; col++) { int xindex2 = sample.length * (col + 1) / gridInfo.columns; // Determine extents for all Prisms in this column double x2 = col == gridInfo.columns - 1 ? 
gridInfo.x2 : sample[xindex2 - 1].x; // Sort all points in this column according to its y-coordinate quickSort.sort(sortableY, xindex1, xindex2); // Create Prisms in this column double y1 = gridInfo.y1; for (int row = 0; row < gridInfo.rows; row++) { int yindex2 = xindex1 + (xindex2 - xindex1) * (row + 1) / gridInfo.rows; double y2 = row == gridInfo.rows - 1 ? gridInfo.y2 : sample[yindex2 - 1].y; Prisms[iPrism++] = new Prism(t1, x1, y1, t2, x2, y2); y1 = y2; } xindex1 = xindex2; x1 = x2; } } return Prisms; } /** * An iterator that goes over all elements in the tree in no particular * order * * @author tonyren, eldawy * */ class RTreeIterator implements Iterator<T> { /** Current offset in the data stream */ int offset; /** Temporary text that holds one line to deserialize objects */ Text line; /** A stock object to read from stream */ T _stockObject; /** A reader to read lines from the tree */ LineReader reader; RTreeIterator() throws IOException { offset = TreeHeaderSize + NodeSize * RTree.this.nodeCount; _stockObject = (T) RTree.this.stockObject.clone(); line = new Text(); RTree.this.data.seek(offset + RTree.this.treeStartOffset); reader = new LineReader(RTree.this.data); } @Override public boolean hasNext() { return offset < RTree.this.treeSize; } @Override public T next() { try { offset += reader.readLine(line); _stockObject.fromText(line); } catch (IOException e) { e.printStackTrace(); return null; } return _stockObject; } @Override public void remove() { throw new RuntimeException("Not supported"); } } /** * Skip bytes until the end of line * * @param bytes * @param startOffset * @return */ public static int skipToEOL(byte[] bytes, int startOffset) { int eol = startOffset; while (eol < bytes.length && (bytes[eol] != '\n' && bytes[eol] != '\r')) eol++; while (eol < bytes.length && (bytes[eol] == '\n' || bytes[eol] == '\r')) eol++; return eol; } @Override public Iterator<T> iterator() { try { return new RTreeIterator(); } catch (IOException e) { 
e.printStackTrace(); } return null; } /** * Given a block size, record size and a required tree degree, this function * calculates the maximum number of records that can be stored in this block * taking into consideration the overhead needed by node structure. * * @param blockSize * @param degree * @param recordSize * @return */ public static int getBlockCapacity(long blockSize, int degree, int recordSize) { double a = (double) NodeSize / (degree - 1); double ratio = (blockSize + a) / (recordSize + a); double break_even_height = Math.log(ratio) / Math.log(degree); double h_min = Math.floor(break_even_height); double capacity1 = Math.floor(Math.pow(degree, h_min)); double structure_size = 4 + TreeHeaderSize + a * (capacity1 * degree - 1); double capacity2 = Math .floor((blockSize - structure_size) / recordSize); return Math.max((int) capacity1, (int) capacity2); } /** * Searches the RTree starting from the given start position. This is either * a node number or offset of an element. If it's a node number, it performs * the search in the subtree rooted at this node. If it's an offset number, * it searches only the object found there. It is assumed that the * openQuery() has been called before this function and that endQuery() will * be called afterwards. * * @param query_mbr * @param output * @param start * - where to start searching * @param end * - where to end searching. Only used when start is an offset of * an object. 
* @return * @throws IOException */ protected int searchColumnar(Shape query_shape, ResultCollector<Writable> output, int start, int end, String field) throws IOException { if (output == null){ throw new RuntimeException("Output is NULL"); } //build search field int fieldOffset = 0; int fieldSize = -1; FIELD_TYPE fieldType = FIELD_TYPE.NULL; //get fields Field[] fields = stockObject.getClass().getDeclaredFields(); for (int i = 0; i < fields.length; i++){ if (fields[i].getName().equals(field)){ if ( fields[i].getType().equals(Integer.TYPE)){ fieldSize = 4; fieldType = FIELD_TYPE.Integer; } else if ( fields[i].getType().equals(Long.TYPE)){ fieldSize = 8; fieldType = FIELD_TYPE.Long; } else if ( fields[i].getType().equals(Double.TYPE)){ fieldSize = 8; fieldType = FIELD_TYPE.Double; } else{ //throw new RuntimeException("Unsupported type: " + fields[i].getType()); } break; } else{ if ( fields[i].getType().equals(Integer.TYPE)){ fieldOffset += elementCount * 4; } else if ( fields[i].getType().equals(Long.TYPE) || fields[i].getType().equals(Double.TYPE)){ fieldOffset += elementCount * 8; } else{ //throw new RuntimeException("Unsupported type: " + fields[i].getType()); } } } Prism query_mbr = query_shape.getMBR(); int resultSize = 0; // Special case for an empty tree if (height == 0) return 0; Stack<Integer> toBeSearched = new Stack<Integer>(); // Start from the given node toBeSearched.push(start); if (start >= nodeCount) { toBeSearched.push(end); } Prism node_mbr = new Prism(); // Holds one data line from tree data Text line = new Text2(); while (!toBeSearched.isEmpty()) { int searchNumber = toBeSearched.pop(); int mbrsToTest = searchNumber == 0 ? 1 : degree; if (searchNumber < nodeCount) { long nodeOffset = NodeSize * searchNumber; structure.seek(nodeOffset); int dataOffset = structure.readInt(); for (int i = 0; i < mbrsToTest; i++) { node_mbr.readFields(structure); int lastOffset = (searchNumber + i) == nodeCount - 1 ? 
elementCount - 1 : structure.readInt(); if (query_mbr.contains(node_mbr)) { // The node is full contained in the query range. // Save the time and do full scan for this node // Checks if this node is the last node in its level // This can be easily detected because the next node in // the level // order traversal will be the first node in the next // level // which means it will have an offset less than this // node if (lastOffset <= dataOffset) lastOffset = elementCount; data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize); for (int j = 0; j < lastOffset - dataOffset; j++){ switch (fieldType){ case Integer: output.collect(new IntWritable(data.readInt())); break; case Long: output.collect(new LongWritable(data.readLong())); break; case Double: output.collect(new DoubleWritable(data.readDouble())); break; default: output.collect(new Point3d(data.readDouble(), data.readDouble(), data.readDouble())); break; } resultSize++; } } else if (query_mbr.isIntersected(node_mbr)) { // Node partially overlaps with query. 
Go deep under // this node if (searchNumber < nonLeafNodeCount) { // Search child nodes toBeSearched.push((searchNumber + i) * degree + 1); } else { // Search all elements in this node //toBeSearched.push(dataOffset); // Checks if this node is the last node in its level // This can be easily detected because the next node // in the level // order traversal will be the first node in the // next level // which means it will have an offset less than this // node if (lastOffset <= dataOffset) lastOffset = elementCount; //toBeSearched.push(lastOffset); data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + dataOffset * IndexUnitSize); boolean report[] = new boolean[lastOffset - dataOffset]; Point3d point = new Point3d(); for (int j = 0; j < lastOffset - dataOffset; j++){ point.t = data.readDouble(); point.x = data.readDouble(); point.y = data.readDouble(); if (point.isIntersected(query_shape)){ report[j] = true; } else report[j] = false; } data.seek(treeStartOffset + TreeHeaderSize + nodeCount * NodeSize + elementCount * IndexUnitSize + fieldOffset + dataOffset * fieldSize); for (int j = 0; j < lastOffset - dataOffset; j++){ if (report[j]){ switch (fieldType){ case Integer: output.collect(new IntWritable(data.readInt())); break; case Long: output.collect(new LongWritable(data.readLong())); break; case Double: output.collect(new DoubleWritable(data.readDouble())); break; default: output.collect(new Point3d(data.readDouble(), data.readDouble(), data.readDouble())); break; } resultSize++; } } } } dataOffset = lastOffset; } } else { LOG.error("searchNumber > nodeCount, something is wrong"); int firstOffset, lastOffset; // Search for data items (records) lastOffset = searchNumber; firstOffset = toBeSearched.pop(); data.seek(firstOffset + treeStartOffset); LineReader lineReader = new LineReader(data); while (firstOffset < lastOffset) { firstOffset += lineReader.readLine(line); stockObject.fromText(line); if (stockObject.isIntersected(query_shape)) { resultSize++; 
if (output != null) output.collect(stockObject); } } } } return resultSize; } protected int search(Shape query_shape, ResultCollector<T> output, int start, int end) throws IOException { Prism query_mbr = query_shape.getMBR(); int resultSize = 0; // Special case for an empty tree if (height == 0) return 0; Stack<Integer> toBeSearched = new Stack<Integer>(); // Start from the given node toBeSearched.push(start); if (start >= nodeCount) { toBeSearched.push(end); } Prism node_mbr = new Prism(); // Holds one data line from tree data Text line = new Text2(); while (!toBeSearched.isEmpty()) { int searchNumber = toBeSearched.pop(); int mbrsToTest = searchNumber == 0 ? 1 : degree; if (searchNumber < nodeCount) { long nodeOffset = NodeSize * searchNumber; structure.seek(nodeOffset); int dataOffset = structure.readInt(); for (int i = 0; i < mbrsToTest; i++) { node_mbr.readFields(structure); int lastOffset = (searchNumber + i) == nodeCount - 1 ? treeSize : structure.readInt(); if (query_mbr.contains(node_mbr)) { // The node is full contained in the query range. // Save the time and do full scan for this node toBeSearched.push(dataOffset); // Checks if this node is the last node in its level // This can be easily detected because the next node in // the level // order traversal will be the first node in the next // level // which means it will have an offset less than this // node if (lastOffset <= dataOffset) lastOffset = treeSize; toBeSearched.push(lastOffset); } else if (query_mbr.isIntersected(node_mbr)) { // Node partially overlaps with query. 
Go deep under // this node if (searchNumber < nonLeafNodeCount) { // Search child nodes toBeSearched.push((searchNumber + i) * degree + 1); } else { // Search all elements in this node toBeSearched.push(dataOffset); // Checks if this node is the last node in its level // This can be easily detected because the next node // in the level // order traversal will be the first node in the // next level // which means it will have an offset less than this // node if (lastOffset <= dataOffset) lastOffset = treeSize; toBeSearched.push(lastOffset); } } dataOffset = lastOffset; } } else { int firstOffset, lastOffset; // Search for data items (records) lastOffset = searchNumber; firstOffset = toBeSearched.pop(); data.seek(firstOffset + treeStartOffset); LineReader lineReader = new LineReader(data); while (firstOffset < lastOffset) { firstOffset += lineReader.readLine(line); stockObject.fromText(line); if (stockObject.isIntersected(query_shape)) { resultSize++; if (output != null) output.collect(stockObject); } } } } return resultSize; } /** * Performs a range query over this tree using the given query range. * * @param query * - The query Prism to use (TODO make it any shape not just * Prism) * @param output * - Shapes found are reported to this output. If null, results * are not reported * @return - Total number of records found */ public int searchColumnar(Shape query, ResultCollector<Writable> output, String field) { int resultCount = 0; try { resultCount = searchColumnar(query, output, 0, 0, field); } catch (IOException e) { e.printStackTrace(); } return resultCount; } public int search(Shape query, ResultCollector<T> output, String field) { int resultCount = 0; try { resultCount = search(query, output, 0, 0); } catch (IOException e) { e.printStackTrace(); } return resultCount; } /** * k nearest neighbor query Note: Current algorithm is approximate just for * simplicity. 
Writing an exact algorithm is on our TODO list.
   *
   * @param qt time coordinate of the query point
   * @param qx x coordinate of the query point
   * @param qy y coordinate of the query point
   * @param k number of nearest neighbors requested
   * @param output collector receiving each result shape with its distance;
   *        may be null to only count results
   * @return number of results reported (min of k and total matching shapes)
   */
  public int knn(final double qt, final double qx, final double qy, int k,
      final ResultCollector2<T, Double> output) {
    // Initial guess: a circle whose area would contain k elements if data
    // were uniformly distributed over the tree MBR.
    double query_area = ((getMBR().x2 - getMBR().x1) * (getMBR().y2 - getMBR().y1)) * k
        / getElementCount();
    double query_radius = Math.sqrt(query_area / Math.PI);

    boolean result_correct;
    final Vector<Double> distances = new Vector<Double>();
    final Vector<T> shapes = new Vector<T>();
    // Find results in the range and increase this range if needed to ensure
    // correctness of the answer.
    do {
      // Reset results from any previous (too small) radius.
      distances.clear();
      shapes.clear();
      // Query range is a square of side query_radius centered at (qx, qy).
      // NOTE(review): only x/y of the Prism are set here even though the
      // distance uses qt as well — confirm the time dimension is meant to
      // be unbounded in the range query.
      Prism queryRange = new Prism();
      queryRange.x1 = qx - query_radius / 2;
      queryRange.y1 = qy - query_radius / 2;
      queryRange.x2 = qx + query_radius / 2;
      queryRange.y2 = qy + query_radius / 2;
      // Retrieve all results in range; clone each shape because the stock
      // object is reused by the iterator.
      searchColumnar(queryRange, new ResultCollector<Writable>() {
        @Override
        public void collect(Writable shape) {
          distances.add(((T) shape).distanceTo(qt, qx, qy));
          shapes.add((T) ((T) shape).clone());
        }
      }, null);
      if (shapes.size() < k) {
        // Didn't find k elements in range; double the radius to get more
        // items — unless the whole tree was already returned.
        if (shapes.size() == getElementCount()) {
          // Already returned all possible elements
          result_correct = true;
        } else {
          query_radius *= 2;
          result_correct = false;
        }
      } else {
        // Sort items by distance to get the kth neighbor
        IndexedSortable s = new IndexedSortable() {
          @Override
          public void swap(int i, int j) {
            // Keep distances and shapes aligned while sorting.
            double temp_distance = distances.elementAt(i);
            distances.set(i, distances.elementAt(j));
            distances.set(j, temp_distance);

            T temp_shape = shapes.elementAt(i);
            shapes.set(i, shapes.elementAt(j));
            shapes.set(j, temp_shape);
          }

          @Override
          public int compare(int i, int j) {
            // Equality is not important to check because items with the
            // same distance can be ordered either way.
            if (distances.elementAt(i) < distances.elementAt(j))
              return -1;
            return 1;
          }
        };
        IndexedSorter sorter = new QuickSort();
        sorter.sort(s, 0, shapes.size());
        if (distances.elementAt(k - 1) > query_radius) {
          // kth neighbor lies outside the square's inscribed region, so the
          // answer may be incomplete; grow the radius and retry.
          // NOTE(review): elementAt(k) throws ArrayIndexOutOfBoundsException
          // when exactly k shapes were found; elementAt(k - 1) looks
          // intended — verify.
          result_correct = false;
          query_radius = distances.elementAt(k);
        } else {
          result_correct = true;
        }
      }
    } while (!result_correct);

    int result_size = Math.min(k, shapes.size());
    if (output != null) {
      for (int i = 0; i < result_size; i++) {
        output.collect(shapes.elementAt(i), distances.elementAt(i));
      }
    }
    return result_size;
  }

  /**
   * Spatial join of two in-memory R-trees: materializes all records of both
   * trees into arrays (cloning each record) and delegates to the
   * plane-sweep join.
   *
   * @param R first tree (records cloned into memory)
   * @param S second tree (records cloned into memory)
   * @param output collector for each overlapping pair; may be null
   * @return number of overlapping pairs found
   * @throws IOException on read errors from either tree
   */
  protected static <S1 extends Shape, S2 extends Shape> int spatialJoinMemory(
      final RTree<S1> R, final RTree<S2> S,
      final ResultCollector2<S1, S2> output) throws IOException {
    S1[] rs = (S1[]) Array.newInstance(R.stockObject.getClass(), R.getElementCount());
    int i = 0;
    for (S1 r : R)
      rs[i++] = (S1) r.clone();
    // Sanity check: iterator must yield exactly getElementCount() records.
    if (i != rs.length)
      throw new RuntimeException(i + "!=" + rs.length);

    S2[] ss = (S2[]) Array.newInstance(S.stockObject.getClass(), S.getElementCount());
    i = 0;
    for (S2 s : S)
      ss[i++] = (S2) s.clone();
    if (i != ss.length)
      throw new RuntimeException(i + "!=" + ss.length);

    return SpatialAlgorithms.SpatialJoin_planeSweep(rs, ss, output);
  }

  // LRU cache used to avoid deserializing the same records again and again.
  // Backed by an access-ordered LinkedHashMap (third ctor arg true); when an
  // entry is evicted its value is parked in unusedEntry so the caller can
  // recycle the allocated array via popUnusedEntry().
  static class LruCache<A, B> extends LinkedHashMap<A, B> {
    private static final long serialVersionUID = 702044567572914544L;
    // Maximum number of live entries before eviction kicks in.
    private final int maxEntries;
    // Most recently evicted value, held for reuse; null if already claimed.
    private B unusedEntry;

    public LruCache(final int maxEntries) {
      super(maxEntries + 1, 1.0f, true);
      this.maxEntries = maxEntries;
    }

    @Override
    protected boolean removeEldestEntry(final Map.Entry<A, B> eldest) {
      if (super.size() > maxEntries) {
        // Remember the evicted value so its storage can be recycled.
        unusedEntry = eldest.getValue();
        return true;
      }
      return false;
    }

    /** Returns and clears the last evicted value, or null if none pending. */
    public B popUnusedEntry() {
      B temp = unusedEntry;
      unusedEntry = null;
      return temp;
    }
  }

  /**
   * Performs a spatial join between records in two disk-resident R-trees by
   * synchronized traversal: node pairs to compare are kept in a priority
   * queue keyed on (r_node << 32 | s_node) so reads proceed in increasing
   * offset order, and leaf records are cached in LRU caches to avoid
   * re-deserializing.
   *
   * @param R first tree, read from disk
   * @param S second tree, read from disk
   * @param output collector for each overlapping record pair; may be null
   * @return number of overlapping pairs found
   * @throws IOException on read errors from either tree
   */
  protected static <S1 extends Shape, S2 extends Shape> int spatialJoinDisk(
      final RTree<S1> R, final RTree<S2> S,
      final ResultCollector2<S1, S2> output) throws IOException {
    // Reserve locations for nodes MBRs and data offset [start, end)
    final Prism[] r_nodes = new Prism[R.degree];
    for (int i = 0; i < r_nodes.length; i++)
      r_nodes[i] = new Prism();
    final int[] r_data_offset = new int[R.degree + 1];

    final Prism[] s_nodes = new Prism[S.degree];
    for (int i = 0; i < s_nodes.length; i++)
      s_nodes[i] = new Prism();
    final int[] s_data_offset = new int[S.degree + 1];

    // Pairs of node ids packed into one long; custom lessThan orders them
    // ascending so file seeks move mostly forward.
    PriorityQueue<Long> nodesToJoin = new PriorityQueue<Long>() {
      {
        initialize(R.leafNodeCount + S.leafNodeCount);
      }

      @Override
      protected boolean lessThan(Object a, Object b) {
        return ((Long) a) < ((Long) b);
      }
    };

    // Start with the pair of root nodes (0, 0).
    nodesToJoin.put(0L);

    LruCache<Integer, Shape[]> r_records_cache = new LruCache<Integer, Shape[]>(
        R.degree * 2);
    LruCache<Integer, Shape[]> s_records_cache = new LruCache<Integer, Shape[]>(
        S.degree * R.degree * 4);

    Text line = new Text2();

    int result_count = 0;

    LineReader r_lr = null, s_lr = null;
    // Last offset read from r and s; used to detect when the reader is
    // already positioned at the next record block (avoids a seek).
    int r_last_offset = 0;
    int s_last_offset = 0;

    while (nodesToJoin.size() > 0) {
      long nodes_to_join = nodesToJoin.pop();
      // Unpack the node pair: high 32 bits = R node, low 32 bits = S node.
      int r_node = (int) (nodes_to_join >>> 32);
      int s_node = (int) (nodes_to_join & 0xFFFFFFFF);

      // Read all R nodes (root holds 1 entry, other nodes hold degree).
      int r_mbrsToTest = r_node == 0 ? 1 : R.degree;
      boolean r_leaf = r_node * R.degree + 1 >= R.nodeCount;

      long nodeOffset = NodeSize * r_node;
      R.structure.seek(nodeOffset);
      for (int i = 0; i < r_mbrsToTest; i++) {
        r_data_offset[i] = R.structure.readInt();
        r_nodes[i].readFields(R.structure);
      }
      // End offset of the last entry: either the next node's start or the
      // end of the tree data.
      r_data_offset[r_mbrsToTest] = (r_node + r_mbrsToTest) == R.nodeCount ? R.treeSize
          : R.structure.readInt();

      // Read all S nodes
      int s_mbrsToTest = s_node == 0 ? 1 : S.degree;
      boolean s_leaf = s_node * S.degree + 1 >= S.nodeCount;

      if (r_leaf != s_leaf) {
        // This case happens when the two trees are of different heights:
        // keep the leaf side fixed and only descend the taller tree.
        if (r_leaf)
          r_mbrsToTest = 1;
        else
          s_mbrsToTest = 1;
      }

      nodeOffset = NodeSize * s_node;
      S.structure.seek(nodeOffset);
      for (int i = 0; i < s_mbrsToTest; i++) {
        s_data_offset[i] = S.structure.readInt();
        s_nodes[i].readFields(S.structure);
      }
      s_data_offset[s_mbrsToTest] = (s_node + s_mbrsToTest) == S.nodeCount ? S.treeSize
          : S.structure.readInt();

      // Find overlapping nodes by Cartesian product
      for (int i = 0; i < r_mbrsToTest; i++) {
        for (int j = 0; j < s_mbrsToTest; j++) {
          if (r_nodes[i].isIntersected(s_nodes[j])) {
            if (r_leaf && s_leaf) {
              // Reached leaf nodes in both trees. Start comparing records.
              int r_start_offset = r_data_offset[i];
              int r_end_offset = r_data_offset[i + 1];

              int s_start_offset = s_data_offset[j];
              int s_end_offset = s_data_offset[j + 1];

              // /////////////////////////////////////////////////////////////////
              // Read or retrieve r_records
              Shape[] r_records = r_records_cache.get(r_start_offset);
              if (r_records == null) {
                int cache_key = r_start_offset;
                // Recycle an evicted array if one is available.
                r_records = r_records_cache.popUnusedEntry();
                if (r_records == null) {
                  r_records = new Shape[R.degree * 2];
                }

                // Need to read it from stream; seek only if the reader is
                // not already positioned at r_start_offset.
                if (r_last_offset != r_start_offset) {
                  long seekTo = r_start_offset + R.treeStartOffset;
                  R.data.seek(seekTo);
                  r_lr = new LineReader(R.data);
                }
                int record_i = 0;
                while (r_start_offset < r_end_offset) {
                  r_start_offset += r_lr.readLine(line);
                  if (r_records[record_i] == null)
                    r_records[record_i] = R.stockObject.clone();
                  r_records[record_i].fromText(line);
                  record_i++;
                }
                r_last_offset = r_start_offset;
                // Nullify other records (array may be recycled and longer
                // than the number of records just read).
                while (record_i < r_records.length)
                  r_records[record_i++] = null;
                r_records_cache.put(cache_key, r_records);
              }

              // Read or retrieve s_records
              Shape[] s_records = s_records_cache.get(s_start_offset);
              if (s_records == null) {
                int cache_key = s_start_offset;

                // Need to read it from stream
                if (s_lr == null || s_last_offset != s_start_offset) {
                  // Need to reposition s_lr (LineReader of S)
                  long seekTo = s_start_offset + S.treeStartOffset;
                  S.data.seek(seekTo);
                  s_lr = new LineReader(S.data);
                }
                s_records = s_records_cache.popUnusedEntry();
                if (s_records == null) {
                  s_records = new Shape[S.degree * 2];
                }
                int record_i = 0;
                while (s_start_offset < s_end_offset) {
                  s_start_offset += s_lr.readLine(line);
                  if (s_records[record_i] == null)
                    s_records[record_i] = S.stockObject.clone();
                  s_records[record_i].fromText(line);
                  record_i++;
                }
                // Nullify other records
                while (record_i < s_records.length)
                  s_records[record_i++] = null;
                // Put in cache
                s_records_cache.put(cache_key, s_records);
                s_last_offset = s_start_offset;
              }

              // Do Cartesian product between records to find overlapping
              // pairs (arrays are null-terminated).
              for (int i_r = 0; i_r < r_records.length && r_records[i_r] != null; i_r++) {
                for (int i_s = 0; i_s < s_records.length && s_records[i_s] != null; i_s++) {
                  if (r_records[i_r].isIntersected(s_records[i_s])) {
                    result_count++;
                    if (output != null) {
                      output.collect((S1) r_records[i_r],
                          (S2) s_records[i_s]);
                    }
                  }
                }
              }
              // /////////////////////////////////////////////////////////////////
            } else {
              // Add a new pair to node pairs to be tested.
              // Go down one level if possible; a side already at a leaf
              // keeps its node id unchanged.
              int new_r_node, new_s_node;
              if (!r_leaf) {
                new_r_node = (r_node + i) * R.degree + 1;
              } else {
                new_r_node = r_node + i;
              }
              if (!s_leaf) {
                new_s_node = (s_node + j) * S.degree + 1;
              } else {
                new_s_node = s_node + j;
              }
              long new_pair = (((long) new_r_node) << 32) | new_s_node;
              nodesToJoin.put(new_pair);
            }
          }
        }
      }
    }
    return result_count;
  }

  /**
   * Spatial join dispatcher: uses the disk-based join when both trees were
   * loaded from disk (treeStartOffset >= 0), otherwise the in-memory join.
   *
   * @param R first tree
   * @param S second tree
   * @param output collector for overlapping pairs; may be null
   * @return number of overlapping pairs found
   * @throws IOException on read errors
   */
  public static <S1 extends Shape, S2 extends Shape> int spatialJoin(
      final RTree<S1> R, final RTree<S2> S,
      final ResultCollector2<S1, S2> output) throws IOException {
    if (R.treeStartOffset >= 0 && S.treeStartOffset >= 0) {
      // Both trees are read from disk
      return spatialJoinDisk(R, S, output);
    } else {
      return spatialJoinMemory(R, S, output);
    }
  }

  /**
   * Calculate the storage overhead required to build an RTree
for the given
   * number of elements and node degree.
   *
   * @return storage overhead in bytes (tree-size int + header + all nodes)
   */
  public static int calculateStorageOverhead(int elementCount, int degree) {
    // Update storage overhead
    int height = Math.max(1,
        (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
    int leafNodeCount = (int) Math.pow(degree, height - 1);
    if (elementCount <= 2 * leafNodeCount && height > 1) {
      // Tree is shallow enough to drop one level.
      // NOTE(review): calculateTreeStorage uses a strict '<' in the same
      // test; one of the two is likely off by one — verify which.
      height--;
      leafNodeCount = (int) Math.pow(degree, height - 1);
    }
    // Geometric series: total nodes of a full tree of this height/degree.
    int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    // The extra 4 bytes hold the tree size written before the header.
    int storage_overhead = 4 + TreeHeaderSize + nodeCount * NodeSize;
    return storage_overhead;
  }

  /**
   * Find log to the base 2 quickly (floor of log2 via bit tests).
   *
   * @param x value to take the logarithm of; returns -1 for x == 0
   * @return floor(log2(x)), or -1 when x is zero
   */
  public static int log2Floor(int x) {
    if (x == 0)
      return -1;
    int pos = 0;
    // Binary-search the highest set bit, 16/8/4/2/1 bits at a time.
    if ((x & 0xFFFF0000) != 0) {
      pos += 16;
      x >>>= 16;
    }
    if ((x & 0xFF00) != 0) {
      pos += 8;
      x >>>= 8;
    }
    if ((x & 0xF0) != 0) {
      pos += 4;
      x >>>= 4;
    }
    if ((x & 0xC) != 0) {
      pos += 2;
      x >>>= 2;
    }
    if ((x & 0x2) != 0) {
      pos++;
      x >>>= 1;
    }
    return pos;
  }

  /**
   * Integer exponentiation by squaring (overflow is not checked).
   *
   * @param base the base
   * @param exponent non-negative exponent
   * @return base raised to exponent, in int arithmetic
   */
  public static int powInt(int base, int exponent) {
    int pow = 1;
    while (exponent != 0) {
      if ((exponent & 1) != 0)
        pow *= base;
      exponent >>>= 1;
      base *= base;
    }
    return pow;
  }

  // Precomputed natural logs for small ints, used by fastLog.
  // Note LogLookupTable[0] is Math.log(0) == -Infinity.
  private static final double LogLookupTable[];

  static {
    int count = 100;
    LogLookupTable = new double[count];
    for (int i = 0; i < count; i++) {
      LogLookupTable[i] = Math.log(i);
    }
  }

  /**
   * Natural log of an int, served from the lookup table when x < 100,
   * otherwise falling back to Math.log.
   */
  public static double fastLog(int x) {
    if (x < LogLookupTable.length) {
      return LogLookupTable[x];
    }
    return Math.log(x);
  }

  /**
   * Approximate a^b via IEEE-754 bit manipulation of the double exponent
   * field (fast but inexact — callers here only need a rough estimate).
   */
  public static double fastPow(double a, double b) {
    final long tmp = (long) (9076650 * (a - 1)
        / (a + 1 + 4 * (Math.sqrt(a))) * b + 1072632447);
    return Double.longBitsToDouble(tmp << 32);
  }

  /**
   * Find the best (minimum) degree that can index the given number of
   * records such that the whole tree structure fits in the given bytes.
   *
   * @param bytesAvailable bytes available to store the tree structure
   * @param recordCount number of records to index
   * @return the minimum feasible degree, or Integer.MAX_VALUE if no
   *         candidate height yields a structure that fits
   */
  public static int findBestDegree(int bytesAvailable, int recordCount) {
    // Maximum number of nodes that can be stored in the bytesAvailable
    int maxNodeCount = (bytesAvailable - TreeHeaderSize) / NodeSize;

    // Calculate maximum possible tree height to store the given record
    // count
    int h_max = log2Floor(recordCount / 2);

    // Minimum height is always 1 (degree = recordCount)
    int h_min = 2;

    // Best degree is the minimum degree
    int d_best = Integer.MAX_VALUE;

    double log_recordcount_e = Math.log(recordCount / 2);
    double log_recordcount_2 = log_recordcount_e / fastLog(2);

    // Find the best height among all possible heights
    for (int h = h_min; h <= h_max; h++) {
      // Find the minimum degree for the given height (h).
      // This approximation is good enough for our case.
      // Not proven but tested with millions of random cases
      int d_min = (int) Math.ceil(fastPow(2.0, log_recordcount_2 / (h + 1)));

      // Some heights are invalid, recalculate the height to ensure it's
      // valid
      int h_recalculated = (int) Math.floor(log_recordcount_e / fastLog(d_min));
      if (h != h_recalculated)
        continue;

      int nodeCount = (int) ((powInt(d_min, h + 1) - 1) / (d_min - 1));
      if (nodeCount < maxNodeCount && d_min < d_best)
        d_best = d_min;
    }

    return d_best;
  }

  /**
   * Total bytes needed for the tree structure (header plus all nodes) for
   * the given element count and degree. Unlike calculateStorageOverhead,
   * this does not include the leading 4-byte tree size.
   */
  public static int calculateTreeStorage(int elementCount, int degree) {
    int height = Math.max(1,
        (int) Math.ceil(Math.log(elementCount) / Math.log(degree)));
    int leafNodeCount = (int) Math.pow(degree, height - 1);
    // NOTE(review): strict '<' here vs '<=' in calculateStorageOverhead —
    // confirm which comparison is intended.
    if (elementCount < 2 * leafNodeCount && height > 1) {
      height--;
      leafNodeCount = (int) Math.pow(degree, height - 1);
    }
    int nodeCount = (int) ((Math.pow(degree, height) - 1) / (degree - 1));
    return TreeHeaderSize + nodeCount * NodeSize;
  }
}
apache-2.0
gregwhitaker/catnap
catnap-springboot/src/main/java/com/github/gregwhitaker/catnap/springboot/annotation/EnableCatnap.java
1203
/* * Copyright 2016 Greg Whitaker * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.gregwhitaker.catnap.springboot.annotation; import com.github.gregwhitaker.catnap.core.annotation.CatnapAnnotation; import com.github.gregwhitaker.catnap.springboot.config.CatnapWebMvcConfigurerAdapter; import org.springframework.context.annotation.Import; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE}) @CatnapAnnotation @Import({CatnapWebMvcConfigurerAdapter.class}) public @interface EnableCatnap { }
apache-2.0
dzielins42/urban-bear
src/main/java/pl/dzielins42/dmtools/generator/religion/BasicPantheonGenerator.java
5109
package pl.dzielins42.dmtools.generator.religion;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import pl.dzielins42.dmtools.model.enumeration.Alignment;
import pl.dzielins42.dmtools.model.enumeration.Gender;
import pl.dzielins42.dmtools.model.religion.Deity;
import pl.dzielins42.dmtools.model.religion.Domain;
import pl.dzielins42.dmtools.model.religion.Pantheon;
import pl.dzielins42.dmtools.util.ProbabilityDistributionTable;

/**
 * Basic random pantheon generator: creates a pantheon containing a random
 * number of independently generated deities, using the random source, domain
 * probability table and name generator supplied in the options.
 */
public class BasicPantheonGenerator implements PantheonGenerator<BasicPantheonGeneratorOptions> {

    // Highest divine rank a generated deity can have (ranks are 0..25).
    protected final int MAX_DIVINE_RANK = 25;

    /**
     * Generates a pantheon with between minDeitiesNumber and
     * maxDeitiesNumber deities (inclusive), each generated independently.
     *
     * @param options generation options; must be non-null with non-null
     *        random source, domain probability table and name generator
     * @return the generated pantheon
     * @throws IllegalArgumentException if options or any required component
     *         of it is null
     */
    public Pantheon generate(BasicPantheonGeneratorOptions options) {
        // Validate options early with an explicit message instead of a bare
        // IllegalArgumentException, so misconfiguration is diagnosable.
        if (options == null || options.getRandom() == null || options.getDomainsProbability() == null
                || options.getNameGenerator() == null) {
            throw new IllegalArgumentException(
                    "options must be non-null and provide random, domainsProbability and nameGenerator");
        }

        // Generate number of deities as random number between
        // minDeitiesNumber and maxDeitiesNumber (both inclusive)
        int numberOfDeities = options.getMinDeitiesNumber();
        if (options.getMinDeitiesNumber() != options.getMaxDeitiesNumber()) {
            numberOfDeities += options.getRandom()
                    .nextInt(options.getMaxDeitiesNumber() - options.getMinDeitiesNumber() + 1);
        }

        // Generate each deity independently
        List<Deity> deities = new ArrayList<Deity>(numberOfDeities);
        Deity deity;
        for (int i = 0; i < numberOfDeities; i++) {
            deity = generateDeity(options);
            deities.add(deity);
        }

        // NOTE: pantheon name is currently a fixed placeholder.
        return new Pantheon("The Pantheon", deities);
    }

    /**
     * Generates a single deity: random rank, a set of distinct random
     * domains, an alignment weighted by those domains, a random gender and
     * a generated name.
     *
     * @param options generation options (assumed already validated)
     * @return the generated deity
     */
    protected Deity generateDeity(BasicPantheonGeneratorOptions options) {
        // Generate rank
        // TODO higher ranks should be rarer, probably by some mathematical
        // formula
        // Basic pantheons should have a few greater deities (16-20) but mostly
        // intermediate deities (11-15) and lesser deities (6-10), demigods
        // (1-5) and heroes (0) if the pantheon size enables it. There should
        // not be many overdeities (21+).
        int rank = options.getRandom().nextInt(MAX_DIVINE_RANK + 1);

        // Generate domains
        // Number of deity's domains is the ceiling of its rank divided by 5
        int numberOfDomains = (int) Math.ceil(((double) rank) / 5.0d);
        // TODO temporary override: every deity gets exactly 3 domains until
        // rank-based domain counts are finalized.
        numberOfDomains = 3;

        // If it is overdeity, its power is beyond domain partitioning - it has
        // power over every domain
        // Draw distinct domains from the probability table until enough are
        // collected (duplicates are re-drawn).
        List<Domain> domains = new ArrayList<Domain>();
        Domain domain;
        while (domains.size() < numberOfDomains) {
            domain = options.getDomainsProbability().getRandom(options.getRandom());
            if (!domains.contains(domain)) {
                domains.add(domain);
            }
        }

        Alignment alignment = getRandomAlignmentForDomains(domains, options);
        Gender gender = Gender.values()[options.getRandom().nextInt(Gender.values().length)];

        Deity deity = new Deity(options.getNameGenerator().generate(gender, options), alignment, gender, rank,
                domains);

        return deity;
    }

    /**
     * Returns deity's {@link Domain} list suited for pre-drawn
     * {@link Alignment}.
     * <p>
     * Not implemented yet — currently always returns null.
     *
     * @param alignment deity's alignment.
     * @param options generation options.
     * @return currently always null (unimplemented stub).
     */
    protected Domain getRandomDomainForAlignment(Alignment alignment, BasicPantheonGeneratorOptions options) {
        return null;
    }

    /**
     * Returns deity's {@link Alignment} suited for pre-drawn {@link Domain}
     * list. For each domain, probability for each alignment is retrieved
     * using {@link Domain#getAlignmentProbabilities()}; the per-alignment
     * probabilities are multiplied across domains and used to build a new
     * {@link ProbabilityDistributionTable} from which the result is drawn.
     *
     * @param domains list of domains of the deity; must be non-empty
     * @param options generation options.
     * @return deity's {@link Alignment} suited for pre-drawn {@link Domain}
     *         list.
     * @throws IllegalArgumentException if domains is null or empty
     */
    protected Alignment getRandomAlignmentForDomains(List<Domain> domains, BasicPantheonGeneratorOptions options) {
        // TODO maybe return random alignment based on uniform distribution
        if (domains == null || domains.isEmpty()) {
            throw new IllegalArgumentException("domains must not be null or empty");
        }

        // Start with weight 1 for every alignment, then multiply in each
        // domain's per-alignment probability.
        double[] probabilities = new double[Alignment.values().length];
        Arrays.fill(probabilities, 1.0d);
        for (Domain domain : domains) {
            for (int i = 0; i < probabilities.length; i++) {
                probabilities[i] *= domain.getAlignmentProbabilities().getProbabilities().get(i);
            }
        }

        ProbabilityDistributionTable<Alignment> tempPdt = new ProbabilityDistributionTable<Alignment>(
                Alignment.values(), probabilities);

        return tempPdt.getRandom(options.getRandom());
    }
}
apache-2.0
jcodagnone/jiolsucker
iolsucker/src/test/java/ar/com/leak/iolsucker/view/common/FSRepositoryTest.java
8278
/**
 * Copyright (c) 2005-2011 Juan F. Codagnone <http://juan.zaubersoftware.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ar.com.leak.iolsucker.view.common;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.Observable;
import java.util.Observer;

import junit.framework.TestCase;

import org.apache.commons.lang.Validate;

import ar.com.leak.common.fs.FilesystemUtils;
import ar.com.leak.iolsucker.impl.common.RelativeLocationValidator;
import ar.com.leak.iolsucker.impl.mock.MockCourse;
import ar.com.leak.iolsucker.impl.mock.MockIolDao;
import ar.com.leak.iolsucker.impl.mock.MockMaterialDir;
import ar.com.leak.iolsucker.impl.mock.MockMaterialFile;
import ar.com.leak.iolsucker.model.Course;
import ar.com.leak.iolsucker.model.IolDAO;
import ar.com.leak.iolsucker.model.Material;
import ar.com.leak.iolsucker.view.Repository;
import ar.com.leak.iolsucker.view.Repository.ObservableAction;

/**
 * Unit tests for FSRepository.
 *
 * @author Juan F. Codagnone
 * @since Apr 30, 2005
 */
public final class FSRepositoryTest extends TestCase {

    /**
     * Syncing a course whose materials contain path-traversal names
     * ("..", "soy/malo/../../..", etc.) must not read or write outside the
     * repository root. Malicious files carry null data so any attempt to
     * read them would raise an exception.
     *
     * @throws Exception on error
     */
    public void testEvilPaths() throws Exception {
        final IolDAO dao = new MockIolDao(new Course[] {
            new MockCourse("testeo", "10.1", -3, Arrays.asList(new Material [] {
                /* data is null, so an exception is raised if the repository
                 * ever tries to read these entries */
                new MockMaterialFile("../pepe", null),
                new MockMaterialDir(".."),
                new MockMaterialDir("soy/malo/../../../../../proc/version"),
                new MockMaterialFile("soy/malo/../../../../test", null),
                new MockMaterialFile("soy/malo/../../../../", null),
                new MockMaterialFile("directorio_valido/archivo", "buu"),
                new MockMaterialDir("otro_directorio_valido"),
            }))
        }, null, null);

        // TODO don't hardcode the path
        final File location = new File(getTmpDirectory(), "evilPaths");
        if(!location.exists()) {
            location.mkdirs();
        }
        final Repository repository = new FSRepository(location,
                new NullDownloadMeter(), new RelativeLocationValidator(),
                new NullRepublishRepositoryStrategy(), 1);
        /** TODO assert for the correct # */
        repository.addRepositoryListener(new Observer() {
            public void update(final Observable o, final Object arg) {
                Repository.ObservableAction action = (ObservableAction)arg;
                System.out.println(action.getType() + " -- "
                        + action.getMsg());
            }
        });

        final Collection evilCourses = dao.getUserCourses();
        for(Iterator i = evilCourses.iterator(); i.hasNext();) {
            Course evilCourse = (Course)i.next();
            repository.syncMaterial(evilCourse);
        }
    }

    /**
     * A course whose name contains a slash ("Estadistica K/V") must sync
     * without raising exceptions.
     *
     * @throws Exception on error
     */
    public void testSlashMateriaOkPaths() throws Exception {
        final IolDAO dao = new MockIolDao(new Course[] {
            new MockCourse("Estadistica K/V", "10.1", -3,
                    Arrays.asList(new Material [] {
                        new MockMaterialDir("otro_directorio_valido"),
                    }))
        }, null, null);

        // TODO don't hardcode the path
        final File location = new File(getTmpDirectory(), "evilPaths");
        if(!location.exists()) {
            location.mkdirs();
        }
        final FSRepository repository = new FSRepository(location,
                new NullDownloadMeter(), new RelativeLocationValidator(),
                new NullRepublishRepositoryStrategy(), 1);

        for(Iterator i = dao.getUserCourses().iterator(); i.hasNext();) {
            Course evilCourse = (Course)i.next();
            repository.syncMaterial(evilCourse);
        }
        assertEquals(0, repository.getExceptions().size());
    }

    /**
     * Checks that we behave correctly when a file is republished: a newer
     * upload with the same name must replace the visible content and keep a
     * backup copy (two files in the destination directory).
     *
     * @throws Exception on error
     */
    public void testRepublish() throws Exception {
        final File location = new File(getTmpDirectory(), "jiol-testRepublish");
        if(location.exists()) {
            FilesystemUtils.removeDir(location);
        }
        final FSRepository repository = new FSRepository(location,
                new NullDownloadMeter(), new RelativeLocationValidator(),
                new TagRepublishRepositoryStrategy(), 1);

        // Build a course with one file; two hours later another file with
        // the same name is uploaded, and finally one with an epoch date.
        final String courseName = "TESTS-101";
        final String courseCode = "1.23";
        final int courseLevel = Course.LEVEL_GRADO;
        final String fileName = "file1.txt";
        final Calendar calendar = GregorianCalendar.getInstance();
        final Date beforeDate = calendar.getTime();
        calendar.add(Calendar.HOUR, 2);
        final Date afterDate = calendar.getTime();
        final Date epochDate = new Date(0);
        final String []content = {
                "contenido 0",
                "contenido 1",
                "contenido 2"
        };

        final Course beforeCourse = new MockCourse(courseName, courseCode,
                courseLevel , Arrays.asList(new Material[] {
                        new MockMaterialFile(fileName, content[0], beforeDate)
                }));
        final Course afterCourse = new MockCourse(courseName, courseCode,
                courseLevel, Arrays.asList(new Material[] {
                        new MockMaterialFile(fileName, content[1], afterDate)
                }));
        final Course epochCourse = new MockCourse(courseName, courseCode,
                courseLevel, Arrays.asList(new Material[] {
                        new MockMaterialFile(fileName, content[2], epochDate)
                }));
        try {
            repository.syncMaterial(beforeCourse);
            repository.syncMaterial(afterCourse);
            repository.syncMaterial(epochCourse);

            assertEquals(repository.getDestDir(beforeCourse).listFiles().length,
                    2);
            assertEquals(content[1], // content 1 must have been kept
                    getFileContent(new File(
                            repository.getDestDir(beforeCourse), fileName)));
        } finally {
            if(location.exists()) {
                FilesystemUtils.removeDir(location);
            }
        }
    }

    /** @return the temporary directory */
    private File getTmpDirectory() {
        final String tmpDir = System.getProperty("java.io.tmpdir");
        Validate.notNull(tmpDir, "java.io.tmpdir is not set!!");

        return new File(tmpDir);
    }

    /**
     * @param f the file to open
     * @return the content of f
     * @throws IOException on error
     */
    private String getFileContent(final File f) throws IOException {
        final StringBuilder sb = new StringBuilder();
        final Reader reader = new FileReader(f);
        // Close the reader even if read() throws (the original leaked the
        // FileReader on exception).
        try {
            final int bufferSize = 1024;
            final char []buff = new char[bufferSize];
            int len;
            while((len = reader.read(buff)) != -1) {
                sb.append(buff, 0, len);
            }
        } finally {
            reader.close();
        }

        return sb.toString();
    }
}
apache-2.0
thcomp/Android_BluetoothHelper
app/src/main/java/jp/co/thcomp/bluetoothhelper/BleReceiveDataProvider.java
7875
package jp.co.thcomp.bluetoothhelper;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;

/**
 * Reassembles one logical message from the sequence of BLE packets it was split into
 * for transmission.
 *
 * <p>Wire format of each packet (big-endian, offsets/lengths come from constants in the
 * {@code BleDataProvider} base class, not shown here):
 * <ul>
 *   <li>bytes 0-1: message id (wraps around at Short.MAX_VALUE; reflects the peripheral's send order)</li>
 *   <li>bytes 2-5: packet size (never larger than the MTU)</li>
 *   <li>bytes 6-9: packet position; 0 = configuration packet, &gt;= 1 = data packet</li>
 *   <li>configuration packet only — bytes 10-13: total packet count (incl. itself), bytes 14-17: data size</li>
 *   <li>data packet only — "exist next packet" flag, then the payload</li>
 * </ul>
 *
 * <p>Data packets may arrive before their configuration packet; they are buffered in
 * {@code mReservedPacketList} and replayed once the configuration packet shows up.
 *
 * <p>NOTE(review): fields {@code mMessageId} and {@code mData} are inherited from
 * {@code BleDataProvider} — TODO confirm their exact semantics against the base class.
 */
public class BleReceiveDataProvider extends BleDataProvider {
    // Result codes for addPacket(); non-negative return values are message ids, so
    // the error codes are negative.
    public static final int AddPacketResultSuccess = -1;
    public static final int AddPacketResultAlreadyFinished = -2;
    public static final int AddPacketResultNoData = -3;

    // True once every expected data packet has been stored.
    private boolean mReceiveDataFinish = false;
    // One slot per data packet, indexed by (packetPosition - 1); null = not yet received.
    private byte[][] mReceiveDataArray;
    // Number of data packets still outstanding.
    private int mLeftPacketCount = 0;
    // Total payload size announced by the configuration packet.
    private int mDataSize;
    // Message id seen on data packets that arrived before their configuration packet.
    private Short mReservedMessageId = null;
    // Data packets buffered until the configuration packet arrives.
    private ArrayList<byte[]> mReservedPacketList = new ArrayList<>();

    /**
     * Feeds one raw packet into the reassembly state machine.
     *
     * @param packetData raw packet bytes as received over BLE
     * @return AddPacketResultAlreadyFinished: the message is already complete (add failed);
     * AddPacketResultNoData: packetData was null or empty (add failed);
     * AddPacketResultSuccess: packet accepted;
     * 0-Short.MAX_VALUE: the packet belongs to a different message — the newer
     * message id is returned (add failed)
     */
    public int addPacket(byte[] packetData) {
        int ret = AddPacketResultSuccess;

        if (packetData != null && packetData.length > 0) {
            if (!mReceiveDataFinish) {
                // Scratch buffers used to decode the big-endian header fields.
                ByteBuffer tempBufferForShort = ByteBuffer.allocate(Short.SIZE / Byte.SIZE);
                ByteBuffer tempBufferForInt = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE);

                // Bytes 0-1: message id (capped at Short.MAX_VALUE; shows the peripheral's
                // send order, but the value wraps around).
                tempBufferForShort.position(0);
                tempBufferForShort.put(packetData, 0, LengthMessageID);
                short messageId = tempBufferForShort.getShort(0);

                // Bytes 2-5: packet size; a value no larger than the MTU is set.
                tempBufferForInt.position(0);
                tempBufferForInt.put(packetData, IndexPacketSize, LengthPacketSize);
                int packetSize = tempBufferForInt.getInt(0);

                // Bytes 6-9: packet position; 0 means a configuration packet, a value of 1 or
                // greater means a data packet.
                tempBufferForInt.position(0);
                tempBufferForInt.put(packetData, IndexPacketPosition, LengthPacketPosition);
                int packetPosition = tempBufferForInt.getInt(0);

                if (packetPosition == 0) {
                    if (mMessageId == null) {
                        boolean matchMessageId = true;
                        if (mReservedMessageId != null) {
                            // A message id was already reserved by early data packets, so a
                            // configuration packet for any other id is not accepted.
                            if (messageId != mReservedMessageId) {
                                matchMessageId = false;
                            }
                        }

                        if (matchMessageId) {
                            mMessageId = messageId;

                            // Configuration packet.
                            // Bytes 10-13: packet count, including this configuration packet
                            // (capped at Integer.MAX_VALUE) — hence the "- 1" for data packets.
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexPacketCount, LengthPacketCount);
                            mLeftPacketCount = tempBufferForInt.getInt(0) - 1;
                            mReceiveDataArray = new byte[mLeftPacketCount][];

                            // Bytes 14-17: data size (capped at Integer.MAX_VALUE).
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexDataSize, LengthDataSize);
                            mDataSize = tempBufferForInt.getInt(0);

                            if (mReservedMessageId != null && mReservedPacketList.size() > 0) {
                                // Replay the data packets that were held while waiting for
                                // this configuration packet.
                                for (byte[] reservedPacketData : mReservedPacketList) {
                                    addPacket(reservedPacketData);
                                }
                            }
                            mReservedMessageId = null;
                            mReservedPacketList.clear();
                        }
                    } else {
                        // A packet of a different message is being added; return the newer
                        // message id to the caller.
                        ret = messageId;
                    }
                } else {
                    if (mMessageId == null) {
                        if (mReservedMessageId == null) {
                            mReservedMessageId = messageId;
                        }
                        if (mReservedMessageId == messageId) {
                            // No configuration packet yet, so park this packet on the
                            // reserved list.
                            mReservedPacketList.add(packetData);
                        }
                    } else if (mMessageId == messageId) {
                        // Data packet.
                        if (mReceiveDataArray != null) {
                            mLeftPacketCount--;

                            // Flag telling whether a following packet exists;
                            // 0: no next packet, 1: next packet exists.
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexExistNextPacket, LengthExistNextPacket);
                            int existNextPacket = tempBufferForInt.getInt(0);

                            // The rest of the packet is payload: the size written in the header
                            // minus the header bytes that precede the payload.
                            // NOTE(review): packetPosition is trusted here — an out-of-range
                            // position would throw ArrayIndexOutOfBoundsException; confirm the
                            // sender guarantees 1 <= packetPosition <= packet count - 1.
                            mReceiveDataArray[packetPosition - 1] = Arrays.copyOfRange(packetData, IndexDataStartPosition, packetSize);

                            if ((mLeftPacketCount == 0) || (existNextPacket == NotExistNextPacket)) {
                                // Reset the remaining count to 0, then recount the slots that
                                // are still empty to get the true value (packets may arrive
                                // out of order or be lost).
                                mLeftPacketCount = 0;
                                for (int i = 0, size = mReceiveDataArray.length; i < size; i++) {
                                    if (mReceiveDataArray[i] == null) {
                                        mLeftPacketCount++;
                                    }
                                }
                                if (mLeftPacketCount == 0) {
                                    mReceiveDataFinish = true;
                                }
                            }
                        }
                    } else {
                        // Packet belongs to a different message; report its id.
                        ret = messageId;
                    }
                }
            } else {
                ret = AddPacketResultAlreadyFinished;
            }
        } else {
            ret = AddPacketResultNoData;
        }

        return ret;
    }

    /** @return true once every data packet of the message has been received. */
    public boolean isCompleted() {
        return mReceiveDataFinish;
    }

    /**
     * Returns the reassembled payload, or null while the message is incomplete.
     * The concatenated byte array is built lazily on first call and cached in
     * the inherited {@code mData} field.
     */
    @Override
    public byte[] getData() {
        byte[] ret = null;

        if (mReceiveDataFinish) {
            if (mData == null) {
                ByteArrayOutputStream stream = new ByteArrayOutputStream();

                try {
                    // Concatenate the per-packet payloads in position order.
                    for (int i = 0, size = mReceiveDataArray.length; i < size; i++) {
                        stream.write(mReceiveDataArray[i]);
                    }
                    mData = stream.toByteArray();
                } catch (IOException e) {
                    // ByteArrayOutputStream does not actually perform I/O; this path is
                    // not expected to be hit.
                    e.printStackTrace();
                }
            }

            ret = super.getData();
        }

        return ret;
    }

    /**
     * Returns the id of the message being assembled. Before the configuration
     * packet arrives the reserved id (taken from early data packets) is used.
     */
    @Override
    public Short getMessageId() {
        if (mReservedMessageId != null && mMessageId == null) {
            return mReservedMessageId;
        } else {
            return super.getMessageId();
        }
    }
}
apache-2.0
WhiteBearSolutions/WBSAirback
packages/wbsairback-tomcat/wbsairback-tomcat-7.0.22/test/org/apache/el/TestMethodExpressionImpl.java
15575
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.el;

import javax.el.ELContext;
import javax.el.ExpressionFactory;
import javax.el.MethodExpression;
import javax.el.ValueExpression;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import org.junit.Before;
import org.junit.Test;

import org.apache.jasper.el.ELContextImpl;

/**
 * Tests for EL method expression resolution and invocation.
 *
 * <p>The TesterBeanA/AA/AAA and TesterBeanB/BB/BBB hierarchies (defined elsewhere
 * in this package) are used to exercise overload resolution when arguments are
 * subclasses of the declared parameter types, including varargs overloads and
 * cases that are deliberately ambiguous and expected to fail.
 */
public class TestMethodExpressionImpl {

    private ExpressionFactory factory;
    private ELContext context;

    /**
     * Registers one instance of each tester bean in the variable mapper under
     * the variable name matching its class suffix ("beanA", "beanAA", ...).
     */
    @Before
    public void setUp() {
        factory = ExpressionFactory.newInstance();
        context = new ELContextImpl();

        TesterBeanA beanA = new TesterBeanA();
        beanA.setName("A");
        context.getVariableMapper().setVariable("beanA",
                factory.createValueExpression(beanA, TesterBeanA.class));

        TesterBeanAA beanAA = new TesterBeanAA();
        beanAA.setName("AA");
        context.getVariableMapper().setVariable("beanAA",
                factory.createValueExpression(beanAA, TesterBeanAA.class));

        TesterBeanAAA beanAAA = new TesterBeanAAA();
        beanAAA.setName("AAA");
        context.getVariableMapper().setVariable("beanAAA",
                factory.createValueExpression(beanAAA, TesterBeanAAA.class));

        TesterBeanB beanB = new TesterBeanB();
        beanB.setName("B");
        context.getVariableMapper().setVariable("beanB",
                factory.createValueExpression(beanB, TesterBeanB.class));

        TesterBeanBB beanBB = new TesterBeanBB();
        beanBB.setName("BB");
        context.getVariableMapper().setVariable("beanBB",
                factory.createValueExpression(beanBB, TesterBeanBB.class));

        TesterBeanBBB beanBBB = new TesterBeanBBB();
        beanBBB.setName("BBB");
        context.getVariableMapper().setVariable("beanBBB",
                factory.createValueExpression(beanBBB, TesterBeanBBB.class));

        TesterBeanC beanC = new TesterBeanC();
        context.getVariableMapper().setVariable("beanC",
                factory.createValueExpression(beanC, TesterBeanC.class));
    }

    @Test
    public void testIsParametersProvided() {
        TesterBeanB beanB = new TesterBeanB();
        beanB.setName("Tomcat");
        ValueExpression var =
            factory.createValueExpression(beanB, TesterBeanB.class);
        context.getVariableMapper().setVariable("beanB", var);

        MethodExpression me1 = factory.createMethodExpression(
                context, "${beanB.getName}", String.class, new Class<?>[] {});
        MethodExpression me2 = factory.createMethodExpression(
                context, "${beanB.sayHello('JUnit')}", String.class,
                new Class<?>[] { String.class });

        // "isParmetersProvided" (sic) is the actual, misspelled method name in
        // the EL API of this Tomcat version — do not "fix" the spelling here.
        assertFalse(me1.isParmetersProvided());
        assertTrue(me2.isParmetersProvided());
    }

    @Test
    public void testInvoke() {
        TesterBeanB beanB = new TesterBeanB();
        beanB.setName("B");
        context.getVariableMapper().setVariable("beanB",
                factory.createValueExpression(beanB, TesterBeanB.class));

        MethodExpression me1 = factory.createMethodExpression(
                context, "${beanB.getName}", String.class, new Class<?>[] {});
        MethodExpression me2 = factory.createMethodExpression(
                context, "${beanB.sayHello('JUnit')}", String.class,
                new Class<?>[] { String.class });
        MethodExpression me3 = factory.createMethodExpression(
                context, "${beanB.sayHello}", String.class,
                new Class<?>[] { String.class });

        assertEquals("B", me1.invoke(context, null));
        assertEquals("Hello JUnit from B", me2.invoke(context, null));
        // Parameters embedded in the expression take precedence over the ones
        // passed to invoke().
        assertEquals("Hello JUnit from B",
                me2.invoke(context, new Object[] { "JUnit2" }));
        assertEquals("Hello JUnit2 from B",
                me3.invoke(context, new Object[] { "JUnit2" }));
        assertEquals("Hello JUnit from B",
                me2.invoke(context, new Object[] { null }));
        assertEquals("Hello null from B",
                me3.invoke(context, new Object[] { null }));
    }

    @Test
    public void testInvokeWithSuper() {
        MethodExpression me = factory.createMethodExpression(context,
                "${beanA.setBean(beanBB)}", null ,
                new Class<?>[] { TesterBeanB.class });
        me.invoke(context, null);
        ValueExpression ve = factory.createValueExpression(context,
                "${beanA.bean.name}", String.class);
        Object r = ve.getValue(context);
        assertEquals("BB", r);
    }

    // ----- overload resolution with super-type arguments, no varargs -----

    @Test
    public void testInvokeWithSuperABNoReturnTypeNoParamTypes() {
        MethodExpression me2 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanB)}", null , null);
        Object r2 = me2.invoke(context, null);
        assertEquals("AB: Hello A from B", r2.toString());
    }

    @Test
    public void testInvokeWithSuperABReturnTypeNoParamTypes() {
        MethodExpression me3 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanB)}", String.class , null);
        Object r3 = me3.invoke(context, null);
        assertEquals("AB: Hello A from B", r3.toString());
    }

    @Test
    public void testInvokeWithSuperABNoReturnTypeParamTypes() {
        MethodExpression me4 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanB)}", null ,
                new Class<?>[] {TesterBeanA.class, TesterBeanB.class});
        Object r4 = me4.invoke(context, null);
        assertEquals("AB: Hello A from B", r4.toString());
    }

    @Test
    public void testInvokeWithSuperABReturnTypeParamTypes() {
        MethodExpression me5 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanB)}", String.class ,
                new Class<?>[] {TesterBeanA.class, TesterBeanB.class});
        Object r5 = me5.invoke(context, null);
        assertEquals("AB: Hello A from B", r5.toString());
    }

    @Test
    public void testInvokeWithSuperABB() {
        MethodExpression me6 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanBB)}", null , null);
        Object r6 = me6.invoke(context, null);
        assertEquals("ABB: Hello A from BB", r6.toString());
    }

    @Test
    public void testInvokeWithSuperABBB() {
        MethodExpression me7 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanBBB)}", null , null);
        Object r7 = me7.invoke(context, null);
        assertEquals("ABB: Hello A from BBB", r7.toString());
    }

    @Test
    public void testInvokeWithSuperAAB() {
        MethodExpression me8 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanB)}", null , null);
        Object r8 = me8.invoke(context, null);
        assertEquals("AAB: Hello AA from B", r8.toString());
    }

    @Test
    public void testInvokeWithSuperAABB() {
        MethodExpression me9 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanBB)}", null , null);
        Exception e = null;
        try {
            me9.invoke(context, null);
        } catch (Exception e1) {
            e = e1;
        }
        // Expected to fail
        assertNotNull(e);
    }

    @Test
    public void testInvokeWithSuperAABBB() {
        // The Java compiler reports this as ambiguous. Using the parameter that
        // matches exactly seems reasonable to limit the scope of the method
        // search so the EL will find a match.
        MethodExpression me10 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanBBB)}", null , null);
        Object r10 = me10.invoke(context, null);
        assertEquals("AAB: Hello AA from BBB", r10.toString());
    }

    @Test
    public void testInvokeWithSuperAAAB() {
        MethodExpression me11 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanB)}", null , null);
        Object r11 = me11.invoke(context, null);
        assertEquals("AAB: Hello AAA from B", r11.toString());
    }

    @Test
    public void testInvokeWithSuperAAABB() {
        // The Java compiler reports this as ambiguous. Using the parameter that
        // matches exactly seems reasonable to limit the scope of the method
        // search so the EL will find a match.
        MethodExpression me12 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanBB)}", null , null);
        Object r12 = me12.invoke(context, null);
        assertEquals("ABB: Hello AAA from BB", r12.toString());
    }

    @Test
    public void testInvokeWithSuperAAABBB() {
        MethodExpression me13 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanBBB)}", null , null);
        Exception e = null;
        try {
            me13.invoke(context, null);
        } catch (Exception e1) {
            e = e1;
        }
        // Expected to fail
        assertNotNull(e);
    }

    // ----- overload resolution against the varargs sayHello(A, B...) -----

    @Test
    public void testInvokeWithVarArgsAB() throws Exception {
        MethodExpression me1 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanB,beanB)}", null , null);
        Exception e = null;
        try {
            me1.invoke(context, null);
        } catch (Exception e1) {
            e = e1;
        }
        // Expected to fail
        assertNotNull(e);
    }

    @Test
    public void testInvokeWithVarArgsABB() throws Exception {
        MethodExpression me2 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanBB,beanBB)}", null , null);
        Object r2 = me2.invoke(context, null);
        assertEquals("ABB[]: Hello A from BB, BB", r2.toString());
    }

    @Test
    public void testInvokeWithVarArgsABBB() throws Exception {
        MethodExpression me3 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanA,beanBBB,beanBBB)}", null , null);
        Object r3 = me3.invoke(context, null);
        assertEquals("ABB[]: Hello A from BBB, BBB", r3.toString());
    }

    @Test
    public void testInvokeWithVarArgsAAB() throws Exception {
        MethodExpression me4 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanB,beanB)}", null , null);
        Exception e = null;
        try {
            me4.invoke(context, null);
        } catch (Exception e1) {
            e = e1;
        }
        // Expected to fail
        assertNotNull(e);
    }

    @Test
    public void testInvokeWithVarArgsAABB() throws Exception {
        MethodExpression me5 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanBB,beanBB)}", null , null);
        Object r5 = me5.invoke(context, null);
        assertEquals("ABB[]: Hello AA from BB, BB", r5.toString());
    }

    @Test
    public void testInvokeWithVarArgsAABBB() throws Exception {
        MethodExpression me6 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAA,beanBBB,beanBBB)}", null , null);
        Object r6 = me6.invoke(context, null);
        assertEquals("ABB[]: Hello AA from BBB, BBB", r6.toString());
    }

    @Test
    public void testInvokeWithVarArgsAAAB() throws Exception {
        MethodExpression me7 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanB,beanB)}", null , null);
        Exception e = null;
        try {
            me7.invoke(context, null);
        } catch (Exception e1) {
            e = e1;
        }
        // Expected to fail
        assertNotNull(e);
    }

    @Test
    public void testInvokeWithVarArgsAAABB() throws Exception {
        MethodExpression me8 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanBB,beanBB)}", null , null);
        Object r8 = me8.invoke(context, null);
        assertEquals("ABB[]: Hello AAA from BB, BB", r8.toString());
    }

    @Test
    public void testInvokeWithVarArgsAAABBB() throws Exception {
        MethodExpression me9 = factory.createMethodExpression(context,
                "${beanC.sayHello(beanAAA,beanBBB,beanBBB)}", null , null);
        Object r9 = me9.invoke(context, null);
        assertEquals("ABB[]: Hello AAA from BBB, BBB", r9.toString());
    }

    /*
     * This is also tested implicitly in numerous places elsewhere in this
     * class.
     */
    @Test
    public void testBug49655() throws Exception {
        // This is the call the failed
        MethodExpression me = factory.createMethodExpression(context,
                "#{beanA.setName('New value')}", null, null);

        // The rest is to check it worked correctly
        me.invoke(context, null);

        ValueExpression ve = factory.createValueExpression(context,
                "#{beanA.name}", java.lang.String.class);
        assertEquals("New value", ve.getValue(context));
    }

    @Test
    public void testBugPrimitives() throws Exception {
        // Literal 5 must be coerced to the primitive long parameter.
        MethodExpression me = factory.createMethodExpression(context,
                "${beanA.setValLong(5)}", null, null);
        me.invoke(context, null);
        ValueExpression ve = factory.createValueExpression(context,
                "#{beanA.valLong}", java.lang.String.class);
        assertEquals("5", ve.getValue(context));
    }

    @Test
    public void testBug50449a() throws Exception {
        MethodExpression me1 = factory.createMethodExpression(context,
                "${beanB.sayHello()}", null, null);
        String actual = (String) me1.invoke(context, null);
        assertEquals("Hello from B", actual);
    }

    @Test
    public void testBug50449b() throws Exception {
        MethodExpression me1 = factory.createMethodExpression(context,
                "${beanB.sayHello('Tomcat')}", null, null);
        String actual = (String) me1.invoke(context, null);
        assertEquals("Hello Tomcat from B", actual);
    }

    @Test
    public void testBug50790a() throws Exception {
        // Method call on the result of a property expression.
        ValueExpression ve = factory.createValueExpression(context,
                "#{beanAA.name.contains(beanA.name)}", java.lang.Boolean.class);
        Boolean actual = (Boolean) ve.getValue(context);
        assertEquals(Boolean.TRUE, actual);
    }

    @Test
    public void testBug50790b() throws Exception {
        ValueExpression ve = factory.createValueExpression(context,
                "#{beanA.name.contains(beanAA.name)}", java.lang.Boolean.class);
        Boolean actual = (Boolean) ve.getValue(context);
        assertEquals(Boolean.FALSE, actual);
    }
}
apache-2.0
SolaceSamples/solace-samples-java
src/main/java/com/solace/samples/java/snippets/HowToConsumeDirectMessage.java
11744
/*
 * Copyright 2021 Solace Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.solace.samples.java.snippets;

import com.solace.messaging.MessagingService;
import com.solace.messaging.PubSubPlusClientException;
import com.solace.messaging.receiver.DirectMessageReceiver;
import com.solace.messaging.receiver.InboundMessage;
import com.solace.messaging.receiver.MessageReceiver.InboundMessageSupplier;
import com.solace.messaging.receiver.MessageReceiver.MessageHandler;
import com.solace.messaging.resources.TopicSubscription;
import com.solace.messaging.util.CompletionListener;
import com.solace.messaging.util.Converter.BytesToObject;
import com.solace.messaging.util.InteroperabilitySupport.RestInteroperabilitySupport;
import java.io.Serializable;
import java.util.Properties;
import java.util.concurrent.CompletionStage;


/**
 * Sampler for direct message consumption. Each method is a self-contained
 * snippet showing one way to start a {@link DirectMessageReceiver} or to
 * receive messages (blocking, blocking with timeout, non-blocking, async).
 */
public class HowToConsumeDirectMessage {

  /**
   * Example how to start direct message receiver. This call is blocking.
   *
   * @param receiverToBeStarted receiver to be started
   */
  public static void startDirectMessageReceiver(final DirectMessageReceiver receiverToBeStarted) {
    receiverToBeStarted.start();
  }

  /**
   * Example how to start direct message receiver using callback listener and asynchronously get
   * notifications when start operation is complete
   *
   * @param receiverToBeStarted receiver to be started
   */
  public static void startDirectMessageReceiverAsyncCallback(
      final DirectMessageReceiver receiverToBeStarted) {

    final CompletionListener<DirectMessageReceiver> receiverStartupListener = (directReceiver, throwable) -> {
      if (throwable == null) {
        // started successfully, i.e. can receive messages
        // (the original comments on these two branches were swapped)
      } else {
        // deal with the exception raised during start
      }
    };
    receiverToBeStarted.startAsync(receiverStartupListener);
  }

  /**
   * Example how to start direct message receiver using callback listener and asynchronously get
   * notifications when start operation is complete.
   *
   * @param receiverToBeStarted receiver to be started
   * @see <a href="https://community.oracle.com/docs/DOC-995305">CompletableFuture for Asynchronous
   * Programming in Java 8</a>
   */
  public static void startDirectMessageReceiverAsyncCompletionStage(
      final DirectMessageReceiver receiverToBeStarted) {
    final CompletionStage<DirectMessageReceiver> receiverOnceStartCompleteStage = receiverToBeStarted
        .startAsync();
    // use CompletionStage API for reactive pipeline implementation
  }

  /**
   * Example how to consume raw bytes direct message
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void consumeDirectMessageBytePayload(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build().start();
    // blocks until a message arrives, then exposes its raw payload
    final byte[] messagePayload = receiver.receiveMessage().getPayloadAsBytes();
  }

  /**
   * Example how to consume converted to utf 8 string direct message
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void consumeDirectMessageStringPayload(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build().start();
    final String messagePayload = receiver.receiveMessage().getPayloadAsString();
  }

  /**
   * Example how to consume direct message and extract HTTP/REST specific content from direct
   * message if available
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void consumeDirectMessagePublishedFromRestClient(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build().start();
    final InboundMessage message = receiver.receiveMessage();
    final RestInteroperabilitySupport restSpecificFields = message.getRestInteroperabilitySupport();
    final String contentEncoding = restSpecificFields.getHTTPContentEncoding();
    final String contentType = restSpecificFields.getHTTPContentType();
  }

  /**
   * Example how to consume full direct message
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void consumeDirectDetailedMessage(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build().start();

    // extensive details about message, payload, header, properties,
    // message delivery information are available using InboundMessage
    final InboundMessage message = receiver.receiveMessage();

    // i.e message expiration time point (UTC)
    final long expiration = message.getExpiration();

    // in assumption that MyData business object is expected in a message,
    // a simple converter is provided
    final BytesToObject<MyData> bytesToBusinessObjectConverter = (bytes) -> {
      // NOTE(review): no charset is specified here, so the platform default is
      // used — confirm the publisher encodes MyData the same way
      return new MyData(new String(bytes));
    };

    final MyData myBusinessObjectFromMessage = message
        .getAndConvertPayload(bytesToBusinessObjectConverter, MyData.class);
  }

  /**
   * Example how to consume (blocking) full direct messages in a loop
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void blockingConsumeDirectMessagesInLoop(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build()
        .start();

    int count = 0;
    //receive next 1000 messages
    while (count < 1000) {
      try {
        final InboundMessage message = receiver.receiveMessage();
        // process a message
        count++;
      } catch (PubSubPlusClientException e) {
        // deal with an exception, mostly timeout exception
      }
    }
  }

  /**
   * Example how to consume (blocking with timeout) full direct messages in a loop
   *
   * @param service        connected instance of a messaging service, ready to be used
   * @param receiveTimeout time out in milliseconds after that blocking receive exits, values &gt; 0
   *                       are expected, use {@code receiveOrElse (..)} method when immediate
   *                       response is required
   */
  public static void blockingConsumeDirectMessagesInLoop(MessagingService service,
      int receiveTimeout) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build()
        .start();

    int count = 0;
    //receive next 1000 messages
    while (count < 1000) {
      try {
        final InboundMessage message = receiver.receiveMessage(receiveTimeout);
        if (message != null)
        // process a message
        // message can be null when the timeout expired and NO new message was received
        {
          count++;
        }
      } catch (PubSubPlusClientException e) {
        // deal with an exception, mostly timeout exception
      }
    }
  }

  /**
   * Example how to consume (non-blocking) full direct messages in a loop
   *
   * @param service        connected instance of a messaging service, ready to be used
   * @param receiveTimeout time out in milliseconds after that blocking receive exits, values &gt; 0
   *                       are expected, use {@code receiveOrElse (..)} method when immediate
   *                       response is required
   */
  // NOTE(review): "Bocking" in the method name is a typo for "Blocking"; kept
  // because renaming would break existing callers. receiveTimeout is unused in
  // this non-blocking variant.
  public static void nonBockingConsumeDirectMessagesInLoop(MessagingService service,
      int receiveTimeout) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build()
        .start();

    int count = 0;
    // more message supplier are available
    final InboundMessageSupplier nullSupplier = InboundMessageSupplier.nullMessageSupplier();
    //receive next 1000 messages
    while (count < 1000) {
      try {
        InboundMessage message = receiver.receiveOrElse(nullSupplier);
        if (message != null)
        // process a message
        //message can be null since Null supplier is used,
        // when no message is available to receive, given InboundMessageSupplier is used to generate one
        // in particular case null is generated
        {
          count++;
        }
      } catch (PubSubPlusClientException e) {
        // deal with an exception, mostly timeout exception
      }
    }
  }

  /**
   * Example how to consume full direct messages asynchronous using callback
   *
   * @param service connected instance of a messaging service, ready to be used
   */
  public static void consumeDirectMessageAsync(MessagingService service) {
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder()
        .withSubscriptions(TopicSubscription.of("setSubscriptionExpressionHere"))
        .build().start();

    final MessageHandler messageHandler = (message) -> {
      // do something with a message, i.e access raw payload:
      byte[] bytes = message.getPayloadAsBytes();
    };
    receiver.receiveAsync(messageHandler);
  }

  /**
   * Example how to configure {@link DirectMessageReceiver} using {@link Properties}. See {@link
   * com.solace.messaging.DirectMessageReceiverBuilder#fromProperties(Properties)} and {@link
   * com.solace.messaging.MessageReceiverBuilder#fromProperties(Properties)} for the list available
   * properties.
   *
   * @param service               connected instance of a messaging service, ready to be used
   * @param receiverConfiguration full configuration and/or fine tuning advanced configuration
   *                              properties for {@link DirectMessageReceiver}.
   * @return started direct message receiver ready to receive messages
   */
  public static DirectMessageReceiver configureConsumerFromProperties(MessagingService service,
      Properties receiverConfiguration) {
    // Note: property based configuration can be extended/overridden using api calls
    final DirectMessageReceiver receiver = service
        .createDirectMessageReceiverBuilder().fromProperties(receiverConfiguration)
        .build().start();
    return receiver;
  }


  /**
   * basic example for a business object to be send in a message
   */
  static class MyData implements Serializable {

    private static final long serialVersionUID = 1L;
    private final String name;

    MyData(String name) {
      this.name = name;
    }

    public String getName() {
      return name;
    }
  }
}
apache-2.0
AssafMashiah/assafmashiah-yaml
src/test/java/org/yaml/snakeyaml/util/UriEncoderTest.java
1781
/**
 * Copyright (c) 2008-2011, http://www.snakeyaml.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.yaml.snakeyaml.util;

import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;

import junit.framework.TestCase;

/**
 * Exercises {@link UriEncoder}: percent-encoding of non-ASCII characters,
 * decoding of UTF-8 byte buffers, and rejection of malformed UTF-8 input.
 */
public class UriEncoderTest extends TestCase {

    public void testEncode() {
        // Non-ASCII characters are percent-encoded as UTF-8 octets.
        assertEquals("Acad%C3%A9mico", UriEncoder.encode("Académico"));
        // Square brackets stay unescaped per the YAML 1.1 URI escaping rules.
        assertEquals("Check http://yaml.org/spec/1.1/#escaping%20in%20URI/", "[]",
                UriEncoder.encode("[]"));
    }

    public void testDecode() throws CharacterCodingException {
        // "45" as its two ASCII bytes.
        ByteBuffer input = ByteBuffer.allocate(10);
        input.put((byte) 0x34).put((byte) 0x35);
        input.flip();
        assertEquals("45", UriEncoder.decode(input));
    }

    public void testFailDecode() throws CharacterCodingException {
        // 0xC1 is never a valid byte in UTF-8, so decoding must be rejected.
        ByteBuffer input = ByteBuffer.allocate(10);
        input.put((byte) 0x34).put((byte) 0xC1);
        input.flip();
        try {
            UriEncoder.decode(input);
            fail("Invalid UTF-8 must not be accepted.");
        } catch (Exception e) {
            assertEquals("Input length = 1", e.getMessage());
        }
    }
}
apache-2.0
delebash/orientdb-parent
enterprise/src/main/java/com/orientechnologies/orient/enterprise/channel/binary/OAsynchChannelServiceThread.java
2656
/*
 * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.enterprise.channel.binary;

import java.io.IOException;

import com.orientechnologies.common.thread.OSoftThread;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.record.ORecordInternal;

/**
 * Service thread that catches internal messages sent by the server
 *
 * @author Luca Garulli (l.garulli--at--orientechnologies.com)
 */
public class OAsynchChannelServiceThread extends OSoftThread {
  // Channel listened on; set to null once closed after an I/O failure so that
  // the finally block does not touch a dead channel.
  private OChannelBinaryAsynchClient network;
  private int                        sessionId;
  private ORemoteServerEventListener remoteServerEventListener;

  /**
   * Creates the service thread and starts it immediately.
   *
   * @param iRemoteServerEventListener callback notified for every push request received
   * @param iChannel                   the asynchronous binary channel to listen on
   */
  public OAsynchChannelServiceThread(final ORemoteServerEventListener iRemoteServerEventListener,
      final OChannelBinaryAsynchClient iChannel) {
    super(Orient.instance().getThreadGroup(), "OrientDB <- Asynch Client (" + iChannel.socket.getRemoteSocketAddress() + ")");
    // NOTE(review): Integer.MIN_VALUE appears to act as a sentinel session id for
    // server-initiated push messages — confirm against beginResponse()'s contract.
    sessionId = Integer.MIN_VALUE;
    remoteServerEventListener = iRemoteServerEventListener;
    network = iChannel;
    // Thread starts from the constructor; 'this' escapes before construction
    // completes, which the OSoftThread pattern relies on here.
    start();
  }

  /**
   * One iteration of the soft-thread loop: blocks for the next push request,
   * decodes its payload and forwards it to the listener. On an I/O error the
   * channel is closed and the thread shuts itself down.
   */
  @Override
  protected void execute() throws Exception {
    try {
      // Blocks until the server pushes a message for this (sentinel) session.
      network.beginResponse(sessionId, 0);

      final byte request = network.readByte();
      Object obj = null;

      switch (request) {
      case OChannelBinaryProtocol.REQUEST_PUSH_RECORD:
        // The cast also acts as a runtime type check on the decoded identifiable.
        obj = (ORecordInternal<?>) OChannelBinaryProtocol.readIdentifiable(network);
        break;
      case OChannelBinaryProtocol.REQUEST_PUSH_DISTRIB_CONFIG:
        obj = network.readBytes();
        break;
      }
      // Unknown request codes fall through with obj == null.

      if (remoteServerEventListener != null)
        remoteServerEventListener.onRequest(request, obj);

    } catch (IOException ioe) {
      // EXCEPTION RECEIVED (THE SOCKET HAS BEEN CLOSED?) ASSURE TO UNLOCK THE READ AND EXIT THIS THREAD
      sendShutdown();
      if (network != null) {
        // Null out the field first so the finally block skips endResponse()
        // on the closed channel.
        final OChannelBinaryAsynchClient n = network;
        network = null;

        n.close();
      }
    } finally {
      if (network != null)
        network.endResponse();
    }
  }
}
apache-2.0
CS122B-CWP/cs122b
project2/src/main/java/project2/jdbc/bean/OrderSingleBean.java
1033
package project2.jdbc.bean;

import org.json.JSONObject;

/**
 * Bean describing one line item of an order: a movie, the quantity purchased
 * and the unit price at the time of sale.
 */
public class OrderSingleBean {

    private int qty;           // number of copies ordered
    private double unit_price; // price per copy
    private int movie_id;      // id of the purchased movie
    private String title;      // title of the purchased movie

    public int getQty() {
        return qty;
    }

    public void setQty(int qty) {
        this.qty = qty;
    }

    public double getUnit_price() {
        return unit_price;
    }

    public void setUnit_price(double unit_price) {
        this.unit_price = unit_price;
    }

    public int getMovie_id() {
        return movie_id;
    }

    public void setMovie_id(int movie_id) {
        this.movie_id = movie_id;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Serializes this line item as a JSON object with the keys
     * {@code qty}, {@code unit_price}, {@code movie_id} and {@code title}.
     */
    public JSONObject toJson() {
        // JSONObject.put returns the receiver, so the calls can be chained.
        return new JSONObject()
                .put("qty", qty)
                .put("unit_price", unit_price)
                .put("movie_id", movie_id)
                .put("title", title);
    }

    @Override
    public String toString() {
        return toJson().toString();
    }
}
apache-2.0
don-philipe/graphhopper
core/src/main/java/com/graphhopper/util/gpx/GpxFromInstructions.java
10232
/*
 *  Licensed to GraphHopper GmbH under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper GmbH licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.util.gpx;

import com.graphhopper.util.*;
import com.graphhopper.util.shapes.GHPoint3D;

import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

// todo: the code here does not really belong into core, but we moved it here for now so its available from
// map-matching resource (it cannot be in the api module, because it uses AngleCalc). Probably we should separate the
// actual gpx conversion (which belongs to the web module) from the angle calculations. Or at least move this code back
// into web-bundle once MapMatchingResource is in core. Or we need another module for code that is used in different
// modules like web, but does not really fit into core either.

/**
 * Converts an {@link InstructionList} into a GPX 1.1 document, optionally with
 * way points, a route ({@code <rte>}) and/or a track ({@code <trk>}), plus
 * GraphHopper-specific extensions (distance, time, azimuth, direction, sign)
 * under the {@code gh:} namespace. All output is built by direct string
 * concatenation, so the exact literals below define the wire format.
 */
public class GpxFromInstructions {
    private static final AngleCalc AC = AngleCalc.ANGLE_CALC;

    /**
     * Minimal XML escaping: '&' becomes '&amp;amp;', while '&lt;' and '&gt;' are
     * replaced by '_' instead of being escaped.
     */
    static String simpleXMLEscape(String str) {
        // We could even use the 'more flexible' CDATA section but for now do the following. The 'and' could be important sometimes:
        return str.replaceAll("&", "&amp;").
                // but do not care for:
                replaceAll("[\\<\\>]", "_");
    }

    /**
     * Flattens the instructions into a list of GPX entries. Only the first
     * point of each instruction carries a timestamp (the accumulated
     * instruction times); intermediate "pillar" points have none.
     */
    public static List<GPXEntry> createGPXList(InstructionList instructions) {
        List<GPXEntry> gpxList = new ArrayList<>();
        long timeOffset = 0;
        for (Instruction instruction : instructions) {
            int i = 0;
            for (GHPoint3D point : instruction.getPoints()) {
                GPXEntry gpxEntry;
                if (i == 0) {
                    gpxEntry = new GPXEntry(point, timeOffset);
                } else {
                    // We don't have timestamps for pillar nodes
                    gpxEntry = new GPXEntry(point);
                }
                gpxList.add(gpxEntry);
                i++;
            }
            timeOffset = timeOffset + instruction.getTime();
        }
        return gpxList;
    }

    /**
     * Appends one {@code <wpt>} element at the instruction's first point,
     * using the street name or, if empty, the localized turn description.
     */
    private static void createWayPointBlock(StringBuilder output, Instruction instruction, DecimalFormat decimalFormat, Translation tr) {
        output.append("\n<wpt ");
        output.append("lat=\"").append(decimalFormat.format(instruction.getPoints().getLatitude(0)));
        output.append("\" lon=\"").append(decimalFormat.format(instruction.getPoints().getLongitude(0))).append("\">");
        String name;
        if (instruction.getName().isEmpty())
            name = instruction.getTurnDescription(tr);
        else
            name = instruction.getName();

        output.append(" <name>").append(simpleXMLEscape(name)).append("</name>");
        output.append("</wpt>");
    }

    /**
     * Builds the complete GPX document as a string.
     *
     * @param trackName        name written into the {@code <trk>} block
     * @param startTimeMillis  epoch millis used for the metadata time and as the
     *                         base for per-trackpoint timestamps
     * @param includeElevation whether {@code <ele>} elements are written
     * @param withRoute        whether the {@code <rte>} block is written
     * @param withTrack        whether the {@code <trk>} block is written
     * @param withWayPoints    whether start/via/finish {@code <wpt>} elements are written
     * @param version          GraphHopper version embedded in the creator attribute
     */
    public static String createGPX(InstructionList instructions, String trackName, long startTimeMillis, boolean includeElevation, boolean withRoute, boolean withTrack, boolean withWayPoints, String version, Translation tr) {
        DateFormat formatter = Helper.createFormatter();

        // Locale.ROOT with '.'-decimal output, 1..6 fraction digits for coordinates.
        DecimalFormat decimalFormat = new DecimalFormat("#", DecimalFormatSymbols.getInstance(Locale.ROOT));
        decimalFormat.setMinimumFractionDigits(1);
        decimalFormat.setMaximumFractionDigits(6);
        decimalFormat.setMinimumIntegerDigits(1);

        String header = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>"
                + "<gpx xmlns=\"http://www.topografix.com/GPX/1/1\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""
                + " creator=\"Graphhopper version " + version + "\" version=\"1.1\""
                // This xmlns:gh acts only as ID, no valid URL necessary.
                // Use a separate namespace for custom extensions to make basecamp happy.
                + " xmlns:gh=\"https://graphhopper.com/public/schema/gpx/1.1\">"
                + "\n<metadata>"
                + "<copyright author=\"OpenStreetMap contributors\"/>"
                + "<link href=\"http://graphhopper.com\">"
                + "<text>GraphHopper GPX</text>"
                + "</link>"
                + "<time>" + formatter.format(startTimeMillis) + "</time>"
                + "</metadata>";
        StringBuilder gpxOutput = new StringBuilder(header);
        if (!instructions.isEmpty()) {
            if (withWayPoints) {
                createWayPointBlock(gpxOutput, instructions.get(0), decimalFormat, tr);   // Start
                for (Instruction currInstr : instructions) {
                    if ((currInstr.getSign() == Instruction.REACHED_VIA) // Via
                            || (currInstr.getSign() == Instruction.FINISH)) // End
                    {
                        createWayPointBlock(gpxOutput, currInstr, decimalFormat, tr);
                    }
                }
            }
            if (withRoute) {
                gpxOutput.append("\n<rte>");
                // NOTE(review): despite its name, 'nextInstr' holds the PREVIOUS
                // instruction of the iteration; each instruction is emitted with
                // its successor so azimuth/direction can be computed.
                Instruction nextInstr = null;
                for (Instruction currInstr : instructions) {
                    if (null != nextInstr)
                        createRteptBlock(gpxOutput, nextInstr, currInstr, decimalFormat, tr);

                    nextInstr = currInstr;
                }
                // Last instruction has no successor.
                createRteptBlock(gpxOutput, nextInstr, null, decimalFormat, tr);
                gpxOutput.append("\n</rte>");
            }
        }
        if (withTrack) {
            gpxOutput.append("\n<trk><name>").append(trackName).append("</name>");

            gpxOutput.append("<trkseg>");
            for (GPXEntry entry : createGPXList(instructions)) {
                gpxOutput.append("\n<trkpt lat=\"").append(decimalFormat.format(entry.getPoint().getLat()));
                gpxOutput.append("\" lon=\"").append(decimalFormat.format(entry.getPoint().getLon())).append("\">");
                if (includeElevation)
                    gpxOutput.append("<ele>").append(Helper.round2(((GHPoint3D) entry.getPoint()).getEle())).append("</ele>");
                if (entry.getTime() != null)
                    gpxOutput.append("<time>").append(formatter.format(startTimeMillis + entry.getTime())).append("</time>");
                gpxOutput.append("</trkpt>");
            }
            gpxOutput.append("\n</trkseg>");
            gpxOutput.append("\n</trk>");
        }

        // we could now use 'wpt' for via points
        gpxOutput.append("\n</gpx>");
        return gpxOutput.toString();
    }

    /**
     * Appends one {@code <rtept>} element for {@code instruction}, including
     * the gh: extension block (distance, time, direction, azimuth, roundabout
     * exit number, sign). {@code nextI} may be null for the final instruction.
     */
    private static void createRteptBlock(StringBuilder output, Instruction instruction, Instruction nextI, DecimalFormat decimalFormat, Translation tr) {
        output.append("\n<rtept lat=\"").append(decimalFormat.format(instruction.getPoints().getLatitude(0))).
                append("\" lon=\"").append(decimalFormat.format(instruction.getPoints().getLongitude(0))).append("\">");

        if (!instruction.getName().isEmpty())
            output.append("<desc>").append(simpleXMLEscape(instruction.getTurnDescription(tr))).append("</desc>");

        output.append("<extensions>");
        output.append("<gh:distance>").append(Helper.round(instruction.getDistance(), 1)).append("</gh:distance>");
        output.append("<gh:time>").append(instruction.getTime()).append("</gh:time>");

        String direction = calcDirection(instruction, nextI);
        if (!direction.isEmpty())
            output.append("<gh:direction>").append(direction).append("</gh:direction>");

        double azimuth = calcAzimuth(instruction, nextI);
        if (!Double.isNaN(azimuth))
            output.append("<gh:azimuth>").append(Helper.round2(azimuth)).append("</gh:azimuth>");

        if (instruction instanceof RoundaboutInstruction) {
            RoundaboutInstruction ri = (RoundaboutInstruction) instruction;

            output.append("<gh:exit_number>").append(ri.getExitNumber()).append("</gh:exit_number>");
        }

        output.append("<gh:sign>").append(instruction.getSign()).append("</gh:sign>");
        output.append("</extensions>");
        output.append("</rtept>");
    }

    /**
     * Return the direction like 'NE' based on the first tracksegment of the instruction. If
     * Instruction does not contain enough coordinate points, an empty string will be returned.
     */
    public static String calcDirection(Instruction instruction, Instruction nextI) {
        double azimuth = calcAzimuth(instruction, nextI);
        if (Double.isNaN(azimuth))
            return "";

        return AC.azimuth2compassPoint(azimuth);
    }

    /**
     * Return the azimuth in degree based on the first tracksegment of this instruction. If this
     * instruction contains less than 2 points then NaN will be returned or the specified
     * instruction will be used if that is the finish instruction.
     */
    public static double calcAzimuth(Instruction instruction, Instruction nextI) {
        double nextLat;
        double nextLon;

        // Prefer the instruction's own second point; fall back to the next
        // instruction's first point when only one point is available.
        if (instruction.getPoints().getSize() >= 2) {
            nextLat = instruction.getPoints().getLatitude(1);
            nextLon = instruction.getPoints().getLongitude(1);
        } else if (nextI != null && instruction.getPoints().getSize() == 1) {
            nextLat = nextI.getPoints().getLatitude(0);
            nextLon = nextI.getPoints().getLongitude(0);
        } else {
            return Double.NaN;
        }

        double lat = instruction.getPoints().getLatitude(0);
        double lon = instruction.getPoints().getLongitude(0);
        return AC.calcAzimuth(lat, lon, nextLat, nextLon);
    }
}
apache-2.0
lpff/coolweather
src/com/coolweather/app/receiver/AutoUpdateReceiver.java
419
package com.coolweather.app.receiver;

import com.coolweather.app.service.AutoUpdateService;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;

/**
 * Broadcast receiver that, on each received broadcast, starts the
 * {@link AutoUpdateService} to perform the actual update work.
 */
public class AutoUpdateReceiver extends BroadcastReceiver {

    @Override
    public void onReceive(Context context, Intent intent) {
        // All work happens in the service; the incoming intent is not inspected.
        context.startService(new Intent(context, AutoUpdateService.class));
    }
}
apache-2.0
sabob/ratel
ratel/src/com/google/ratel/deps/jackson/databind/jsonschema/JsonSerializableSchema.java
2431
package com.google.ratel.deps.jackson.databind.jsonschema;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import com.google.ratel.deps.jackson.annotation.JacksonAnnotation;

/**
 * Annotation that can be used to define JSON Schema definition for
 * the annotated class.
 *<p>
 * Note that annotation is often not needed: for example, regular
 * Jackson beans that Jackson can introspect can be used without
 * annotations, to produce JSON schema definition.
 *
 * @author Ryan Heaton
 * @author Tatu Saloranta
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@JacksonAnnotation
public @interface JsonSerializableSchema {
    /**
     * Marker value used to indicate that property has "no value";
     * needed because annotations can not have null as default
     * value.
     */
    // Annotation members are implicitly public static final, so no modifiers needed.
    String NO_VALUE = "##irrelevant";

    /**
     * Property that can be used to indicate id of the type when
     * generating JSON Schema; empty String indicates that no id
     * is defined.
     */
    String id() default "";

    /**
     * The schema type for this JsonSerializable instance.
     * Possible values: "string", "number", "boolean", "object", "array", "null", "any"
     *
     * @return The schema type for this JsonSerializable instance.
     */
    String schemaType() default "any";

    /**
     * If the schema type is "object", JSON definition of properties of the object as
     * a String.
     *
     * @return The node representing the schema properties, or "##irrelevant" if irrelevant.
     *
     * @deprecated (since 2.1) -- support will be dropped in future, since JSON-as-String is
     *   fundamentally bad way for customizing anything. No direct replacements offered.
     */
    @Deprecated
    String schemaObjectPropertiesDefinition() default NO_VALUE;

    /**
     * If the schema type if "array", JSON definition of the schema for item types contained.
     *
     * @return The schema for the items in the array, or "##irrelevant" if irrelevant.
     *
     * @deprecated (since 2.1) -- support will be dropped in future, since JSON-as-String is
     *   fundamentally bad way for customizing anything. No direct replacements offered.
     */
    @Deprecated
    String schemaItemDefinition() default NO_VALUE;
}
apache-2.0
im-sure/LeetCode
src/_0382LinkedListRandomNode.java
550
import java.util.Random; public class _0382LinkedListRandomNode { ListNode head; Random r; public _0382LinkedListRandomNode(ListNode head) { this.head = head; r = new Random(); } public int getRandom() { ListNode thisN = head; ListNode result = null; for (int n = 1; thisN != null; n++) { if (r.nextInt(n) == 0) result = thisN; thisN = thisN.next; } return result.val; } public class ListNode { int val; ListNode next; ListNode(int x) { val = x; } } }
apache-2.0
christian-posta/wildfly-swarm
fractions/javaee/webservices/src/main/java/org/wildfly/swarm/webservices/runtime/WSDLHostCustomizer.java
693
package org.wildfly.swarm.webservices.runtime; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import org.wildfly.swarm.container.Interface; import org.wildfly.swarm.spi.api.Customizer; import org.wildfly.swarm.spi.runtime.annotations.Post; import org.wildfly.swarm.webservices.WebServicesFraction; /** * @author Bob McWhirter */ @Post @ApplicationScoped public class WSDLHostCustomizer implements Customizer { @Inject Interface iface; @Inject WebServicesFraction fraction; @Override public void customize() { if (fraction.wsdlHost() == null) { fraction.wsdlHost(this.iface.getExpression()); } } }
apache-2.0
thilinamb/mqtt-client-example
src/main/java/com/thilinamb/mqtt/client/sub/SubscriberCallback.java
882
package com.thilinamb.mqtt.client.sub;

import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttMessage;

import java.util.logging.Logger;

/**
 * Subscriber callback: logs connection loss and every arriving message.
 * Author: Thilina
 * Date: 7/19/14
 */
public class SubscriberCallback implements MqttCallback {

    private static Logger logger = Logger.getLogger(SubscriberCallback.class.getName());

    @Override
    public void connectionLost(Throwable cause) {
        logger.warning("Connection Lost!");
    }

    @Override
    public void messageArrived(String topic, MqttMessage message) throws Exception {
        logger.info("Message Arrived. Topic: " + topic + ", Message: " + new String(message.getPayload()));
    }

    @Override
    public void deliveryComplete(IMqttDeliveryToken token) {
        // A pure subscriber: delivery confirmations are intentionally ignored.
    }
}
apache-2.0
McLeodMoores/starling
projects/analytics/src/main/java/com/opengamma/analytics/math/matrix/DoubleMatrix1D.java
3045
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.math.matrix;

import java.util.Arrays;

import org.apache.commons.lang.Validate;

/**
 * A minimal implementation of a vector (in the mathematical sense) that contains doubles.
 */
public class DoubleMatrix1D implements Matrix<Double> {
  private final double[] _data;
  private final int _elements;
  /** Empty vector. */
  public static final DoubleMatrix1D EMPTY_MATRIX = new DoubleMatrix1D(new double[0]);

  /**
   * @param data
   *          The data, not null; the boxed values are unboxed into a fresh array
   */
  public DoubleMatrix1D(final Double[] data) {
    Validate.notNull(data);
    _elements = data.length;
    _data = new double[_elements];
    for (int i = 0; i < _elements; i++) {
      _data[i] = data[i];
    }
  }

  /**
   * @param data
   *          The data, not null; copied defensively
   */
  public DoubleMatrix1D(final double... data) {
    Validate.notNull(data);
    _elements = data.length;
    _data = Arrays.copyOf(data, _elements);
  }

  /**
   * Create an vector of length n with all entries equal to value.
   *
   * @param n
   *          number of elements
   * @param value
   *          value of elements
   */
  public DoubleMatrix1D(final int n, final double value) {
    _elements = n;
    _data = new double[_elements];
    Arrays.fill(_data, value);
  }

  /**
   * Returns the underlying vector data. If this is changed so is the vector.
   *
   * @see #toArray to get a copy of data
   * @return An array containing the vector elements
   */
  public double[] getData() {
    return _data;
  }

  /**
   * Convert the vector to a double array. As its elements are copied, the array is independent from the vector data.
   *
   * @return An array containing a copy of vector elements
   */
  public double[] toArray() {
    return Arrays.copyOf(_data, _elements);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public int getNumberOfElements() {
    return _elements;
  }

  /**
   * {@inheritDoc} This method expects one index - any subsequent indices will be ignored.
   */
  @Override
  public Double getEntry(final int... index) {
    return _data[index[0]];
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + Arrays.hashCode(_data);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final DoubleMatrix1D other = (DoubleMatrix1D) obj;
    if (!Arrays.equals(_data, other._data)) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    // StringBuilder instead of StringBuffer: no synchronization needed here.
    final StringBuilder sb = new StringBuilder();
    final int n = _data.length;
    sb.append(" (");
    // FIX: the previous implementation unconditionally read _data[n - 1] and
    // therefore threw ArrayIndexOutOfBoundsException for the empty vector
    // (including the EMPTY_MATRIX constant declared above). Now " () " is
    // produced for n == 0; output for n >= 1 is unchanged.
    for (int i = 0; i < n; i++) {
      if (i > 0) {
        sb.append(", ");
      }
      sb.append(_data[i]);
    }
    sb.append(") ");
    return sb.toString();
  }
}
apache-2.0
shufudong/bboss
bboss-persistent/src/com/frameworkset/common/poolman/sql/PrimaryKeyCache.java
5515
/*****************************************************************************
 *                                                                           *
 *  This file is part of the tna framework distribution.                     *
 *  Documentation and updates may be get from  biaoping.yin the author of    *
 *  this framework                                                           *
 *                                                                           *
 *  Sun Public License Notice:                                               *
 *                                                                           *
 *  The contents of this file are subject to the Sun Public License Version  *
 *  1.0 (the "License"); you may not use this file except in compliance with *
 *  the License. A copy of the License is available at http://www.sun.com    *
 *                                                                           *
 *  The Original Code is tag. The Initial Developer of the Original          *
 *  Code is biaoping yin. Portions created by biaoping yin are Copyright     *
 *  (C) 2000. All Rights Reserved.                                           *
 *                                                                           *
 *  GNU Public License Notice:                                               *
 *                                                                           *
 *  Alternatively, the contents of this file may be used under the terms of  *
 *  the GNU Lesser General Public License (the "LGPL"), in which case the    *
 *  provisions of LGPL are applicable instead of those above. If you wish to *
 *  allow use of your version of this file only under the terms of the LGPL  *
 *  and not to allow others to use your version of this file under the SPL,  *
 *  indicate your decision by deleting the provisions above and replace      *
 *  them with the notice and other provisions required by the LGPL. If you   *
 *  do not delete the provisions above, a recipient may use your version of  *
 *  this file under either the SPL or the LGPL.                              *
 *                                                                           *
 *  biaoping.yin (yin-bp@163.com)                                            *
 *                                                                           *
 *****************************************************************************/
package com.frameworkset.common.poolman.sql;

import java.sql.Connection;
import java.util.Map;

import org.apache.log4j.Logger;

import com.frameworkset.common.poolman.management.BaseTableManager;

/**
 * Caches primary-key metadata of database tables for one connection pool.
 * Keys in the cache are the trimmed, lower-cased table names.
 *
 * @author biaoping.yin created on 2005-3-29 version 1.0
 */
public class PrimaryKeyCache {
    private static Logger log = Logger.getLogger(PrimaryKeyCache.class);

    // Name of the database connection pool this cache belongs to.
    private String dbname;

    // Trimmed, lower-cased table name -> primary-key metadata.
    // Typed generics are safe here: the file already depends on
    // java.util.concurrent (Java 5+). The pointless "new HashMap()" copy
    // argument of the previous version was dropped.
    private Map<String, PrimaryKey> id_tables;

    /**
     * Sentinel cached for tables whose primary-key information could not be
     * loaded, so repeated lookups do not hit the database again.
     */
    private static final PrimaryKey NULL_ = new PrimaryKey();

    public PrimaryKeyCache(String dbname) {
        this.dbname = dbname;
        id_tables = new java.util.concurrent.ConcurrentHashMap<String, PrimaryKey>();
    }

    /**
     * Registers primary-key metadata if the table is not cached yet.
     * <p>
     * FIX: the key is now normalized ({@code trim().toLowerCase()}) to match
     * the keys written by {@link #loaderPrimaryKey(Connection, String)} and
     * read by {@link #getIDTable(String)}; previously the raw table name was
     * stored and could never be found by those lookups. putIfAbsent replaces
     * the non-atomic containsKey/put pair.
     */
    public void addIDTable(PrimaryKey primaryKey) {
        id_tables.putIfAbsent(primaryKey.getTableName().trim().toLowerCase(), primaryKey);
    }

    public PrimaryKey getIDTable(String tableName) {
        return getIDTable(null, tableName);
    }

    /**
     * Returns the cached primary-key metadata for the table, loading it on a
     * cache miss; returns null when the table has no primary-key info.
     */
    public PrimaryKey getIDTable(Connection con, String tableName) {
        PrimaryKey key = id_tables.get(tableName.trim().toLowerCase());
        if (key != null) {
            // NULL_ marks a previous unsuccessful load.
            if (key == NULL_)
                return null;
            return key;
        } else {
            key = loaderPrimaryKey(con, tableName);
            return key;
        }
    }

    /**
     * @return Returns the dbname.
     */
    public String getDbname() {
        return dbname;
    }

    /**
     * Loads the table's primary-key information into the cache.
     *
     * @param tableName the table to load
     * @return the loaded metadata, or null when unavailable
     */
    public PrimaryKey loaderPrimaryKey(String tableName) {
        return loaderPrimaryKey(null, tableName);
    }

    /**
     * Loads the table's primary-key information into the cache, caching the
     * NULL_ sentinel when nothing is found so the miss is remembered.
     */
    public PrimaryKey loaderPrimaryKey(Connection con, String tableName) {
        try {
            log.debug("开始装载表【" + tableName + "】的主键信息到缓冲。");
            PrimaryKey key = BaseTableManager.getPoolTableInfo(dbname, con, tableName);
            if (key != null) {
                id_tables.put(key.getTableName().trim().toLowerCase(), key);
                log.debug("完成装载表【" + tableName + "】的主键信息。");
            } else {
                id_tables.put(tableName.trim().toLowerCase(), NULL_);
                log.debug("完成装载表【" + tableName + "】的主键信息,NULL_,");
            }
            return key;
        } catch (Exception ex) {
            log.error(ex.getMessage(), ex);
        }
        return null;
    }

    /** Releases the cache; the instance must not be used afterwards. */
    public void destroy() {
        if (id_tables != null) {
            id_tables.clear();
            id_tables = null;
        }
    }

    /** Empties the cache but keeps the instance usable. */
    public void reset() {
        if (id_tables != null) {
            id_tables.clear();
        }
    }
}
apache-2.0
mynewyear/java_pft
mantis-tests/src/test/java/ru/stqa/pft/mantis/appmanager/HttpSession.java
2406
package ru.stqa.pft.mantis.appmanager;

import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.client.LaxRedirectStrategy;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Thin HTTP client around the Mantis web UI, used by tests to log in and to
 * check the current session state without driving a browser.
 */
public class HttpSession {

  private CloseableHttpClient httpclient;
  private ApplicationManager app;

  public HttpSession(ApplicationManager app) {
    this.app = app;
    // LaxRedirectStrategy also follows redirects after POST, which the login
    // form relies on (it redirects to index.php).
    httpclient = HttpClients.custom().setRedirectStrategy(new LaxRedirectStrategy()).build();
  }

  /**
   * Submits the login form and reports whether the resulting page shows the
   * given user as logged in.
   *
   * @throws IOException on any transport failure
   */
  public boolean login(String username, String password) throws IOException {
    HttpPost post = new HttpPost(app.getProperty("web.baseUrl") + "/login.php");
    List<NameValuePair> params = new ArrayList<NameValuePair>();
    params.add(new BasicNameValuePair("username", username));
    params.add(new BasicNameValuePair("password", password));
    params.add(new BasicNameValuePair("secure_session", "on"));
    params.add(new BasicNameValuePair("return", "index.php"));
    post.setEntity(new UrlEncodedFormEntity(params));
    CloseableHttpResponse response = httpclient.execute(post);
    String body = getTextFrom(response);
    return body.contains(String.format("<span id=\"logged-in-user\">%s</span>", username));
  }

  /**
   * Reads the full response body and always closes the response.
   * (Renamed from the misspelled private helper "geTextFrom".)
   */
  private String getTextFrom(CloseableHttpResponse response) throws IOException {
    try {
      return EntityUtils.toString(response.getEntity());
    } finally {
      response.close();
    }
  }

  /**
   * Fetches the index page and reports whether it shows the given user as
   * currently logged in.
   */
  public boolean isLoggedInAs(String username) throws IOException {
    HttpGet get = new HttpGet(app.getProperty("web.baseUrl") + "/index.php");
    CloseableHttpResponse response = httpclient.execute(get);
    String body = getTextFrom(response);
    return body.contains(String.format("<span id=\"logged-in-user\">%s</span>", username));
  }
}
apache-2.0
mpi2/exportlibrary
exportlibrary.xmlvalidationresourcescollection/src/test/java/org/mousephenotype/dcc/exportlibrary/xmlvalidationresourcescollection/impress/utils/InstantiatorTest.java
3784
/**
 * Copyright (C) 2013 Julian Atienza Herrero <j.atienza at har.mrc.ac.uk>
 *
 * MEDICAL RESEARCH COUNCIL UK MRC
 *
 * Harwell Mammalian Genetics Unit
 *
 * http://www.har.mrc.ac.uk
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.mousephenotype.dcc.exportlibrary.xmlvalidationresourcescollection.impress.utils;

import java.lang.reflect.InvocationTargetException;
import java.math.BigInteger;
import java.util.HashMap;

import junit.framework.Assert;

import org.junit.Test;
import org.mousephenotype.dcc.exportlibrary.xmlvalidationdatastructure.external.impress.ImpressPipeline;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests that {@code Instantiator.getInstance} populates an
 * {@link ImpressPipeline} from a string attribute map identically to a
 * manually built instance.
 *
 * @author julian
 */
public class InstantiatorTest {

    protected static final Logger logger = LoggerFactory.getLogger(InstantiatorTest.class);

    /*
     * Attributes defined by the XML schema for a pipeline:
     * <xs:attribute name="is_deprecated" type="xs:boolean" use="required"/>
     * <xs:attribute name="pipeline_name" type="xs:string" use="required"/>
     * <xs:attribute name="minor_version" type="xs:integer" use="required"/>
     * <xs:attribute name="pipeline_id" type="xs:integer" use="required"/>
     * <xs:attribute name="description" type="xs:string" use="required"/>
     * <xs:attribute name="major_version" type="xs:integer" use="required"/>
     * <xs:attribute name="pipeline_key" type="xs:string" use="required"/>
     */

    /**
     * Builds the raw attribute map fed to the Instantiator. Note that
     * "description" is deliberately commented out both here and in the
     * expected object below, so the equality assertion still holds.
     */
    public static HashMap<String, String> getImpressPipelineMap() {
        HashMap<String, String> map = new HashMap<String, String>();
        map.put("is_deprecated", "false");
        map.put("pipeline_name", "pipeline_name");
        map.put("minor_version", "1");
        map.put("pipeline_id", "456");
        //map.put("description", "description");
        map.put("major_version", "2");
        map.put("pipeline_key", "pipeline_key");
        return map;
    }

    @Test
    public void testImpressPipeline() {
        ImpressPipeline impressPipeline = new ImpressPipeline();
        HashMap<String, String> map = InstantiatorTest.getImpressPipelineMap();
        try {
            Instantiator.getInstance(ImpressPipeline.class, impressPipeline, map);
        } catch (NoSuchFieldException | IllegalArgumentException | IllegalAccessException
                | NoSuchMethodException | InvocationTargetException ex) {
            // Collapsed five identical catch blocks into a single multi-catch
            // (Java 7+); the handling — log and fail — is unchanged.
            logger.error("", ex);
            Assert.fail();
        }

        // Expected instance, built by hand with the same values as the map.
        ImpressPipeline impressPipeline2 = new ImpressPipeline();
        impressPipeline2.setIsDeprecated(false);
        impressPipeline2.setPipelineName("pipeline_name");
        impressPipeline2.setMinorVersion(BigInteger.valueOf(1L));
        impressPipeline2.setPipelineId(BigInteger.valueOf(456L));
        //impressPipeline2.setDescription("description");
        impressPipeline2.setMajorVersion(BigInteger.valueOf(2L));
        impressPipeline2.setPipelineKey("pipeline_key");

        Assert.assertEquals(impressPipeline2, impressPipeline);
    }
}
apache-2.0
thilinamb/debs14-grand-challenge
src/main/java/edu/colostate/cs/storm/util/OutlierTracker.java
810
package edu.colostate.cs.storm.util; import java.util.HashSet; import java.util.Set; /** * Author: Thilina * Date: 12/6/14 */ public class OutlierTracker { private Set<String> completeSet = new HashSet<String>(); private Set<String> outlierSet = new HashSet<String>(); public void addMember(String key){ completeSet.add(key); } public void addOutlier(String key){ outlierSet.add(key); } public void removeOutlier(String key){ outlierSet.remove(key); } public boolean isOutlier(String key){ return outlierSet.contains(key); } public boolean isMember(String key){ return completeSet.contains(key); } public double getCurrentPercentage(){ return (outlierSet.size() * 1.0)/(completeSet.size()); } }
apache-2.0
CesarPantoja/jena
jena-arq/src/main/java/org/apache/jena/riot/system/StreamRDFLib.java
7950
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.riot.system;

import java.io.OutputStream ;
import java.io.Writer ;

import org.apache.jena.atlas.io.AWriter ;
import org.apache.jena.atlas.io.IO ;
import org.apache.jena.atlas.lib.CharSpace ;
import org.apache.jena.atlas.lib.Sink ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.riot.lang.StreamRDFCounting ;
import org.apache.jena.riot.writer.WriterStreamRDFPlain ;
import org.apache.jena.shared.JenaException ;
import org.apache.jena.shared.PrefixMapping ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.Quad ;

/** Various Common StreamRDF setups */
public class StreamRDFLib
{
    /** Send everything to nowhere ... efficiently */
    public static StreamRDF sinkNull() { return new StreamRDFBase() ; }

    /** Write a plain serialization (UTF-8) to the output stream. */
    public static StreamRDF writer(OutputStream out) { return new WriterStreamRDFPlain(IO.wrapUTF8(out)) ; }

    /** Write a plain serialization to the given {@link AWriter}. */
    public static StreamRDF writer(AWriter out) { return new WriterStreamRDFPlain(out) ; }

    /** Write a plain serialization to the given {@link Writer}. */
    public static StreamRDF writer(Writer out) { return new WriterStreamRDFPlain(IO.wrap(out)) ; }

    /** Write a plain serialization; ASCII output escapes non-ASCII characters, UTF8 is the default. */
    public static StreamRDF writer(OutputStream out, CharSpace charSpace) {
        switch (charSpace) {
            case ASCII:
                return new WriterStreamRDFPlain(IO.wrapASCII(out), charSpace);
            case UTF8:
            default:
                return writer(out);
        }
    }

    /** Write a plain serialization with the given character space. */
    public static StreamRDF writer(AWriter out, CharSpace charSpace) {
        return new WriterStreamRDFPlain(out, charSpace);
    }

    /** Write a plain serialization with the given character space. */
    public static StreamRDF writer(Writer out, CharSpace charSpace) {
        return new WriterStreamRDFPlain(IO.wrap(out), charSpace);
    }

    /** Send triples (and prefixes) to a {@link Graph}; quads are accepted only for the default graph. */
    public static StreamRDF graph(Graph graph) { return new ParserOutputGraph(graph) ; }

    /** Send triples and quads to a {@link DatasetGraph}. */
    public static StreamRDF dataset(DatasetGraph dataset) { return new ParserOutputDataset(dataset) ; }

    /**
     * Output to a sink; prefix and base handled only within the parser.
     * Unfortunately, Java needs different names for the triples and
     * quads versions because of type erasure.
     */
    public static StreamRDF sinkTriples(Sink<Triple> sink) { return new ParserOutputSinkTriples(sink) ; }

    /**
     * Output to a sink; prefix and base handled only within the parser.
     * Unfortunately, Java needs different names for the triples and
     * quads versions because of type erasure.
     */
    public static StreamRDF sinkQuads(Sink<Quad> sink) { return new ParserOutputSinkQuads(sink) ; }

    /** Convert any triples seen to a quads, adding a graph node of {@link Quad#tripleInQuad} */
    public static StreamRDF extendTriplesToQuads(StreamRDF base)
    { return extendTriplesToQuads(Quad.tripleInQuad, base) ; }

    /** Convert any triples seen to a quads, adding the specified graph node */
    public static StreamRDF extendTriplesToQuads(Node graphNode, StreamRDF base)
    { return new ParserOutputSinkTriplesToQuads(graphNode, base) ; }

    /** Count triples/quads/tuples seen; discards the data itself. */
    public static StreamRDFCounting count()
    { return new StreamRDFCountingBase(sinkNull()) ; }

    /** Count triples/quads/tuples while also forwarding everything to {@code other}. */
    public static StreamRDFCounting count(StreamRDF other)
    { return new StreamRDFCountingBase(other) ; }

    /** Wrapper that rewrites each incoming triple as a quad in a fixed graph. */
    private static class ParserOutputSinkTriplesToQuads extends StreamRDFWrapper
    {
        private final Node gn ;
        ParserOutputSinkTriplesToQuads(Node gn, StreamRDF base)
        { super(base) ; this.gn = gn ; }

        @Override public void triple(Triple triple)
        { other.quad(new Quad(gn, triple)) ; }
    }

    /** Forwards triples to a {@link Sink}; {@code finish} flushes the sink. */
    private static class ParserOutputSinkTriples extends StreamRDFBase
    {
        private final Sink<Triple> sink ;
        public ParserOutputSinkTriples(Sink<Triple> sink) { this.sink = sink ; }

        @Override
        public void triple(Triple triple)
        { sink.send(triple) ; }

        @Override
        public void finish()
        { sink.flush() ; }
    }

    /** Forwards quads to a {@link Sink}; {@code finish} flushes the sink. */
    private static class ParserOutputSinkQuads extends StreamRDFBase
    {
        private final Sink<Quad> sink ;
        public ParserOutputSinkQuads(Sink<Quad> sink) { this.sink = sink ; }

        @Override
        public void quad(Quad quad)
        { sink.send(quad) ; }

        @Override
        public void finish()
        { sink.flush() ; }
    }

    /**
     * Accumulates stream output into a {@link Graph}.
     * Quads are accepted only if they are triples or default-graph quads;
     * other named-graph data is dropped with a single (once-only) warning.
     */
    private static class ParserOutputGraph extends StreamRDFBase
    {
        protected final Graph graph ;
        // Ensures the "named graph data ignored" warning is emitted at most once.
        protected boolean warningIssued = false ;
        public ParserOutputGraph(Graph graph) { this.graph = graph ; }

        @Override public void triple(Triple triple)     { graph.add(triple) ; }

        @Override public void quad(Quad quad)
        {
            if ( quad.isTriple() || quad.isDefaultGraph() )
                graph.add(quad.asTriple()) ;
            else
            {
                if ( ! warningIssued )
                {
                    //SysRIOT.getLogger().warn("Only triples or default graph data expected : named graph data ignored") ;
                    // Not ideal - assumes the global default.
                    ErrorHandlerFactory.getDefaultErrorHandler().warning("Only triples or default graph data expected : named graph data ignored", -1, -1) ;
                }
                warningIssued = true ;
            }
            //throw new IllegalStateException("Quad passed to graph parsing") ;
        }

        @Override public void base(String base)
        { }

        @Override public void prefix(String prefix, String uri)
        {
            try { // Jena applies XML rules to prefixes.
                graph.getPrefixMapping().setNsPrefix(prefix, uri) ;
            } catch (JenaException ex) {}
        }
    }

    /**
     * Accumulates stream output into a {@link DatasetGraph}.
     * Triples (and triple-quads) go to the generated default graph node;
     * prefixes are recorded on the default graph's prefix mapping.
     */
    private static class ParserOutputDataset extends StreamRDFBase
    {
        protected final DatasetGraph dsg ;
        protected final PrefixMapping prefixMapping ;

        public ParserOutputDataset(DatasetGraph dsg)
        {
            this.dsg = dsg ;
            this.prefixMapping = dsg.getDefaultGraph().getPrefixMapping() ;
            // = dsg.getPrefixMapping().setNsPrefix(prefix, uri) ;
        }

        @Override public void triple(Triple triple)
        {
            dsg.add(Quad.defaultGraphNodeGenerated, triple.getSubject(), triple.getPredicate(), triple.getObject()) ;
            //throw new IllegalStateException("Triple passed to dataset parsing") ;
        }

        @Override public void quad(Quad quad)
        {
            if ( quad.isTriple() )
                dsg.add(Quad.defaultGraphNodeGenerated, quad.getSubject(), quad.getPredicate(), quad.getObject()) ;
            else
                dsg.add(quad) ;
        }

        @Override public void base(String base)
        { }

        @Override public void prefix(String prefix, String uri)
        {
            try { // Jena applies XML rules to prefixes.
                prefixMapping.setNsPrefix(prefix, uri) ;
            } catch (JenaException ex) {}
        }
    }
}
apache-2.0
mike-tr-adamson/java-driver
driver-core/src/test/java/com/datastax/driver/core/PreparedStatementTest.java
19674
/*
 * Copyright (C) 2012-2015 DataStax Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.driver.core;

import com.datastax.driver.core.exceptions.InvalidQueryException;
import com.datastax.driver.core.exceptions.UnsupportedFeatureException;
import com.datastax.driver.core.utils.CassandraVersion;
import com.google.common.collect.ImmutableList;
import org.testng.annotations.Test;

import java.net.InetAddress;
import java.util.*;

import static com.datastax.driver.core.TestUtils.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.*;

/**
 * Prepared statement tests.
 * <p/>
 * Note: this class also happens to test all the get methods from Row.
 */
public class PreparedStatementTest extends CCMBridge.PerClassSingleNodeCluster {

    private static final String ALL_NATIVE_TABLE = "all_native";
    private static final String ALL_LIST_TABLE = "all_list";
    private static final String ALL_SET_TABLE = "all_set";
    private static final String ALL_MAP_TABLE = "all_map";
    private static final String SIMPLE_TABLE = "test";
    private static final String SIMPLE_TABLE2 = "test2";

    // Counter columns cannot be written like the other primitive types, so
    // they are excluded from the generated fixture tables and the loops below.
    private boolean exclude(DataType t) {
        return t.getName() == DataType.Name.COUNTER;
    }

    /**
     * Builds the fixture schema: one table with a column per native type, one
     * per list type, one per set type, one per map key/value combination,
     * plus two simple key/value tables.
     */
    @Override
    protected Collection<String> getTableDefinitions() {
        List<String> defs = new ArrayList<String>(4);

        StringBuilder sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(ALL_NATIVE_TABLE).append(" (k text PRIMARY KEY");
        for (DataType type : DataType.allPrimitiveTypes()) {
            if (exclude(type)) continue;
            sb.append(", c_").append(type).append(' ').append(type);
        }
        sb.append(')');
        defs.add(sb.toString());

        sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(ALL_LIST_TABLE).append(" (k text PRIMARY KEY");
        for (DataType type : DataType.allPrimitiveTypes()) {
            if (exclude(type)) continue;
            sb.append(", c_list_").append(type).append(" list<").append(type).append('>');
        }
        sb.append(')');
        defs.add(sb.toString());

        sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(ALL_SET_TABLE).append(" (k text PRIMARY KEY");
        for (DataType type : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(type)) continue;
            sb.append(", c_set_").append(type).append(" set<").append(type).append('>');
        }
        sb.append(')');
        defs.add(sb.toString());

        sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(ALL_MAP_TABLE).append(" (k text PRIMARY KEY");
        for (DataType keyType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(keyType)) continue;
            for (DataType valueType : DataType.allPrimitiveTypes()) {
                // This must be handled separately
                if (exclude(valueType)) continue;
                sb.append(", c_map_").append(keyType).append('_').append(valueType).append(" map<").append(keyType).append(',').append(valueType).append('>');
            }
        }
        sb.append(')');
        defs.add(sb.toString());

        defs.add(String.format("CREATE TABLE %s (k text PRIMARY KEY, i int)", SIMPLE_TABLE));
        defs.add(String.format("CREATE TABLE %s (k text PRIMARY KEY, v text)", SIMPLE_TABLE2));
        return defs;
    }

    // Disable request debouncing so schema/DDL changes are visible immediately.
    @Override
    protected Cluster.Builder configure(Cluster.Builder builder) {
        return builder.withQueryOptions(TestUtils.nonDebouncingQueryOptions());
    }

    /** Round-trips every native type through a prepared INSERT and a SELECT. */
    @Test(groups = "short")
    public void preparedNativeTest() {
        // Test preparing/bounding for all native types
        for (DataType type : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(type)) continue;

            String name = "c_" + type;
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_native', ?)", ALL_NATIVE_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, getFixedValue(type)));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_native'", name, ALL_NATIVE_TABLE)).one();
            assertEquals(getValue(row, name, type), getFixedValue(type), "For type " + type);
        }
    }

    /**
     * Almost the same as preparedNativeTest, but it uses getFixedValue2() instead.
     */
    @Test(groups = "short")
    public void preparedNativeTest2() {
        // Test preparing/bounding for all native types
        for (DataType type : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(type)) continue;

            String name = "c_" + type;
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_native', ?)", ALL_NATIVE_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, getFixedValue2(type)));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_native'", name, ALL_NATIVE_TABLE)).one();
            assertEquals(getValue(row, name, type), getFixedValue2(type), "For type " + type);
        }
    }

    /** Round-trips a list of every element type through a prepared INSERT. */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareListTest() {
        // Test preparing/bounding for all possible list types
        for (DataType rawType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawType)) continue;

            String name = "c_list_" + rawType;
            DataType type = DataType.list(rawType);
            List<Object> value = (List<Object>) getFixedValue(type); ; // (stray empty statement in original, kept verbatim)
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_list', ?)", ALL_LIST_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, value));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_list'", name, ALL_LIST_TABLE)).one();
            assertEquals(getValue(row, name, type), value, "For type " + type);
        }
    }

    /**
     * Almost the same as prepareListTest, but it uses getFixedValue2() instead.
     */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareListTest2() {
        // Test preparing/bounding for all possible list types
        for (DataType rawType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawType)) continue;

            String name = "c_list_" + rawType;
            DataType type = DataType.list(rawType);
            List<Object> value = (List<Object>) getFixedValue2(type); ; // (stray empty statement in original, kept verbatim)
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_list', ?)", ALL_LIST_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, value));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_list'", name, ALL_LIST_TABLE)).one();
            assertEquals(getValue(row, name, type), value, "For type " + type);
        }
    }

    /** Round-trips a set of every element type through a prepared INSERT. */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareSetTest() {
        // Test preparing/bounding for all possible set types
        for (DataType rawType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawType)) continue;

            String name = "c_set_" + rawType;
            DataType type = DataType.set(rawType);
            Set<Object> value = (Set<Object>) getFixedValue(type); ; // (stray empty statement in original, kept verbatim)
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_set', ?)", ALL_SET_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, value));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_set'", name, ALL_SET_TABLE)).one();
            assertEquals(getValue(row, name, type), value, "For type " + type);
        }
    }

    /**
     * Almost the same as prepareSetTest, but it uses getFixedValue2() instead.
     */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareSetTest2() {
        // Test preparing/bounding for all possible set types
        for (DataType rawType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawType)) continue;

            String name = "c_set_" + rawType;
            DataType type = DataType.set(rawType);
            Set<Object> value = (Set<Object>) getFixedValue2(type); ; // (stray empty statement in original, kept verbatim)
            PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_set', ?)", ALL_SET_TABLE, name));
            BoundStatement bs = ps.bind();
            session.execute(setBoundValue(bs, name, type, value));

            Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_set'", name, ALL_SET_TABLE)).one();
            assertEquals(getValue(row, name, type), value, "For type " + type);
        }
    }

    /** Round-trips a map for every key/value type combination. */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareMapTest() {
        // Test preparing/bounding for all possible map types
        for (DataType rawKeyType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawKeyType)) continue;

            for (DataType rawValueType : DataType.allPrimitiveTypes()) {
                // This must be handled separately
                if (exclude(rawValueType)) continue;

                String name = "c_map_" + rawKeyType + '_' + rawValueType;
                DataType type = DataType.map(rawKeyType, rawValueType);
                Map<Object, Object> value = (Map<Object, Object>) getFixedValue(type); ; // (stray empty statement in original, kept verbatim)
                PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_map', ?)", ALL_MAP_TABLE, name));
                BoundStatement bs = ps.bind();
                session.execute(setBoundValue(bs, name, type, value));

                Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_map'", name, ALL_MAP_TABLE)).one();
                assertEquals(getValue(row, name, type), value, "For type " + type);
            }
        }
    }

    /**
     * Almost the same as prepareMapTest, but it uses getFixedValue2() instead.
     */
    @Test(groups = "short")
    @SuppressWarnings("unchecked")
    public void prepareMapTest2() {
        // Test preparing/bounding for all possible map types
        for (DataType rawKeyType : DataType.allPrimitiveTypes()) {
            // This must be handled separately
            if (exclude(rawKeyType)) continue;

            for (DataType rawValueType : DataType.allPrimitiveTypes()) {
                // This must be handled separately
                if (exclude(rawValueType)) continue;

                String name = "c_map_" + rawKeyType + '_' + rawValueType;
                DataType type = DataType.map(rawKeyType, rawValueType);
                Map<Object, Object> value = (Map<Object, Object>) getFixedValue2(type); ; // (stray empty statement in original, kept verbatim)
                PreparedStatement ps = session.prepare(String.format("INSERT INTO %s(k, %s) VALUES ('prepared_map', ?)", ALL_MAP_TABLE, name));
                BoundStatement bs = ps.bind();
                session.execute(setBoundValue(bs, name, type, value));

                Row row = session.execute(String.format("SELECT %s FROM %s WHERE k='prepared_map'", name, ALL_MAP_TABLE)).one();
                assertEquals(getValue(row, name, type), value, "For type " + type);
            }
        }
    }

    /** Verifies nulls can be bound positionally and by name, and read back as null. */
    @Test(groups = "short")
    public void prepareWithNullValuesTest() throws Exception {

        PreparedStatement ps = session.prepare("INSERT INTO " + SIMPLE_TABLE2 + "(k, v) VALUES (?, ?)");

        session.execute(ps.bind("prepWithNull1", null));

        BoundStatement bs = ps.bind();
        bs.setString("k", "prepWithNull2");
        bs.setString("v", null);
        session.execute(bs);

        ResultSet rs = session.execute("SELECT * FROM " + SIMPLE_TABLE2 + " WHERE k IN ('prepWithNull1', 'prepWithNull2')");
        Row r1 = rs.one();
        Row r2 = rs.one();
        assertTrue(rs.isExhausted());

        assertEquals(r1.getString("k"), "prepWithNull1");
        assertEquals(r1.getString("v"), null);

        assertEquals(r2.getString("k"), "prepWithNull2");
        assertEquals(r2.getString("v"), null);
    }

    /** Consistency level and tracing set before prepare() must carry over to bound statements. */
    @Test(groups = "short")
    public void prepareStatementInheritPropertiesTest() {

        RegularStatement toPrepare = new SimpleStatement("SELECT * FROM test WHERE k=?");
        toPrepare.setConsistencyLevel(ConsistencyLevel.QUORUM);
        toPrepare.enableTracing();

        PreparedStatement prepared = session.prepare(toPrepare);
        BoundStatement bs = prepared.bind("someValue");

        assertEquals(ConsistencyLevel.QUORUM, bs.getConsistencyLevel());
        assertTrue(bs.isTracing());
    }

    /**
     * Prints the table definitions that will be used in testing
     * (for exporting purposes)
     */
    @Test(groups = {"docs"})
    public void printTableDefinitions() {
        for (String definition : getTableDefinitions()) {
            System.out.println(definition);
        }
    }

    /** Mixes prepared and simple statements in one batch; skipped on protocol v1 (no batches). */
    @Test(groups = "short")
    public void batchTest() throws Exception {

        try {
            PreparedStatement ps1 = session.prepare("INSERT INTO " + SIMPLE_TABLE2 + "(k, v) VALUES (?, ?)");
            PreparedStatement ps2 = session.prepare("INSERT INTO " + SIMPLE_TABLE2 + "(k, v) VALUES (?, 'bar')");

            BatchStatement bs = new BatchStatement();
            bs.add(ps1.bind("one", "foo"));
            bs.add(ps2.bind("two"));
            bs.add(new SimpleStatement("INSERT INTO " + SIMPLE_TABLE2 + " (k, v) VALUES ('three', 'foobar')"));

            session.execute(bs);

            List<Row> all = session.execute("SELECT * FROM " + SIMPLE_TABLE2).all();

            assertEquals("three", all.get(0).getString("k"));
            assertEquals("foobar", all.get(0).getString("v"));

            assertEquals("one", all.get(1).getString("k"));
            assertEquals("foo", all.get(1).getString("v"));

            assertEquals("two", all.get(2).getString("k"));
            assertEquals("bar", all.get(2).getString("v"));
        } catch (UnsupportedFeatureException e) {
            // This is expected when testing the protocol v1
            if (cluster.getConfiguration().getProtocolOptions().getProtocolVersionEnum() != ProtocolVersion.V1)
                throw e;
        }
    }

    /** Executing a bound statement with an unset variable must fail fast. */
    @Test(groups = "short", expectedExceptions = {IllegalStateException.class})
    public void unboundVariableInBoundStatementTest() {
        PreparedStatement ps = session.prepare("INSERT INTO " + SIMPLE_TABLE + " (k, i) VALUES (?, ?)");
        BoundStatement bs = ps.bind("k");
        assertFalse(bs.isSet("i"));
        session.execute(bs);
    }

    /** Same as above but inside a batch (requires Cassandra 2.0+ for batches). */
    @Test(groups = "short", expectedExceptions = {IllegalStateException.class})
    @CassandraVersion(major = 2.0)
    public void unboundVariableInBatchStatementTest() {
        PreparedStatement ps = session.prepare("INSERT INTO " + SIMPLE_TABLE + " (k, i) VALUES (?, ?)");
        BatchStatement batch = new BatchStatement();
        batch.add(ps.bind("k"));
        session.execute(batch);
    }

    @Test(groups = "short")
    public void should_set_routing_key_on_case_insensitive_keyspace_and_table() {
        session.execute(String.format("CREATE TABLE %s.foo (i int PRIMARY KEY)", keyspace));

        PreparedStatement ps = session.prepare(String.format("INSERT INTO %s.foo (i) VALUES (?)", keyspace));
        BoundStatement bs = ps.bind(1);
        assertThat(bs.getRoutingKey()).isNotNull();
    }

    @Test(groups = "short")
    public void should_set_routing_key_on_case_sensitive_keyspace_and_table() {
        session.execute("CREATE KEYSPACE \"Test\" WITH replication = { " +
                " 'class': 'SimpleStrategy'," +
                " 'replication_factor': '1'" +
                "}");
        session.execute("CREATE TABLE \"Test\".\"Foo\" (i int PRIMARY KEY)");

        PreparedStatement ps = session.prepare("INSERT INTO \"Test\".\"Foo\" (i) VALUES (?)");
        BoundStatement bs = ps.bind(1);
        assertThat(bs.getRoutingKey()).isNotNull();
    }

    /** Binding a statement prepared on a different cluster must be rejected client-side. */
    @Test(groups = "short", expectedExceptions = InvalidQueryException.class)
    public void should_fail_when_prepared_on_another_cluster() throws Exception {
        Cluster otherCluster = Cluster.builder()
                .addContactPointsWithPorts(ImmutableList.of(hostAddress))
                .build();
        try {
            PreparedStatement pst = otherCluster.connect().prepare("select * from system.peers where inet = ?");
            BoundStatement bs = pst.bind().setInet(0, InetAddress.getByName("localhost"));

            // We expect that the error gets detected without a roundtrip to the server, so use executeAsync
            session.executeAsync(bs);
        } finally {
            otherCluster.close();
        }
    }

    /** The idempotent flag (unset/true/false) must propagate from statement to prepared to bound. */
    @Test(groups = "short")
    public void should_propagate_idempotence_in_statements() {
        session.execute(String.format("CREATE TABLE %s.idempotencetest (i int PRIMARY KEY)", keyspace));

        SimpleStatement statement;
        IdempotenceAwarePreparedStatement prepared;
        BoundStatement bound;

        statement = new SimpleStatement(String.format("SELECT * FROM %s.idempotencetest WHERE i = ?", keyspace));
        prepared = (IdempotenceAwarePreparedStatement) session.prepare(statement);
        bound = prepared.bind(1);
        assertThat(prepared.isIdempotent()).isNull();
        assertThat(bound.isIdempotent()).isNull();

        statement.setIdempotent(true);
        prepared = (IdempotenceAwarePreparedStatement) session.prepare(statement);
        bound = prepared.bind(1);
        assertThat(prepared.isIdempotent()).isTrue();
        assertThat(bound.isIdempotent()).isTrue();

        statement.setIdempotent(false);
        prepared = (IdempotenceAwarePreparedStatement) session.prepare(statement);
        bound = prepared.bind(1);
        assertThat(prepared.isIdempotent()).isFalse();
        assertThat(bound.isIdempotent()).isFalse();

        prepared.setIdempotent(true);
        bound = prepared.bind(1);
        assertThat(bound.isIdempotent()).isTrue();
    }
}
apache-2.0
alibaba/nacos
api/src/main/java/com/alibaba/nacos/api/remote/response/ResponseCode.java
1875
/* * Copyright 1999-2020 Alibaba Group Holding Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.nacos.api.remote.response; /** * ResponseCode. * * @author liuzunfei * @version $Id: ResponseCode.java, v 0.1 2020年07月14日 2:04 PM liuzunfei Exp $ */ public enum ResponseCode { /** * Request success. */ SUCCESS(200, "Response ok"), /** * Request failed. */ FAIL(500, "Response fail"); int code; String desc; ResponseCode(int code, String desc) { this.code = code; this.desc = desc; } /** * Getter method for property <tt>code</tt>. * * @return property value of code */ public int getCode() { return code; } /** * Setter method for property <tt>code</tt>. * * @param code value to be assigned to property code */ public void setCode(int code) { this.code = code; } /** * Getter method for property <tt>desc</tt>. * * @return property value of desc */ public String getDesc() { return desc; } /** * Setter method for property <tt>desc</tt>. * * @param desc value to be assigned to property desc */ public void setDesc(String desc) { this.desc = desc; } }
apache-2.0
JoshEliYang/PriceTag
src/main/java/cn/springmvc/service/ShopService.java
65
package cn.springmvc.service;

/**
 * Service-layer contract for shop-related operations.
 *
 * NOTE(review): intentionally empty at present — no operations have been
 * declared yet; implementations exist only as Spring-wired placeholders.
 */
public interface ShopService {

}
apache-2.0
is-apps/NotificationPortlet
notification-portlet-webapp/src/main/java/org/jasig/portlet/notice/service/jpa/action/CompleteOnRedirectAction.java
2861
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portlet.notice.service.jpa.action;

import java.io.IOException;
import java.util.Date;
import java.util.Map;

import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;

import org.jasig.portlet.notice.NotificationAction;
import org.jasig.portlet.notice.NotificationEntry;
import org.jasig.portlet.notice.NotificationState;
import org.jasig.portlet.notice.service.CacheNotificationService;
import org.jasig.portlet.notice.service.jpa.JpaNotificationService;
import org.jasig.portlet.notice.util.SpringContext;

/**
 * A {@link NotificationAction} that records the COMPLETED state on its target
 * entry (at most once) and then redirects the user to the entry's URL.
 *
 * @author mglazier
 */
public class CompleteOnRedirectAction extends NotificationAction {

    // Required because NotificationAction is Serializable.
    private static final long serialVersionUID = 1L;

    public CompleteOnRedirectAction() {
        // Provide a sensible (default) label;  most
        // use cases will use the setter and override
        setLabel("COMPLETE");
    }

    public CompleteOnRedirectAction(String label) {
        setLabel(label);
    }

    /**
     * When invoke is called, a configured notification state is set for the entry if it has not already been set.
     * {@link JpaNotificationService} and {@link CacheNotificationService} are used here to add the entry state and clear
     * the cache for the user. This class is not managed by Spring, so these objects must be obtained using the
     * Spring context that {@link SpringContext} provides.
     *
     * @param req the portlet action request
     * @param res the portlet action response used for the redirect
     * @throws IOException if the redirect cannot be sent
     */
    @Override
    public void invoke(final ActionRequest req, final ActionResponse res) throws IOException {
        // Not Spring-managed, so the service must be looked up from the application context by bean name.
        JpaNotificationService jpaService = (JpaNotificationService) SpringContext.getApplicationContext().getBean("jpaNotificationService");

        final NotificationEntry entry = getTarget();
        Map<NotificationState, Date> stateMap = entry.getStates();
        // Record COMPLETED only once per entry.
        // NOTE(review): if getStates() returns null the state is never recorded
        // (only the redirect happens) — confirm that is the intended behavior.
        if (stateMap != null && !stateMap.containsKey(NotificationState.COMPLETED)) {
            jpaService.addEntryState(req, entry.getId(), NotificationState.COMPLETED);
        }
        res.sendRedirect(entry.getUrl());
    }
}
apache-2.0
qafedev/qafe-platform
qafe-business/src/test/java/test/com/qualogy/qafe/business/integration/adapter/DummyPersonMoreComplexObject.java
1065
/**
 * Copyright 2008-2017 Qualogy Solutions B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package test.com.qualogy.qafe.business.integration.adapter;

import java.util.HashMap;
import java.util.Map;

/**
 * Test fixture: a {@code DummyPerson} extended with an arbitrary bag of named
 * member values, used to exercise adapter mapping of more complex objects.
 */
public class DummyPersonMoreComplexObject extends DummyPerson {

    // Arbitrary named values attached to this person.
    Map<String, Object> members = new HashMap<String, Object>();

    /** Creates an instance with no name set. */
    public DummyPersonMoreComplexObject() {
        super();
    }

    /** Creates an instance with the given first and last name. */
    public DummyPersonMoreComplexObject(String name, String lastName) {
        super(name, lastName);
    }

    /** Stores {@code value} under {@code key}, replacing any previous value. */
    public void add(String key, Object value) {
        this.members.put(key, value);
    }
}
apache-2.0
TatsianaKasiankova/pentaho-kettle
engine/src/main/java/org/pentaho/di/job/entries/deletefolders/JobEntryDeleteFolders.java
15930
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.job.entries.deletefolders;

import org.pentaho.di.job.entry.validator.AbstractFileValidator;
import org.pentaho.di.job.entry.validator.AndValidator;
import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils;

import java.io.IOException;
import java.util.List;

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSelectInfo;
import org.apache.commons.vfs2.FileSelector;
import org.apache.commons.vfs2.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;

/**
 * This defines a 'delete folders' job entry.
 *
 * <p>The entry deletes one or more folders (taken either from previous result rows or from the
 * configured argument list) and evaluates success against a configurable condition: no errors,
 * fewer errors than a limit, or at least N folders deleted.</p>
 *
 * @author Samatar Hassan
 * @since 13-05-2008
 */
public class JobEntryDeleteFolders extends JobEntryBase implements Cloneable, JobEntryInterface {
  private static Class<?> PKG = JobEntryDeleteFolders.class; // for i18n purposes, needed by Translator2!!

  // When true, folder names come from the previous entry's result rows instead of 'arguments'.
  public boolean argFromPrevious;

  // Folder paths (may contain variables) configured in the dialog.
  public String[] arguments;

  // One of the SUCCESS_IF_* values below.
  private String success_condition;

  // NOTE(review): these look like constants but are declared as mutable instance fields;
  // kept as-is because external code may reference them through an instance.
  public String SUCCESS_IF_AT_LEAST_X_FOLDERS_DELETED = "success_when_at_least";
  public String SUCCESS_IF_ERRORS_LESS = "success_if_errors_less";
  public String SUCCESS_IF_NO_ERRORS = "success_if_no_errors";

  // Threshold (as a string, may contain variables) used by the limit-based success conditions.
  private String limit_folders;

  // Runtime counters, reset at the start of each execute() call.
  int NrErrors = 0;
  int NrSuccess = 0;
  boolean successConditionBroken = false;
  boolean successConditionBrokenExit = false;
  int limitFolders = 0;

  /**
   * Creates the entry with the given name; defaults to "success if no errors" and a limit of 10.
   *
   * @param n the job entry name
   */
  public JobEntryDeleteFolders( String n ) {
    super( n, "" );
    argFromPrevious = false;
    arguments = null;

    success_condition = SUCCESS_IF_NO_ERRORS;
    limit_folders = "10";
  }

  /** Creates an unnamed entry with default settings. */
  public JobEntryDeleteFolders() {
    this( "" );
  }

  /**
   * Allocates the arguments array for the given number of folder fields.
   *
   * @param nrFields number of folder arguments
   */
  public void allocate( int nrFields ) {
    arguments = new String[nrFields];
  }

  /** Deep-copies the entry, including its arguments array. */
  public Object clone() {
    JobEntryDeleteFolders je = (JobEntryDeleteFolders) super.clone();
    if ( arguments != null ) {
      int nrFields = arguments.length;
      je.allocate( nrFields );
      System.arraycopy( arguments, 0, je.arguments, 0, nrFields );
    }
    return je;
  }

  /**
   * Serializes the entry settings to XML; also registers each folder URL with the
   * named-cluster embed manager when a parent job meta is present.
   */
  public String getXML() {
    StringBuilder retval = new StringBuilder( 300 );

    retval.append( super.getXML() );
    retval.append( " " ).append( XMLHandler.addTagValue( "arg_from_previous", argFromPrevious ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "success_condition", success_condition ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "limit_folders", limit_folders ) );

    retval.append( " <fields>" ).append( Const.CR );
    if ( arguments != null ) {
      for ( int i = 0; i < arguments.length; i++ ) {
        retval.append( " <field>" ).append( Const.CR );
        retval.append( " " ).append( XMLHandler.addTagValue( "name", arguments[i] ) );
        retval.append( " </field>" ).append( Const.CR );
        if ( parentJobMeta != null ) {
          parentJobMeta.getNamedClusterEmbedManager().registerUrl( arguments[i] );
        }
      }
    }
    retval.append( " </fields>" ).append( Const.CR );

    return retval.toString();
  }

  /** Restores the entry settings from the given XML node. */
  public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
    IMetaStore metaStore ) throws KettleXMLException {
    try {
      super.loadXML( entrynode, databases, slaveServers );
      argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
      success_condition = XMLHandler.getTagValue( entrynode, "success_condition" );
      limit_folders = XMLHandler.getTagValue( entrynode, "limit_folders" );

      Node fields = XMLHandler.getSubNode( entrynode, "fields" );

      // How many field arguments?
      int nrFields = XMLHandler.countNodes( fields, "field" );
      allocate( nrFields );

      // Read them all...
      for ( int i = 0; i < nrFields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
        arguments[i] = XMLHandler.getTagValue( fnode, "name" );
      }
    } catch ( KettleXMLException xe ) {
      throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToLoadFromXml" ), xe );
    }
  }

  /** Restores the entry settings from the repository. */
  public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
    try {
      argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
      limit_folders = rep.getJobEntryAttributeString( id_jobentry, "limit_folders" );
      success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );

      // How many arguments?
      int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" );
      allocate( argnr );

      // Read them all...
      for ( int a = 0; a < argnr; a++ ) {
        arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" );
      }
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToLoadFromRepo", String
        .valueOf( id_jobentry ) ), dbe );
    }
  }

  /** Persists the entry settings to the repository. */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
    try {
      rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", argFromPrevious );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "limit_folders", limit_folders );
      rep.saveJobEntryAttribute( id_job, getObjectId(), "success_condition", success_condition );

      // save the arguments...
      if ( arguments != null ) {
        for ( int i = 0; i < arguments.length; i++ ) {
          rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", arguments[i] );
        }
      }
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToSaveToRepo", String
        .valueOf( id_job ) ), dbe );
    }
  }

  /**
   * Deletes the configured folders (or those from previous result rows) and sets the result
   * according to the configured success condition.
   *
   * <p>The result starts out as failed (1 error); counters are reset, then each folder is
   * processed until either the list is exhausted, the parent job is stopped, or the success
   * condition is broken. The number of deleted folders is reported via setNrLinesDeleted.</p>
   *
   * @param result the result of the previous entry, also used to return this entry's outcome
   * @param nr the entry number (unused here)
   * @return the updated result
   */
  public Result execute( Result result, int nr ) throws KettleException {
    List<RowMetaAndData> rows = result.getRows();

    // Assume failure until the success condition is evaluated at the end.
    result.setNrErrors( 1 );
    result.setResult( false );

    NrErrors = 0;
    NrSuccess = 0;
    successConditionBroken = false;
    successConditionBrokenExit = false;
    limitFolders = Const.toInt( environmentSubstitute( getLimitFolders() ), 10 );

    //Set Embedded NamedCluter MetatStore Provider Key so that it can be passed to VFS
    if ( parentJobMeta.getNamedClusterEmbedManager() != null ) {
      parentJobMeta.getNamedClusterEmbedManager()
        .passEmbeddedMetastoreKey( this, parentJobMeta.getEmbeddedMetastoreProviderKey() );
    }

    if ( argFromPrevious ) {
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.FoundPreviousRows", String
          .valueOf( ( rows != null ? rows.size() : 0 ) ) ) );
      }
    }

    if ( argFromPrevious && rows != null ) {
      // Folder names come from the first field of each previous result row.
      for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
        if ( successConditionBroken ) {
          logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken", ""
            + NrErrors ) );
          result.setNrErrors( NrErrors );
          result.setNrLinesDeleted( NrSuccess );
          return result;
        }
        RowMetaAndData resultRow = rows.get( iteration );
        String args_previous = resultRow.getString( 0, null );
        if ( !Utils.isEmpty( args_previous ) ) {
          if ( deleteFolder( args_previous ) ) {
            updateSuccess();
          } else {
            updateErrors();
          }
        } else {
          // empty filename !
          logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.EmptyLine" ) );
        }
      }
    } else if ( arguments != null ) {
      // Folder names come from the configured arguments, with variable substitution.
      for ( int i = 0; i < arguments.length && !parentJob.isStopped(); i++ ) {
        if ( successConditionBroken ) {
          logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken", ""
            + NrErrors ) );
          result.setNrErrors( NrErrors );
          result.setNrLinesDeleted( NrSuccess );
          return result;
        }
        String realfilename = environmentSubstitute( arguments[i] );
        if ( !Utils.isEmpty( realfilename ) ) {
          if ( deleteFolder( realfilename ) ) {
            updateSuccess();
          } else {
            updateErrors();
          }
        } else {
          // empty filename !
          logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.EmptyLine" ) );
        }
      }
    }

    if ( log.isDetailed() ) {
      logDetailed( "=======================================" );
      logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Log.Info.NrError", "" + NrErrors ) );
      logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Log.Info.NrDeletedFolders", "" + NrSuccess ) );
      logDetailed( "=======================================" );
    }

    result.setNrErrors( NrErrors );
    result.setNrLinesDeleted( NrSuccess );
    if ( getSuccessStatus() ) {
      result.setResult( true );
    }

    return result;
  }

  /** Counts one failure and flags the success condition as broken when the threshold is crossed. */
  private void updateErrors() {
    NrErrors++;
    if ( checkIfSuccessConditionBroken() ) {
      // Success condition was broken
      successConditionBroken = true;
    }
  }

  /** True when the current error count already violates the configured success condition. */
  private boolean checkIfSuccessConditionBroken() {
    boolean retval = false;
    if ( ( NrErrors > 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) )
      || ( NrErrors >= limitFolders && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) {
      retval = true;
    }
    return retval;
  }

  /** Counts one successfully deleted folder. */
  private void updateSuccess() {
    NrSuccess++;
  }

  /** Evaluates the configured success condition against the final counters. */
  private boolean getSuccessStatus() {
    boolean retval = false;

    if ( ( NrErrors == 0 && getSuccessCondition().equals( SUCCESS_IF_NO_ERRORS ) )
      || ( NrSuccess >= limitFolders && getSuccessCondition().equals( SUCCESS_IF_AT_LEAST_X_FOLDERS_DELETED ) )
      || ( NrErrors <= limitFolders && getSuccessCondition().equals( SUCCESS_IF_ERRORS_LESS ) ) ) {
      retval = true;
    }

    return retval;
  }

  /**
   * Deletes a single folder through VFS.
   *
   * @param foldername resolved folder path
   * @return true when the folder was deleted or did not exist; false when the path is not a
   *         folder or an exception occurred
   */
  private boolean deleteFolder( String foldername ) {
    boolean rcode = false;
    FileObject filefolder = null;

    try {
      filefolder = KettleVFS.getFileObject( foldername, this );

      if ( filefolder.exists() ) {
        // the file or folder exists
        if ( filefolder.getType() == FileType.FOLDER ) {
          // It's a folder
          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.ProcessingFolder", foldername ) );
          }
          // Delete Files
          int Nr = filefolder.delete( new TextFileSelector() );

          if ( log.isDetailed() ) {
            logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFolders.TotalDeleted", foldername, String
              .valueOf( Nr ) ) );
          }
          rcode = true;
        } else {
          // Error...This file is not a folder!
          logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.Error.NotFolder" ) );
        }
      } else {
        // File already deleted, no reason to try to delete it
        if ( log.isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "JobEntryDeleteFolders.FolderAlreadyDeleted", foldername ) );
        }
        rcode = true;
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "JobEntryDeleteFolders.CouldNotDelete", foldername, e.getMessage() ), e );
    } finally {
      if ( filefolder != null ) {
        try {
          filefolder.close();
        } catch ( IOException ex ) {
          // Ignore
        }
      }
    }

    return rcode;
  }

  /** Selector that accepts every file/folder, so delete() removes the whole tree. */
  private class TextFileSelector implements FileSelector {
    public boolean includeFile( FileSelectInfo info ) {
      return true;
    }

    public boolean traverseDescendents( FileSelectInfo info ) {
      return true;
    }
  }

  /** Sets whether folder names are taken from the previous entry's result rows. */
  public void setPrevious( boolean argFromPrevious ) {
    this.argFromPrevious = argFromPrevious;
  }

  /** This entry evaluates to true/false, so it supports conditional hops. */
  public boolean evaluates() {
    return true;
  }

  /**
   * Validates that the arguments are non-null and that each referenced folder exists.
   */
  public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space, Repository repository,
    IMetaStore metaStore ) {
    boolean res =
      JobEntryValidatorUtils.andValidator().validate( this, "arguments", remarks,
        AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );

    if ( !res ) {
      return;
    }

    ValidatorContext ctx = new ValidatorContext();
    AbstractFileValidator.putVariableSpace( ctx, getVariables() );
    AndValidator.putValidators( ctx, JobEntryValidatorUtils.notNullValidator(),
      JobEntryValidatorUtils.fileExistsValidator() );

    for ( int i = 0; i < arguments.length; i++ ) {
      JobEntryValidatorUtils.andValidator().validate( this, "arguments[" + i + "]", remarks, ctx );
    }
  }

  /** Reports each configured folder as a FILE resource dependency of the job. */
  public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
    List<ResourceReference> references = super.getResourceDependencies( jobMeta );
    if ( arguments != null ) {
      ResourceReference reference = null;
      for ( int i = 0; i < arguments.length; i++ ) {
        String filename = jobMeta.environmentSubstitute( arguments[i] );
        if ( reference == null ) {
          reference = new ResourceReference( this );
          references.add( reference );
        }
        reference.getEntries().add( new ResourceEntry( filename, ResourceType.FILE ) );
      }
    }
    return references;
  }

  public boolean isArgFromPrevious() {
    return argFromPrevious;
  }

  public String[] getArguments() {
    return arguments;
  }

  public void setSuccessCondition( String success_condition ) {
    this.success_condition = success_condition;
  }

  public String getSuccessCondition() {
    return success_condition;
  }

  public void setLimitFolders( String limit_folders ) {
    this.limit_folders = limit_folders;
  }

  public String getLimitFolders() {
    return limit_folders;
  }

}
apache-2.0
reportportal/service-api
src/main/java/com/epam/ta/reportportal/ws/controller/FileStorageController.java
5705
/* * Copyright 2019 EPAM Systems * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.epam.ta.reportportal.ws.controller; import com.epam.ta.reportportal.commons.EntityUtils; import com.epam.ta.reportportal.commons.ReportPortalUser; import com.epam.ta.reportportal.core.file.DeleteFilesHandler; import com.epam.ta.reportportal.core.file.GetFileHandler; import com.epam.ta.reportportal.core.user.EditUserHandler; import com.epam.ta.reportportal.entity.attachment.BinaryData; import com.epam.ta.reportportal.exception.ReportPortalException; import com.epam.ta.reportportal.util.ProjectExtractor; import com.epam.ta.reportportal.ws.model.OperationCompletionRS; import io.swagger.annotations.ApiOperation; import org.apache.commons.io.IOUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.core.annotation.AuthenticationPrincipal; import org.springframework.transaction.annotation.Transactional; import org.springframework.web.bind.annotation.*; import org.springframework.web.multipart.MultipartFile; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import static com.epam.ta.reportportal.auth.permissions.Permissions.*; /** * @author Dzianis_Shybeka */ @RestController @RequestMapping("/v1/data") public class FileStorageController { 
private final ProjectExtractor projectExtractor; private final EditUserHandler editUserHandler; private final GetFileHandler getFileHandler; private final DeleteFilesHandler deleteFilesHandler; @Autowired public FileStorageController(ProjectExtractor projectExtractor, EditUserHandler editUserHandler, GetFileHandler getFileHandler, DeleteFilesHandler deleteFilesHandler) { this.projectExtractor = projectExtractor; this.editUserHandler = editUserHandler; this.getFileHandler = getFileHandler; this.deleteFilesHandler = deleteFilesHandler; } @Transactional(readOnly = true) @PreAuthorize(ASSIGNED_TO_PROJECT) @GetMapping(value = "/{projectName}/{dataId}") public void getFile(@PathVariable String projectName, @PathVariable("dataId") Long dataId, HttpServletResponse response, @AuthenticationPrincipal ReportPortalUser user) { toResponse(response, getFileHandler.loadFileById(dataId, projectExtractor.extractProjectDetails(user, projectName))); } /** * (non-Javadoc) */ @Transactional(readOnly = true) @GetMapping(value = "/photo") @ApiOperation("Get photo of current user") public void getMyPhoto(@AuthenticationPrincipal ReportPortalUser user, HttpServletResponse response, @RequestParam(value = "loadThumbnail", required = false) boolean loadThumbnail) { toResponse(response, getFileHandler.getUserPhoto(user, loadThumbnail)); } /** * (non-Javadoc) */ @Transactional(readOnly = true) @PreAuthorize(NOT_CUSTOMER) @GetMapping(value = "/{projectName}/userphoto") @ApiOperation("Get user's photo") public void getUserPhoto(@PathVariable String projectName, @RequestParam(value = "id") String username, @RequestParam(value = "loadThumbnail", required = false) boolean loadThumbnail, HttpServletResponse response, @AuthenticationPrincipal ReportPortalUser user) { BinaryData userPhoto = getFileHandler.getUserPhoto(EntityUtils.normalizeId(username), user, projectName, loadThumbnail); toResponse(response, userPhoto); } @Transactional @PostMapping(value = "/photo", consumes = { 
MediaType.MULTIPART_FORM_DATA_VALUE }) @ApiOperation("Upload user's photo") public OperationCompletionRS uploadPhoto(@RequestParam("file") MultipartFile file, @AuthenticationPrincipal ReportPortalUser user) { return editUserHandler.uploadPhoto(EntityUtils.normalizeId(user.getUsername()), file); } @Transactional @DeleteMapping(value = "/photo") @ApiOperation("Delete user's photo") public OperationCompletionRS deletePhoto(@AuthenticationPrincipal ReportPortalUser user) { return editUserHandler.deletePhoto(EntityUtils.normalizeId(user.getUsername())); } @Transactional @PreAuthorize(ADMIN_ONLY) @PostMapping(value = "/clean", consumes = { MediaType.MULTIPART_FORM_DATA_VALUE }) @ApiOperation("Remove attachments from file storage according to uploaded csv file") public OperationCompletionRS removeAttachmentsByCsv(@RequestParam("file") MultipartFile file, @AuthenticationPrincipal ReportPortalUser user) { return deleteFilesHandler.removeFilesByCsv(file); } /** * Copies data from provided {@link InputStream} to Response * * @param response Response * @param binaryData Stored data */ private void toResponse(HttpServletResponse response, BinaryData binaryData) { //TODO investigate stream closing requirement if (binaryData.getInputStream() != null) { try { response.setContentType(binaryData.getContentType()); IOUtils.copy(binaryData.getInputStream(), response.getOutputStream()); } catch (IOException e) { throw new ReportPortalException("Unable to retrieve binary data from data storage", e); } } else { response.setStatus(HttpStatus.NO_CONTENT.value()); } } }
apache-2.0
shensiduanxing/devops-metadata-svc
src/main/java/org/flysnow/cloud/buildmeta/ui/resteasy/exception/BuildMetadataServiceException.java
557
package org.flysnow.cloud.buildmeta.ui.resteasy.exception; import java.io.Serializable; public class BuildMetadataServiceException extends RuntimeException implements Serializable { private static final long serialVersionUID = 7786141544419367058L; public BuildMetadataServiceException(){ super(); } public BuildMetadataServiceException(String message, Throwable cause){ super(message, cause); } public BuildMetadataServiceException(Throwable cause){ super(cause); } public BuildMetadataServiceException(String msg){ super(msg); } }
apache-2.0
Sargul/dbeaver
plugins/org.jkiss.dbeaver.ext.mysql/src/org/jkiss/dbeaver/ext/mysql/tasks/MySQLToolTableCheckSettings.java
2432
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2021 DBeaver Corp and others
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.mysql.tasks;

import org.jkiss.dbeaver.ext.mysql.model.MySQLTableBase;
import org.jkiss.dbeaver.model.data.json.JSONUtils;
import org.jkiss.dbeaver.model.meta.IPropertyValueListProvider;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRRunnableContext;
import org.jkiss.dbeaver.model.sql.task.SQLToolExecuteSettings;

import java.util.Map;

/**
 * Settings for the MySQL table check tool.
 */
public class MySQLToolTableCheckSettings extends SQLToolExecuteSettings<MySQLTableBase> {

    // The selected CHECK TABLE option; an empty string stands for the default check.
    private String option;

    @Property(viewable = true, editable = true, updatable = true, listProvider = CheckOptionListProvider.class)
    public String getOption() {
        return option;
    }

    public void setOption(String value) {
        this.option = value;
    }

    /**
     * Restores the selected option from the persisted task configuration.
     */
    @Override
    public void loadConfiguration(DBRRunnableContext runnableContext, Map<String, Object> config) {
        super.loadConfiguration(runnableContext, config);
        this.option = JSONUtils.getString(config, "option");
    }

    /**
     * Persists the selected option into the task configuration map.
     */
    @Override
    public void saveConfiguration(Map<String, Object> config) {
        super.saveConfiguration(config);
        config.put("option", this.option);
    }

    /**
     * Supplies the fixed set of CHECK TABLE options for the property editor.
     */
    public static class CheckOptionListProvider implements IPropertyValueListProvider<MySQLToolTableCheckSettings> {
        @Override
        public boolean allowCustomValue() {
            // Only the predefined options below are valid.
            return false;
        }

        @Override
        public Object[] getPossibleValues(MySQLToolTableCheckSettings object) {
            String[] checkOptions = { "", "FOR UPGRADE", "QUICK", "FAST", "MEDIUM", "EXTENDED", "CHANGED" };
            return checkOptions;
        }
    }
}
apache-2.0
guiguegon/GeoApi
app/src/androidTest/java/es/guiguegon/geoapi/ExampleInstrumentedTest.java
741
package es.guiguegon.geoapi; import android.content.Context; import android.support.test.InstrumentationRegistry; import android.support.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.*; /** * Instrumentation test, which will execute on an Android device. * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(AndroidJUnit4.class) public class ExampleInstrumentedTest { @Test public void useAppContext() throws Exception { // Context of the app under test. Context appContext = InstrumentationRegistry.getTargetContext(); assertEquals("es.guiguegon.geoapi", appContext.getPackageName()); } }
apache-2.0
dragonzhou/humor
src/ca/pfv/spmf/test/MainTestKMeans_saveToFile.java
910
package ca.pfv.spmf.test;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;

import ca.pfv.spmf.algorithms.clustering.kmeans.AlgoKMeans;

/**
 * Example of how to use the KMeans algorithm, in source code.
 *
 * <p>Reads the input points from the bundled "configKmeans.txt" resource, runs K-Means with
 * k = 4 clusters, prints statistics and saves the clusters to an output file.</p>
 */
public class MainTestKMeans_saveToFile {

	public static void main(String[] args) throws NumberFormatException, IOException {
		String input = fileToPath("configKmeans.txt");
		// Output file for the discovered clusters (Windows-style path kept from the original example).
		String output = "d://output.txt";
		// Number of clusters requested from K-Means.
		int k = 4;

		// Apply the algorithm
		AlgoKMeans algoKMeans = new AlgoKMeans();
		algoKMeans.runAlgorithm(input, k);
		algoKMeans.printStatistics();
		algoKMeans.saveToFile(output);
	}

	/**
	 * Resolves a resource located next to this class into a URL-decoded filesystem path.
	 *
	 * @param filename resource name, relative to this class
	 * @return the decoded path of the resource
	 * @throws UnsupportedEncodingException never on a compliant JVM (UTF-8 is mandatory)
	 * @throws IllegalArgumentException if the resource is not on the classpath; the original
	 *         code dereferenced the null URL and failed with an uninformative NullPointerException
	 */
	public static String fileToPath(String filename) throws UnsupportedEncodingException {
		URL url = MainTestKMeans_saveToFile.class.getResource(filename);
		if (url == null) {
			throw new IllegalArgumentException("Resource not found on classpath: " + filename);
		}
		return java.net.URLDecoder.decode(url.getPath(), "UTF-8");
	}
}
apache-2.0
donbeave/grails-console-charts
src/gwt/grails/plugin/console/charts/client/gin/DesktopModule.java
1050
/*
 * Copyright 2014 the original author or authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package grails.plugin.console.charts.client.gin;

import com.gwtplatform.mvp.client.gin.AbstractPresenterModule;

import grails.plugin.console.charts.client.application.ApplicationDesktopModule;

/**
 * GIN presenter module for the desktop form factor.
 *
 * @author <a href='mailto:donbeave@gmail.com'>Alexey Zhokhov</a>
 */
public class DesktopModule extends AbstractPresenterModule {

    @Override
    protected void configure() {
        // All desktop bindings are delegated to the application-level module.
        final ApplicationDesktopModule applicationModule = new ApplicationDesktopModule();
        install(applicationModule);
    }
}
apache-2.0
adeelahmedkhanabbasi/QuranTeacher
QuranTeacher/src/QuranTeacher/ AudioPreferencesPanel.java
6792
/*
 * In the name of Allah
 * This file is part of The "Quran Teacher or Learn Arabic" Project. Use is subject to
 * license terms.
 *
 * @author: Fazle Rabbi Rahat
 *
 */
package QuranTeacher;

import javax.swing.ButtonGroup;
import javax.swing.JComboBox;
import javax.swing.JPanel;

import java.awt.GridBagLayout;
import java.awt.Color;

import javax.swing.JLabel;

import java.awt.GridBagConstraints;
import java.awt.Font;
import java.awt.Insets;

import javax.swing.JRadioButton;

import java.awt.event.ItemListener;
import java.awt.event.ItemEvent;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

import QuranTeacher.Preferences.AudioPreferences;

import javax.swing.JTextArea;

public class AudioPreferencesPanel extends JPanel {

	/**
	 * Preferences panel to handle audio preferences. It doesn't extends
	 * the PreferencesPanel class
	 */
	private static final long serialVersionUID = 1L;

	// Display names of the reciters, read from the bundled "files/AudioLinks" resource.
	private List<String> QariNames = new ArrayList<>();
	// Download links for each reciter; parallel to QariNames by index.
	private static List<String> audioSourceLinks = new ArrayList<>();
	// Preferences object shared with the rest of the application; mutated by the listeners below.
	private AudioPreferences audioSetupPrefs;
	private JRadioButton rdbtnOn;
	private JRadioButton rdbtnOff;
	private JComboBox<String> comboBox;

	/**
	 * Create the panel.
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public AudioPreferencesPanel(final AudioPreferences audioPrefs) {
		this.audioSetupPrefs = audioPrefs;
		setBackground(Color.DARK_GRAY);
		setForeground(Color.RED);
		// Grid layout: column 1 stretches; row 4 is fixed at 32px; last row absorbs extra space.
		GridBagLayout gridBagLayout = new GridBagLayout();
		gridBagLayout.columnWidths = new int[]{0, 0, 0, 0};
		gridBagLayout.rowHeights = new int[]{0, 0, 0, 0, 32, 0};
		gridBagLayout.columnWeights = new double[]{0.0, 1.0, 0.0, Double.MIN_VALUE};
		gridBagLayout.rowWeights = new double[]{0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE, 1.0};
		setLayout(gridBagLayout);

		// Panel title, spanning all four columns of row 0.
		JLabel lblHeader = new JLabel("Recitation Preferences");
		lblHeader.setForeground(Color.MAGENTA);
		lblHeader.setFont(new Font("Tahoma", Font.PLAIN, 18));
		GridBagConstraints gbc_lblHeader = new GridBagConstraints();
		gbc_lblHeader.gridwidth = 4;
		gbc_lblHeader.insets = new Insets(0, 0, 5, 0);
		gbc_lblHeader.gridx = 0;
		gbc_lblHeader.gridy = 0;
		add(lblHeader, gbc_lblHeader);

		JLabel lblAudioState = new JLabel("Recitation State :");
		lblAudioState.setFont(new Font("Tahoma", Font.PLAIN, 18));
		lblAudioState.setForeground(Color.ORANGE);
		GridBagConstraints gbc_lblAudioState = new GridBagConstraints();
		gbc_lblAudioState.insets = new Insets(0, 0, 5, 5);
		gbc_lblAudioState.gridx = 0;
		gbc_lblAudioState.gridy = 2;
		add(lblAudioState, gbc_lblAudioState);

		// "ON" radio button: pushes its state straight into the shared preferences object.
		rdbtnOn = new JRadioButton("ON");
		rdbtnOn.setFont(new Font("Tahoma", Font.PLAIN, 18));
		rdbtnOn.addItemListener(new ItemListener() {
			public void itemStateChanged(ItemEvent e) {
				if(e.getStateChange()==ItemEvent.SELECTED)
				{
					audioSetupPrefs.setAudioON(true);
					//System.out.println("On");
				}
				else
				{
					audioSetupPrefs.setAudioON(false);
					//System.out.println("Off");
				}
			}
		});
		rdbtnOn.setBackground(Color.DARK_GRAY);
		rdbtnOn.setForeground(Color.GREEN);
		GridBagConstraints gbc_rdbtnOn = new GridBagConstraints();
		gbc_rdbtnOn.insets = new Insets(0, 0, 5, 5);
		gbc_rdbtnOn.gridx = 1;
		gbc_rdbtnOn.gridy = 2;
		add(rdbtnOn, gbc_rdbtnOn);

		// "Off" button has no listener of its own: deselecting "ON" fires the listener above.
		rdbtnOff = new JRadioButton("Off");
		rdbtnOff.setFont(new Font("Tahoma", Font.PLAIN, 18));
		rdbtnOff.setBackground(Color.DARK_GRAY);
		rdbtnOff.setForeground(Color.GREEN);
		GridBagConstraints gbc_rdbtnOff = new GridBagConstraints();
		gbc_rdbtnOff.insets = new Insets(0, 0, 5, 5);
		gbc_rdbtnOff.gridx = 2;
		gbc_rdbtnOff.gridy = 2;
		add(rdbtnOff, gbc_rdbtnOff);

		// Mutually exclusive on/off selection.
		ButtonGroup buttonGroup=new ButtonGroup();
		buttonGroup.add(rdbtnOn);
		buttonGroup.add(rdbtnOff);

		JLabel lblSelectQari = new JLabel("Select Qari :");
		lblSelectQari.setForeground(Color.ORANGE);
		lblSelectQari.setFont(new Font("Tahoma", Font.PLAIN, 18));
		GridBagConstraints gbc_lblSelectQari = new GridBagConstraints();
		gbc_lblSelectQari.anchor = GridBagConstraints.WEST;
		gbc_lblSelectQari.insets = new Insets(0, 0, 5, 5);
		gbc_lblSelectQari.gridx = 0;
		gbc_lblSelectQari.gridy = 4;
		add(lblSelectQari, gbc_lblSelectQari);

		// Populate QariNames/audioSourceLinks before building the combo box from them.
		storeQariSource();
		comboBox = new JComboBox(QariNames.toArray());
		comboBox.addItemListener(new ItemListener() {
			public void itemStateChanged(ItemEvent e) {
				// Store only the index; the link is looked up later via getAudioSourceLink.
				audioSetupPrefs.setAudioSourceIndex(comboBox.getSelectedIndex());
				//System.out.println(AudioPreferences.audioSource);
			}
		});
		comboBox.setFont(new Font("Tahoma", Font.PLAIN, 16));
		// Clamp a persisted out-of-range index back to the first entry.
		int k=audioSetupPrefs.getAudioSourceIndex();
		if(k<0 || k>QariNames.size())k=0;
		comboBox.setSelectedIndex(k);
		GridBagConstraints gbc_comboBox = new GridBagConstraints();
		gbc_comboBox.gridwidth = 0;
		gbc_comboBox.insets = new Insets(0, 0, 5, 0);
		gbc_comboBox.fill = GridBagConstraints.HORIZONTAL;
		gbc_comboBox.gridx = 1;
		gbc_comboBox.gridy = 4;
		add(comboBox, gbc_comboBox);

		// Read-only, word-wrapped note about when a reciter change takes effect.
		JTextArea txtrNote = new JTextArea();
		txtrNote.setFont(new Font("Monospaced", Font.PLAIN, 16));
		txtrNote.setEditable(false);
		txtrNote.setLineWrap(true);
		txtrNote.setWrapStyleWord(true);
		txtrNote.setForeground(Color.PINK);
		txtrNote.setBackground(Color.DARK_GRAY);
		txtrNote.setText("Note: If you change Qari name, it will take effect only for the \"next to be downoaded\" recitation files. So, the Qari for the previously downloaded files will not change. ");
		GridBagConstraints gbc_txtrNote = new GridBagConstraints();
		gbc_txtrNote.gridwidth = 0;
		gbc_txtrNote.insets = new Insets(0, 0, 0, 5);
		gbc_txtrNote.fill = GridBagConstraints.BOTH;
		gbc_txtrNote.gridx = 0;
		gbc_txtrNote.gridy = 5;
		add(txtrNote, gbc_txtrNote);

		updateButtonGroup();
	}

	// Reflects the persisted audio-ON flag into the on/off radio buttons.
	private void updateButtonGroup() {
		if(audioSetupPrefs.isAudioON())
			rdbtnOn.setSelected(true);
		else
			rdbtnOff.setSelected(true);
	}

	// Parses the bundled "files/AudioLinks" resource: lines starting with "name=" add a
	// reciter name, lines starting with "link=" add the matching download link.
	private void storeQariSource() {
		InputStream inStream=this.getClass().getResourceAsStream("files/AudioLinks");
		BufferedReader reader=new BufferedReader(new InputStreamReader(inStream));
		String text;
		try {
			while((text=reader.readLine())!=null)
			{
				if(text.startsWith("name"))
				{
					QariNames.add(text.split("=")[1]);
				}
				else if(text.startsWith("link"))
				{
					audioSourceLinks.add(text.split("=")[1]);
				}
			}
			reader.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	// Returns the download link for the reciter at the given index.
	// NOTE(review): no bounds check here; assumes the caller passes a valid stored index.
	public static String getAudioSourceLink(int index)
	{
		return audioSourceLinks.get(index);
	}

	// Re-syncs the widgets with the preferences, e.g. after preferences were reloaded.
	public void updateSetupPanel() {
		updateButtonGroup();
		int k=audioSetupPrefs.getAudioSourceIndex();
		if(k<0 || k>QariNames.size())k=0;
		comboBox.setSelectedIndex(k);
	}
}
apache-2.0
haoch/kylin
source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaRequester.java
7848
/* * * * Licensed to the Apache Software Foundation (ASF) under one or more * * contributor license agreements. See the NOTICE file distributed with * * this work for additional information regarding copyright ownership. * * The ASF licenses this file to You under the Apache License, Version 2.0 * * (the "License"); you may not use this file except in compliance with * * the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * / */ package org.apache.kylin.source.kafka.util; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; import kafka.api.FetchRequestBuilder; import kafka.api.PartitionOffsetRequestInfo; import kafka.cluster.Broker; import kafka.common.TopicAndPartition; import kafka.javaapi.FetchResponse; import kafka.javaapi.OffsetRequest; import kafka.javaapi.OffsetResponse; import kafka.javaapi.PartitionMetadata; import kafka.javaapi.TopicMetadata; import kafka.javaapi.TopicMetadataRequest; import kafka.javaapi.TopicMetadataResponse; import kafka.javaapi.consumer.SimpleConsumer; import org.apache.kylin.source.kafka.TopicMeta; import org.apache.kylin.source.kafka.config.KafkaClusterConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; /** */ public final class KafkaRequester { private static final Logger logger = LoggerFactory.getLogger(KafkaRequester.class); private static ConcurrentMap<String, SimpleConsumer> consumerCache = 
Maps.newConcurrentMap();

    // JVM shutdown hook: closes every cached SimpleConsumer on exit so broker sockets are not leaked (see shutdown()).
    static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { KafkaRequester.shutdown(); } })); }

    // Returns the cached SimpleConsumer for this (broker, timeout, bufferSize, clientId) combination,
    // creating and caching one on first use. putIfAbsent keeps the cache consistent under concurrent
    // callers, but NOTE(review): the loser of a putIfAbsent race constructs a SimpleConsumer that is
    // discarded without close() - confirm whether that connection leak matters here.
    private static SimpleConsumer getSimpleConsumer(Broker broker, int timeout, int bufferSize, String clientId) { String key = createKey(broker, timeout, bufferSize, clientId); if (consumerCache.containsKey(key)) { return consumerCache.get(key); } else { consumerCache.putIfAbsent(key, new SimpleConsumer(broker.host(), broker.port(), timeout, bufferSize, clientId)); return consumerCache.get(key); } }

    // Cache key: broker connection string plus every parameter that affects consumer construction.
    private static String createKey(Broker broker, int timeout, int bufferSize, String clientId) { return broker.getConnectionString() + "_" + timeout + "_" + bufferSize + "_" + clientId; }

    // Looks up metadata for the topic named in the cluster config by querying brokers. Returns a
    // TopicMeta carrying the topic's partition ids, or null when no broker yields exactly one
    // error-free TopicMetadata. NOTE(review): both failure branches 'break' out of the broker loop
    // rather than 'continue' to the next broker - confirm that giving up after the first bad
    // response is intended.
    public static TopicMeta getKafkaTopicMeta(KafkaClusterConfig kafkaClusterConfig) { SimpleConsumer consumer; for (Broker broker : kafkaClusterConfig.getBrokers()) { consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), "topic_meta_lookup"); List<String> topics = Collections.singletonList(kafkaClusterConfig.getTopic()); TopicMetadataRequest req = new TopicMetadataRequest(topics); TopicMetadataResponse resp = consumer.send(req); final List<TopicMetadata> topicMetadatas = resp.topicsMetadata(); if (topicMetadatas.size() != 1) { break; } final TopicMetadata topicMetadata = topicMetadatas.get(0); if (topicMetadata.errorCode() != 0) { break; } List<Integer> partitionIds = Lists.transform(topicMetadata.partitionsMetadata(), new Function<PartitionMetadata, Integer>() { @Nullable @Override public Integer apply(PartitionMetadata partitionMetadata) { return partitionMetadata.partitionId(); } }); return new TopicMeta(kafkaClusterConfig.getTopic(), partitionIds); } logger.debug("cannot find topic:" + kafkaClusterConfig.getTopic()); return null; }

    // Finds the PartitionMetadata for one partition of a topic by scanning the topic metadata
    // returned by the given brokers; returns null when the partition cannot be located. Same
    // break-on-first-error behaviour as getKafkaTopicMeta above.
    public static PartitionMetadata getPartitionMetadata(String topic, int partitionId, List<Broker> brokers, KafkaClusterConfig kafkaClusterConfig) { SimpleConsumer consumer; for (Broker broker : brokers) { consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), "topic_meta_lookup"); List<String> topics = Collections.singletonList(topic); TopicMetadataRequest req = new TopicMetadataRequest(topics); TopicMetadataResponse resp = consumer.send(req); final List<TopicMetadata> topicMetadatas = resp.topicsMetadata(); if (topicMetadatas.size() != 1) { logger.warn("invalid topicMetadata size:" + topicMetadatas.size()); break; } final TopicMetadata topicMetadata = topicMetadatas.get(0); if (topicMetadata.errorCode() != 0) { logger.warn("fetching topicMetadata with errorCode:" + topicMetadata.errorCode()); break; } for (PartitionMetadata partitionMetadata : topicMetadata.partitionsMetadata()) { if (partitionMetadata.partitionId() == partitionId) { return partitionMetadata; } } } logger.debug("cannot find PartitionMetadata, topic:" + topic + " partitionId:" + partitionId); return null; }

    // Issues a single fetch from the given topic-partition starting at 'offset', using a
    // per-topic-partition client name.
    public static FetchResponse fetchResponse(String topic, int partitionId, long offset, Broker broker, KafkaClusterConfig kafkaClusterConfig) { final String clientName = "client_" + topic + "_" + partitionId; SimpleConsumer consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), clientName); kafka.api.FetchRequest req = new FetchRequestBuilder().clientId(clientName).addFetch(topic, partitionId, offset, 1048576) // fetchSize of 1048576 bytes (the console-consumer default); may need increasing if large batches are written to Kafka
    .build(); return consumer.fetch(req); }

    // Returns the first offset the broker reports for the topic-partition at 'whichTime'
    // (one offset requested via PartitionOffsetRequestInfo); returns 0 when the offset
    // response carries an error. NOTE(review): 0 is also a valid earliest offset - confirm
    // callers can distinguish the error case.
    public static long getLastOffset(String topic, int partitionId, long whichTime, Broker broker, KafkaClusterConfig kafkaClusterConfig) { String clientName = "client_" + topic + "_" + partitionId; SimpleConsumer consumer = getSimpleConsumer(broker, kafkaClusterConfig.getTimeout(), kafkaClusterConfig.getBufferSize(), clientName); TopicAndPartition topicAndPartition
    = new TopicAndPartition(topic, partitionId); Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>(); requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1)); OffsetRequest request = new OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName); OffsetResponse response = consumer.getOffsetsBefore(request); if (response.hasError()) { logger.error("Error fetching data Offset Data the Broker. Reason: " + response.errorCode(topic, partitionId)); return 0; } long[] offsets = response.offsets(topic, partitionId); return offsets[0]; }

    // Closes every consumer in the cache; invoked by the shutdown hook registered above.
    public static void shutdown() { for (SimpleConsumer simpleConsumer : consumerCache.values()) { simpleConsumer.close(); } } }
apache-2.0
McLeodMoores/starling
projects/util/src/main/java/com/opengamma/DataDuplicationException.java
954
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma;

/**
 * Runtime exception raised when performing an action would produce a duplicate.
 * <p>
 * The typical trigger is an attempt to add data for which a matching item already exists.
 */
public class DataDuplicationException extends OpenGammaRuntimeException {

  /** Serialization version. */
  private static final long serialVersionUID = 1L;

  /**
   * Creates an instance carrying only a detail message.
   *
   * @param message  the message, may be null
   */
  public DataDuplicationException(final String message) {
    super(message);
  }

  /**
   * Creates an instance carrying a detail message and a root cause.
   *
   * @param message  the message, may be null
   * @param cause  the underlying cause, may be null
   */
  public DataDuplicationException(final String message, final Throwable cause) {
    super(message, cause);
  }

}
apache-2.0
flipkart-incubator/hbase-object-mapper
src/test/java/com/flipkart/hbaseobjectmapper/testcases/TestsReactiveHBDAO.java
29551
package com.flipkart.hbaseobjectmapper.testcases; import com.flipkart.hbaseobjectmapper.HBAdmin; import com.flipkart.hbaseobjectmapper.Records; import com.flipkart.hbaseobjectmapper.WrappedHBColumnTC; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.CitizenDAO; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.CitizenSummaryDAO; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.CounterDAO; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.CrawlDAO; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.CrawlNoVersionDAO; import com.flipkart.hbaseobjectmapper.testcases.daos.reactive.EmployeeDAO; import com.flipkart.hbaseobjectmapper.testcases.entities.Citizen; import com.flipkart.hbaseobjectmapper.testcases.entities.CitizenSummary; import com.flipkart.hbaseobjectmapper.testcases.entities.Contact; import com.flipkart.hbaseobjectmapper.testcases.entities.Counter; import com.flipkart.hbaseobjectmapper.testcases.entities.Crawl; import com.flipkart.hbaseobjectmapper.testcases.entities.CrawlNoVersion; import com.flipkart.hbaseobjectmapper.testcases.entities.Dependents; import com.flipkart.hbaseobjectmapper.testcases.entities.Employee; import com.flipkart.hbaseobjectmapper.testcases.util.cluster.InMemoryHBaseCluster; import com.flipkart.hbaseobjectmapper.testcases.util.cluster.RealHBaseCluster; import com.google.common.collect.Iterables; import org.apache.hadoop.hbase.client.AsyncConnection; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Increment; import org.apache.hadoop.hbase.util.Bytes; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.io.IOException; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import 
java.util.NavigableMap; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.a; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.e; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.l; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.m; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.nm; import static com.flipkart.hbaseobjectmapper.testcases.util.LiteralsUtil.s; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; // TODO: fix the tests. Currently, the tests are copied from the sync DAO as-is, but is not idiomatic with real reactive client usage. 
// Exercises the reactive (async) DAO variants end-to-end against a live HBase cluster
// (in-memory mini-cluster by default, or a real cluster when the USE_REAL_HBASE environment
// variable is set). Each DAO call returns a CompletableFuture which the tests resolve with join().
class TestsReactiveHBDAO extends BaseHBDAOTests { private static AsyncConnection connection;

    // Boots the cluster once for the whole class: selects real vs in-memory HBase from environment
    // variables (with an optional numeric start timeout for the in-memory cluster), opens the async
    // connection and creates the "govt"/"corp" namespaces the test entities live in. Fails fast with
    // a readable message when the timeout variable is non-numeric or the cluster cannot be reached.
    @BeforeAll public static void setup() { try { Logger.getRootLogger().setLevel(Level.WARN); String useRealHBase = System.getenv(RealHBaseCluster.USE_REAL_HBASE); if (useRealHBase != null && (useRealHBase.equals("1") || useRealHBase.equalsIgnoreCase("true"))) { hBaseCluster = new RealHBaseCluster(); } else { String inMemoryHBaseClusterStartTimeout = System.getenv(InMemoryHBaseCluster.INMEMORY_CLUSTER_START_TIMEOUT); if (inMemoryHBaseClusterStartTimeout != null) { hBaseCluster = new InMemoryHBaseCluster(Long.parseLong(inMemoryHBaseClusterStartTimeout)); } else { hBaseCluster = new InMemoryHBaseCluster(); } } connection = hBaseCluster.startAsync().join(); hbAdmin = HBAdmin.create(connection); hbAdmin.createNamespace("govt"); hbAdmin.createNamespace("corp"); } catch (NumberFormatException e) { fail("The environmental variable " + InMemoryHBaseCluster.INMEMORY_CLUSTER_START_TIMEOUT + " is specified incorrectly (Must be numeric)"); } catch (Exception e) { e.printStackTrace(System.err); fail(String.format("Failed to connect to HBase. Aborted execution of DAO-related test cases."
    + "Reason:%n%s", e.getMessage())); } }

    // Full CRUD round-trip on the Citizen entity: persist/get per record (including version-pruned
    // gets for 1..4 versions), per-field fetches via reflection for both single- and multi-versioned
    // columns, range scans vs iterables vs bulk gets vs prefix gets, exists checks, an aggregate
    // check over the "sal" field, and all three deletion variants (by object, list, row-key array).
    @Test public void testCRUD() throws IOException { try { createTables(Citizen.class, CitizenSummary.class); CitizenDAO citizenDao = new CitizenDAO(connection); CitizenSummaryDAO citizenSummaryDAO = new CitizenSummaryDAO(connection); final List<Citizen> records = TestObjects.validCitizenObjects; assertEquals("citizens", citizenDao.getTableName()); final Set<String> columnFamiliesCitizen = citizenDao.getColumnFamiliesAndVersions().keySet(), columnFamiliesCitizenSummary = citizenSummaryDAO.getColumnFamiliesAndVersions().keySet(); assertEquals(s("main", "optional"), columnFamiliesCitizen, "Issue with column families of 'citizens' table%n" + columnFamiliesCitizen); assertEquals("citizens_summary", citizenSummaryDAO.getTableName()); assertEquals(s("a"), columnFamiliesCitizenSummary, "Issue with column families of 'citizens_summary' table%n" + columnFamiliesCitizenSummary); String[] allRowKeys = new String[records.size()]; Map<String, Map<String, Object>> expectedFieldValues = new HashMap<>(); for (int i = 0; i < records.size(); i++) { // for each test object,
    Citizen record = records.get(i); final String rowKey = citizenDao.persist(record).join(); allRowKeys[i] = rowKey; Citizen serDeserRecord = citizenDao.get(rowKey, Integer.MAX_VALUE).join(); assertEquals(record, serDeserRecord, "Entry got corrupted upon persisting and fetching back"); for (int numVersions = 1; numVersions <= 4; numVersions++) { final Citizen citizenNVersionsActual = citizenDao.get(rowKey, numVersions).join(), citizenNVersionsExpected = pruneVersionsBeyond(record, numVersions); assertEquals(citizenNVersionsExpected, citizenNVersionsActual, String.format("Mismatch in data between 'record pruned for %d versions' and 'record fetched from HBase for %d versions' for record: %s", numVersions, numVersions, record)); } for (String f : citizenDao.getFields()) { // for each field of the given test object,
    try { Field field = Citizen.class.getDeclaredField(f);
    WrappedHBColumnTC hbColumn = new WrappedHBColumnTC(field); field.setAccessible(true); if (hbColumn.isMultiVersioned()) { NavigableMap expected = (NavigableMap) field.get(record); final NavigableMap actual = citizenDao.fetchFieldValue(rowKey, f, Integer.MAX_VALUE).join(); assertEquals(expected, actual, String.format("Data for (multi-versioned) field \"%s\" got corrupted upon persisting and fetching back object: %s", field.getName(), record)); if (actual == null) continue; if (expectedFieldValues.containsKey(f)) { expectedFieldValues.get(f).put(rowKey, actual); } else { expectedFieldValues.put(f, m(e(rowKey, (Object) actual))); } } else { final Object actual = citizenDao.fetchFieldValue(rowKey, f).join(); Object expected = field.get(record); assertEquals(expected, actual, String.format("Data for field \"%s\" got corrupted upon persisting and fetching back object: %s", field.getName(), record)); if (actual == null) continue; if (expectedFieldValues.containsKey(f)) { expectedFieldValues.get(f).put(rowKey, actual); } else { expectedFieldValues.put(f, m(e(rowKey, actual))); } } } catch (IllegalAccessException e) { e.printStackTrace(); fail(String.format("Can't read field '%s' from object %s", f, record)); } catch (NoSuchFieldException e) { e.printStackTrace(); fail(String.format("Field missing: %s", f)); } } } // Test on range scan
    final String startRowKey = allRowKeys[0], endRowKey = allRowKeys[allRowKeys.length - 1]; List<Citizen> citizens = citizenDao.get(startRowKey, endRowKey, Integer.MAX_VALUE).join(); for (int i = 0; i < citizens.size(); i++) { assertEquals(records.get(i), citizens.get(i), String.format("[range scan] The result of get(%s, %s) returned unexpected entry at position " + i, startRowKey, endRowKey)); } try (Records<Citizen> citizenIterable = citizenDao.records(startRowKey, endRowKey, Integer.MAX_VALUE)) { Citizen[] expectedCitizens = citizens.toArray(new Citizen[0]); // this contains all records except the last one
    Citizen[] actualCitizens =
    Iterables.toArray(citizenIterable, Citizen.class); assertArrayEquals(expectedCitizens, actualCitizens, "Fetch directly vs fetch via iterable differ in results [start row key, end row key]"); } Citizen[] allCitizens = citizenDao.get(allRowKeys).map(CompletableFuture::join).toArray(Citizen[]::new); List<Citizen> citizensByPrefix = citizenDao.getByPrefix(citizenDao.toBytes("IND#")).join(); assertArrayEquals(citizensByPrefix.toArray(new Citizen[0]), allCitizens, "get by prefix is returning incorrect result"); try (Records<Citizen> citizenIterable = citizenDao.recordsByPrefix(citizenDao.toBytes("IND#"))) { Citizen[] expectedCitizens = citizenDao.get(allRowKeys).map(CompletableFuture::join).toArray(Citizen[]::new); assertArrayEquals(expectedCitizens, Iterables.toArray(citizenIterable, Citizen.class), "Fetch directly vs fetch via iterable differ in results [row key prefix]"); assertArrayEquals(expectedCitizens, allCitizens, "Results of Get by array of row keys did not match that of list"); } try (Records<Citizen> citizenIterable = citizenDao.records("IND#101", true, "IND#102", true, 1, 1000)) { Iterator<Citizen> iterator = citizenIterable.iterator(); Citizen citizen1 = iterator.next(); Citizen citizen2 = iterator.next(); assertEquals(citizenDao.get("IND#101").join(), citizen1, "Get by iterable didn't match get by individual record"); assertEquals(citizenDao.get("IND#102").join(), citizen2, "Get by iterable didn't match get by individual record"); } assertTrue(Iterables.elementsEqual( citizenDao.records("IND#102", "IND#104"), citizenDao.records("IND#102", true, "IND#104", false, 1, 10) ), "Mismatch in result between records() method with and without default options"); assertTrue(Iterables.elementsEqual( citizenDao.records("IND#102", "IND#104"), citizenDao.get("IND#102", "IND#104").join() ), "Mismatch in result between records() and get() methods"); // Check exists:
    assertTrue(citizenDao.exists("IND#101").join(), "Row key exists, but couldn't be detected");
    assertFalse(citizenDao.exists("IND#100").join(), "Row key doesn't exist"); assertArrayEquals(new Boolean[]{false, true, true, false, false}, citizenDao.exists(a("IND#100", "IND#101", "IND#102", "IND#121", "IND#141")) .map(CompletableFuture::join).toArray(Boolean[]::new)); // Range Get vs Bulk Get (Single-version)
    for (String f : citizenDao.getFields()) { Map<String, Object> fieldValuesBulkGetFull = citizenDao.fetchFieldValues(allRowKeys, f).join(), fieldValuesRangeGetFull = citizenDao.fetchFieldValues("A", "z", f).join(); assertEquals(fieldValuesBulkGetFull, fieldValuesRangeGetFull, "[Field " + f + "] Difference between 'fetch by array of row keys' and 'fetch by range of row keys' when fetched for full range"); Map<String, Object> fieldValuesBulkGetPartial = citizenDao.fetchFieldValues(a("IND#104", "IND#105", "IND#106"), f).join(), fieldValuesRangeGetPartial = citizenDao.fetchFieldValues("IND#104", "IND#107", f).join(); assertEquals(fieldValuesBulkGetPartial, fieldValuesRangeGetPartial, "[Field " + f + "] Difference between 'fetch by array of row keys' and 'fetch by range of row keys' when fetched for partial range"); } // Range Get vs Bulk Get (Multi-version)
    for (String f : citizenDao.getFields()) { Map<String, NavigableMap<Long, Object>> fieldValuesBulkGetFull = citizenDao.fetchFieldValues(allRowKeys, f, Integer.MAX_VALUE).join(), fieldValuesRangeGetFull = citizenDao.fetchFieldValues("A", "z", f, Integer.MAX_VALUE).join(); assertEquals(fieldValuesBulkGetFull, fieldValuesRangeGetFull, "[Field " + f + "] Difference between 'bulk fetch by array of row keys' and 'bulk fetch by range of row keys' when fetched for full range"); Map<String, NavigableMap<Long, Object>> fieldValuesBulkGetPartial = citizenDao.fetchFieldValues(a("IND#101", "IND#102", "IND#103"), f, Integer.MAX_VALUE).join(), fieldValuesRangeGetPartial = citizenDao.fetchFieldValues("IND#101", "IND#104", f, Integer.MAX_VALUE).join(); assertEquals(fieldValuesBulkGetPartial, fieldValuesRangeGetPartial, "[Field " + f + "] Difference between 'bulk fetch by array of row keys' and 'bulk fetch by range of row keys' when fetched for partial range"); } // Test for a single field (redundant test, but that's ok):
    Map<String, Object> actualSalaries = citizenDao.fetchFieldValues(allRowKeys, "sal").join(); long actualSumOfSalaries = 0; for (Object s : actualSalaries.values()) { actualSumOfSalaries += s == null ? 0 : (Integer) s; } long expectedSumOfSalaries = 0; for (Citizen c : records) { expectedSumOfSalaries += c.getSal() == null ? 0 : c.getSal(); } assertEquals(expectedSumOfSalaries, actualSumOfSalaries); // Test record deletion:
    Citizen citizenToBeDeleted = records.get(0); citizenDao.delete(citizenToBeDeleted).join(); assertNull(citizenDao.get(citizenToBeDeleted.composeRowKey()).join(), "Record was not deleted: " + citizenToBeDeleted); List<Citizen> citizensToBeDeleted = Arrays.asList(records.get(1), records.get(2)); citizenDao.delete(citizensToBeDeleted).map(CompletableFuture::join).forEach(System.out::println); assertNull(citizenDao.get(citizensToBeDeleted.get(0).composeRowKey()).join(), "Record was not deleted when deleted by 'list of objects': " + citizensToBeDeleted.get(0)); assertNull(citizenDao.get(citizensToBeDeleted.get(1).composeRowKey()).join(), "Record was not deleted when deleted by 'list of objects': " + citizensToBeDeleted.get(1)); final String rowKey3 = records.get(3).composeRowKey(), rowKey4 = records.get(4).composeRowKey(); citizenDao.delete(new String[]{rowKey3, rowKey4}).map(CompletableFuture::join).forEach(System.out::println); assertNull(citizenDao.get(rowKey3).join(), "Record was not deleted when deleted by 'array of row keys': " + rowKey3); assertNull(citizenDao.get(rowKey4).join(), "Record was not deleted when deleted by 'array of row keys': " + rowKey4); } finally { deleteTables(Citizen.class, CitizenSummary.class); } }

    // HBase append semantics: appends to an absent numeric field, appends to a list field,
    // rejects appends to non-existent or type-mismatched columns (verifying the all-or-none
    // guarantee when one column of a multi-column append is bad), and string concatenation append.
    @Test public void testAppend() throws IOException { try { createTables(Citizen.class); CitizenDAO citizenDao = new
    CitizenDAO(connection); Citizen citizenBeforeAppend = new Citizen("IND", 120, "Abdul", null, null, null, null, null, null, null, null, null, null, new Dependents(null, Arrays.asList(141, 142)), null); assertNull(citizenBeforeAppend.getSal()); String rowKey = citizenDao.persist(citizenBeforeAppend).join(); Integer expectedSalary = 30000; citizenDao.append(rowKey, "sal", expectedSalary).join(); try { citizenDao.append(rowKey, "blahblah", 5).join(); fail("An attempt was made to append value a non-existent field. This should have thrown an exception - It didn't."); } catch (Exception e) { System.out.printf("[egde case] Got error as expected, for non-existent column: %s%n", e.getMessage()); } Citizen citizenAfter1Append = citizenDao.get(rowKey).join(); assertEquals(expectedSalary, citizenAfter1Append.getSal(), "Append operation didn't work as expected on field 'sal'"); List<Contact> expectedContacts = l(new Contact("contact1", 23411)); citizenDao.append(rowKey, "emergencyContacts1", expectedContacts).join(); Citizen citizenAfter2Append = citizenDao.get(rowKey).join(); assertEquals(expectedContacts, citizenAfter2Append.getEmergencyContacts1(), "Append operation didn't work as expected on field 'emergencyContacts1'"); try { citizenDao.append(rowKey, m(e("f3", 123L), e("f4", "blah blah blah"))).join(); fail("An attempt was made to append a BigDecimal field with a String value - This should have thrown an exception - It didn't."); } catch (Exception e) { System.out.printf("[edge case] Got error as expected, for type mismatch in columns: %s%n", e.getMessage()); } Citizen citizenAfter3Append = citizenDao.get(rowKey).join(); assertNull(citizenAfter3Append.getF3(), "Append operation broke 'all or none' semantics"); citizenDao.append(rowKey, m(e("f3", 123L))).join(); Citizen citizenAfter4Append = citizenDao.get(rowKey).join(); assertEquals(123L, (long) citizenAfter4Append.getF3(), "Append operation failed for f3"); citizenDao.append(rowKey, "name", " Kalam").join();
    assertEquals("Abdul Kalam", citizenDao.fetchFieldValue(rowKey, "name").join(), "Append operation failed for name"); } finally { deleteTables(Citizen.class); } }

    // Custom/native operations on a Counter entity: gets with explicit version counts, timestamps
    // and time ranges built via getGet(), plus increment in its three forms (basic, with a
    // Durability flag, and via a hand-built native Increment), including rejection of increments
    // on a non-Long field.
    @Test public void testCustom() throws IOException { try { createTables(Counter.class); CounterDAO counterDAO = new CounterDAO(connection); Counter counter = new Counter("c1"); for (int i = 1; i <= 10; i++) { counter.setValue((long) i, (long) i); } counter.setVar(0L); final String rowKey = counterDAO.persist(counter).join(); // Test custom timestamp values:
    assertEquals(counterDAO.get(rowKey, 7).join(), counterDAO.getOnGet(counterDAO.getGet(rowKey).readVersions(7)).join(), "Unexpected values on get (number of versions)"); assertEquals(nm(e(10L, 10L)), counterDAO.getOnGet(counterDAO.getGet(rowKey).setTimestamp(10)).join().getValue(), "Unexpected values on get (given timestamp)"); assertEquals(Arrays.asList(new Counter("c1", nm(e(1L, 1L), e(2L, 2L), e(3L, 3L), e(4L, 4L))), new Counter("c1", nm(e(3L, 3L), e(4L, 4L)))), counterDAO.getOnGets(Arrays.asList(counterDAO.getGet(rowKey).setTimeRange(1, 5).readAllVersions(), counterDAO.getGet(rowKey).setTimeRange(1, 5).readVersions(2))).map(CompletableFuture::join).collect(Collectors.toList()), "Unexpected values on bulk get"); // Test increment features:
    assertEquals(1L, counterDAO.increment(rowKey, "var", 1L).join(), "Increment didn't apply - basic"); assertEquals(1L, (long) counterDAO.fetchFieldValue(rowKey, "var").join(), "Increment apply didn't reflect in fetch field - basic"); assertEquals(3L, counterDAO.increment(rowKey, "var", 2L, Durability.SKIP_WAL).join(), "Increment didn't apply - with durability flag"); assertEquals(3L, counterDAO.fetchFieldValue(rowKey, "var").join(), "Increment apply didn't reflect in fetch field - with durability flag"); Increment increment = counterDAO.getIncrement(rowKey).addColumn(Bytes.toBytes("a"), Bytes.toBytes("var"), 5L); Counter persistedCounter = counterDAO.increment(increment).join(); assertEquals(8L,
    persistedCounter.getVar(), "Increment didn't reflect in object get - native way"); assertEquals(8L, (long) counterDAO.fetchFieldValue(rowKey, "var").join(), "Increment didn't apply in fetch field - native way"); try { counterDAO.increment(rowKey, "badvarI", 4L).join(); fail("Attempt to increment a field that isn't Long succeeded (it shouldn't have)"); } catch (Exception ignored) { //nothing
    } } finally { deleteTables(Counter.class); } }

    // Version-history matrix over the Crawl entity: writes through the unversioned DAO and reads
    // through the versioned one (and vice versa), checks latest/oldest values across range scans
    // vs bulk gets for 1..NUM_VERSIONS versions, and verifies deletion in all four
    // written-as/deleted-as (versioned x unversioned) combinations.
    @Test public void testVersioning() throws IOException { try { createTables(Crawl.class); CrawlDAO crawlDAO = new CrawlDAO(connection); CrawlNoVersionDAO crawlNoVersionDAO = new CrawlNoVersionDAO(connection); final int NUM_VERSIONS = 3; Double[] testNumbers = new Double[]{-1.0, Double.MAX_VALUE, Double.MIN_VALUE, 3.14159, 2.71828, 1.0}; Double[] testNumbersOfRange = Arrays.copyOfRange(testNumbers, testNumbers.length - NUM_VERSIONS, testNumbers.length); // Written as unversioned, read as versioned
    List<CrawlNoVersion> objs = new ArrayList<>(); for (Double n : testNumbers) { objs.add(new CrawlNoVersion("key").setF1(n)); } crawlNoVersionDAO.persist(objs).map(CompletableFuture::join).forEach(System.out::print); Crawl crawl = crawlDAO.get("key", NUM_VERSIONS).join(); assertEquals(1.0, crawl.getF1Versioned().values().iterator().next(), 1e-9, "Issue with version history implementation when written as unversioned and read as versioned"); crawlDAO.delete("key").join(); Crawl versioned = crawlDAO.get("key").join(); assertNull(versioned, "Deleted row (with key " + versioned + ") still exists when accessed as versioned DAO"); CrawlNoVersion versionless = crawlNoVersionDAO.get("key").join(); assertNull(versionless, "Deleted row (with key " + versionless + ") still exists when accessed as versionless DAO"); // Written as versioned, read as unversioned+versioned
    Crawl crawl2 = new Crawl("key2"); long timestamp = System.currentTimeMillis(); long i = 0; for (Double n : testNumbers) { crawl2.addF1(timestamp + i, n); i++; }
    crawlDAO.persist(crawl2).join(); CrawlNoVersion crawlNoVersion = crawlNoVersionDAO.get("key2").join(); assertEquals(crawlNoVersion.getF1(), testNumbers[testNumbers.length - 1], "Entry with the highest version (i.e. timestamp) isn't the one that was returned by DAO get"); assertArrayEquals(testNumbersOfRange, crawlDAO.get("key2", NUM_VERSIONS).join().getF1Versioned().values().toArray(), "Issue with version history implementation when written as versioned and read as unversioned"); List<String> rowKeysList = new ArrayList<>(); for (int v = 0; v <= 9; v++) { for (int k = 1; k <= 4; k++) { String key = "oKey" + k; crawlDAO.persist(new Crawl(key).addF1((double) v)).join(); rowKeysList.add(key); } } String[] rowKeys = rowKeysList.toArray(new String[0]); Set<Double> oldestValuesRangeScan = new HashSet<>(), oldestValuesBulkScan = new HashSet<>(); for (int k = 1; k <= NUM_VERSIONS; k++) { Set<Double> latestValuesRangeScan = new HashSet<>(); NavigableMap<String, NavigableMap<Long, Object>> fieldValues1 = crawlDAO.fetchFieldValues("oKey0", "oKey9", "f1", k).join(); for (NavigableMap.Entry<String, NavigableMap<Long, Object>> e : fieldValues1.entrySet()) { latestValuesRangeScan.add((Double) e.getValue().lastEntry().getValue()); oldestValuesRangeScan.add((Double) e.getValue().firstEntry().getValue()); } assertEquals(1, latestValuesRangeScan.size(), "When fetching multiple versions of a field, the latest version of field is not as expected"); Set<Double> latestValuesBulkScan = new HashSet<>(); Map<String, NavigableMap<Long, Object>> fieldValues2 = crawlDAO.fetchFieldValues(rowKeys, "f1", k).join(); for (NavigableMap.Entry<String, NavigableMap<Long, Object>> e : fieldValues2.entrySet()) { latestValuesBulkScan.add((Double) e.getValue().lastEntry().getValue()); oldestValuesBulkScan.add((Double) e.getValue().firstEntry().getValue()); } assertEquals(1, latestValuesBulkScan.size(), "When fetching multiple versions of a field, the latest version of field is not as expected"); }
    assertEquals(NUM_VERSIONS, oldestValuesRangeScan.size(), "When fetching multiple versions of a field through bulk scan, the oldest version of field is not as expected"); assertEquals(NUM_VERSIONS, oldestValuesBulkScan.size(), "When fetching multiple versions of a field through range scan, the oldest version of field is not as expected"); assertEquals(oldestValuesRangeScan, oldestValuesBulkScan, "Fetch by array and fetch by range differ"); // Deletion tests:
    // Written as unversioned, deleted as unversioned:
    final String deleteKey1 = "write_unversioned__delete_unversioned"; crawlNoVersionDAO.persist(new Crawl(deleteKey1).addF1(10.01)).join(); crawlNoVersionDAO.delete(deleteKey1).join(); assertNull(crawlNoVersionDAO.get(deleteKey1).join(), "Row with key '" + deleteKey1 + "' exists, when written through unversioned DAO and deleted through unversioned DAO!"); // Written as versioned, deleted as versioned:
    final String deleteKey2 = "write_versioned__delete_versioned"; crawlDAO.persist(new Crawl(deleteKey2).addF1(10.02)).join(); crawlDAO.delete(deleteKey2).join(); assertNull(crawlNoVersionDAO.get(deleteKey2).join(), "Row with key '" + deleteKey2 + "' exists, when written through versioned DAO and deleted through versioned DAO!"); // Written as unversioned, deleted as versioned:
    final String deleteKey3 = "write_unversioned__delete_versioned"; crawlNoVersionDAO.persist(new Crawl(deleteKey3).addF1(10.03)).join(); crawlDAO.delete(deleteKey3).join(); assertNull(crawlNoVersionDAO.get(deleteKey3).join(), "Row with key '" + deleteKey3 + "' exists, when written through unversioned DAO and deleted through versioned DAO!"); // Written as versioned, deleted as unversioned:
    final String deleteKey4 = "write_versioned__delete_unversioned"; crawlDAO.persist(new Crawl(deleteKey4).addF1(10.04)).join(); crawlNoVersionDAO.delete(deleteKey4).join(); assertNull(crawlNoVersionDAO.get(deleteKey4).join(), "Row with key '" + deleteKey4 + "' exists, when written through versioned DAO and deleted through unversioned DAO!"); } finally { deleteTables(Crawl.class); } }

    // Round-trips an entity whose row key is a Long (not a String) to verify non-String
    // row-key serialization.
    @Test public void testNonStringRowkeys() throws IOException { try { createTables(Employee.class); EmployeeDAO employeeDAO = new EmployeeDAO(connection); Employee ePre = new Employee(100L, "E1", (short) 3, System.currentTimeMillis()); Long rowKey = employeeDAO.persist(ePre).join(); Employee ePost = employeeDAO.get(rowKey).join(); assertEquals(ePre, ePost, "Object got corrupted after persist and get"); } finally { deleteTables(Employee.class); } } }
apache-2.0
A-Miracle/QiangHongBao
themelib/src/main/java/com/afollestad/appthemeengine/views/ATERadioButton.java
1317
package com.afollestad.appthemeengine.views; import android.content.Context; import android.content.res.TypedArray; import android.util.AttributeSet; import androidx.appcompat.widget.AppCompatRadioButton; import com.afollestad.appthemeengine.ATE; import com.afollestad.appthemeengine.R; /** * @author Aidan Follestad (afollestad) */ @PreMadeView public class ATERadioButton extends AppCompatRadioButton { public ATERadioButton(Context context) { super(context); init(context, null); } public ATERadioButton(Context context, AttributeSet attrs) { super(context, attrs); init(context, attrs); } public ATERadioButton(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(context, attrs); } private void init(Context context, AttributeSet attrs) { setTag("tint_accent_color,text_primary"); String key = null; if (attrs != null) { TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.ATERadioButton, 0, 0); try { key = a.getString(R.styleable.ATERadioButton_ateKey_radioButton); } finally { a.recycle(); } } ATE.apply(context, this, key); } }
apache-2.0
dmillett/prank
src/test/java/net/prank/core/PranksterTest.java
12515
package net.prank.core;

import net.prank.example.ExampleObject;
import net.prank.example.ExampleScoreCard;
import org.junit.Test;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

/**
 * Tests for {@link Prankster}: scoring-future setup, direct score updates,
 * per-request enable/disable options, and timeout resolution.
 *
 * @author dmillett
 *
 * Copyright 2012 David Millett
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public class PranksterTest {

    /** Tolerance for double comparisons in assertEquals. */
    private static final double DELTA = 1e-10;

    @Test
    public void test__execute() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 2, 4.0, 50.0);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        RequestOptions options = new RequestOptions.RequestOptionsBuilder().build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();
        optionsMap.put(exampleScoreCard.getName(), options);
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject, optionsMap);

        Set<Future<Result>> futureResult = prankster.setupScoring(exampleRequest);

        for (Future<Result> future : futureResult) {
            try {
                // Should return quickly, they were already submitted to the pool
                future.get(50, TimeUnit.MILLISECONDS);
            } catch (Exception e) {
                fail("Should not");
            }
        }

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(new BigDecimal("5"), result.getScoreData().getScore());
        assertEquals(2, result.getPosition().getOriginalIndex());
        assertEquals(9.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(50.0, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__execute_updateObjectsWithScores() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 2, 4.0, 50.0);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        RequestOptions options = new RequestOptions.RequestOptionsBuilder().build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();
        optionsMap.put(exampleScoreCard.getName(), options);
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject, optionsMap);

        Set<Prankster.ScoringFuture> futureResult = prankster.buildScoringUpdateFutures(exampleRequest, 50);

        for (Prankster.ScoringFuture scoringFuture : futureResult) {
            try {
                // Should return quickly, they were already submitted to the pool
                scoringFuture.getFuture().get(50, TimeUnit.MILLISECONDS);
            } catch (Exception e) {
                fail("Should not");
            }
        }

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(new BigDecimal("5"), result.getScoreData().getScore());
        assertEquals(2, result.getPosition().getOriginalIndex());
        assertEquals(9.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(50.0, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__updateObjectScore() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 2, 4.0, 50.0);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(5.0, result.getScoreData().getScore().doubleValue(), DELTA);
        assertEquals(2, result.getPosition().getOriginalIndex());
        assertEquals(9.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(50.0, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__updateObjectsWithScores() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 2, 4.0, 50.0);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(5.0, result.getScoreData().getScore().doubleValue(), DELTA);
        assertEquals(2, result.getPosition().getOriginalIndex());
        assertEquals(9.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(50.0, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__execute_disabled() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        // Disabled score card => no results should be produced.
        RequestOptions options = new RequestOptions.RequestOptionsBuilder().setEnabledB(false).build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();
        optionsMap.put(exampleScoreCard.getName(), options);

        Request<ExampleObject> exampleRequest = new Request<>(exampleObject, optionsMap);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(0, exampleObject.getScoreSummary().getResults().size());
    }

    @Test
    public void test__execute_disabled_updateObjectsWithScores() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        RequestOptions options = new RequestOptions.RequestOptionsBuilder().setEnabledB(false).build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();
        optionsMap.put(exampleScoreCard.getName(), options);

        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("50.00"));
        Request<ExampleObject> exampleRequest = new Request<ExampleObject>(exampleObject, optionsMap);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(0, exampleObject.getScoreSummary().getResults().size());
    }

    @Test
    public void test__execute_enabled() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("55.00"));
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject);

        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(5.0, result.getScoreData().getScore().doubleValue(), DELTA);
        assertEquals(4, result.getPosition().getOriginalIndex());
        assertEquals(10.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(0.75, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__execute_enabled_updateObjectsWithScores() {

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        ExampleObject exampleObject = new ExampleObject(3, new BigDecimal("5.00"), new BigDecimal("55.00"));
        Request<ExampleObject> exampleRequest = new Request<>(exampleObject);

        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);
        prankster.updateObjectsWithScores(exampleRequest, 50);

        assertEquals(1, exampleObject.getScoreSummary().getResults().size());
        Result result = exampleObject.getScoreSummary().getResultByScoreCard(exampleScoreCard.getName());
        assertNotNull(result);
        assertEquals(5.0, result.getScoreData().getScore().doubleValue(), DELTA);
        assertEquals(4, result.getPosition().getOriginalIndex());
        assertEquals(10.0, result.getStatistics().getAverage().doubleValue(), DELTA);
        assertEquals(0.75, result.getStatistics().getStandardDeviation().doubleValue(), DELTA);
    }

    @Test
    public void test__determineTimeout_default() {

        long defaultTimeout = 10;
        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        // Null and empty option maps both fall back to the default timeout.
        long timeoutNull = prankster.determineTimeout(defaultTimeout, exampleScoreCard.getName(), null);
        assertEquals(10, timeoutNull);

        Map<String, RequestOptions> optionsMap = new HashMap<>();
        long timeoutEmpty = prankster.determineTimeout(defaultTimeout, exampleScoreCard.getName(), optionsMap);
        assertEquals(10, timeoutEmpty);
    }

    @Test
    public void test__determineTimeout_per_request() {

        long defaultTimeout = 10;
        RequestOptions options = new RequestOptions.RequestOptionsBuilder().setTimeoutMillisB(1).build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        optionsMap.put(exampleScoreCard.getName(), options);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        // Per-request timeout overrides the default.
        long timeout = prankster.determineTimeout(defaultTimeout, exampleScoreCard.getName(), optionsMap);
        assertEquals(1, timeout);
    }

    @Test
    public void test__determineTimeout_per_request_disabled() {

        long defaultTimeout = 10;
        RequestOptions options = new RequestOptions.RequestOptionsBuilder().setTimeoutMillisB(1).setEnabledB(false).build();
        Map<String, RequestOptions> optionsMap = new HashMap<>();

        ScoreCard<ExampleObject> exampleScoreCard = new ExampleScoreCard(2, 4, 5.0, 0.75);
        optionsMap.put(exampleScoreCard.getName(), options);
        Prankster<ExampleObject> prankster = buildPrankster(exampleScoreCard);

        // Disabled per-request options are ignored; the default timeout applies.
        long timeout = prankster.determineTimeout(defaultTimeout, exampleScoreCard.getName(), optionsMap);
        assertEquals(10, timeout);
    }

    /**
     * Builds a single-threaded Prankster from the supplied score cards.
     * Parameterized varargs (instead of the previous raw {@code ScoreCard...})
     * to avoid unchecked warnings; safe because the array is only read.
     */
    @SafeVarargs
    private final Prankster<ExampleObject> buildPrankster(ScoreCard<ExampleObject>... scoreCards) {
        Set<ScoreCard<ExampleObject>> cards = new HashSet<>(Arrays.asList(scoreCards));
        return new Prankster<>(cards, 1);
    }
}
apache-2.0
quanla/quan-util-core
src/main/java/qj/util/cache/Cache2.java
615
package qj.util.cache; import java.util.ArrayList; import qj.util.funct.F2; public class Cache2<A, B, T> { private final F2<A, B, T> func; ArrayList<Holder> holders = new ArrayList<Holder>(); public Cache2(F2<A, B, T> func) { this.func = func; } public T get(A a, B b) { for (Holder holder : holders) { if (holder.a.equals(a) && holder.b.equals(b)) { return holder.t; } } T t = func.e(a, b); holders.add(new Holder(a, b, t)); return t; } private class Holder { A a; B b; T t; public Holder(A a, B b, T t) { this.a = a; this.b = b; this.t = t; } } }
apache-2.0
adligo/tests4j_4jacoco.adligo.org
src/org/adligo/tests4j_4jacoco/plugin/discovery/CircularDependenciesDiscovery.java
4828
package org.adligo.tests4j_4jacoco.plugin.discovery;

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.adligo.tests4j.models.shared.association.ClassAssociationsLocal;
import org.adligo.tests4j.models.shared.association.ClassAssociationsLocalMutant;
import org.adligo.tests4j.models.shared.association.I_ClassAssociationsCache;
import org.adligo.tests4j.models.shared.association.I_ClassAssociationsLocal;
import org.adligo.tests4j.models.shared.association.I_ClassParentsLocal;
import org.adligo.tests4j.run.helpers.I_ClassFilter;
import org.adligo.tests4j.shared.asserts.reference.ClassAliasLocal;
import org.adligo.tests4j.shared.asserts.reference.I_ClassAliasLocal;
import org.adligo.tests4j.shared.output.I_Tests4J_Log;

/**
 * A model-like (non thread safe) class that loads classes into
 * the class loader and discovers references.
 *
 * References and dependencies are very similar concepts in this package:
 * references illuminate that one class references another;
 * dependencies illuminate that one class depends on another.
 * The I_CachedClassBytesClassLoader is shared memory between threads.
 * This model also keeps a cache of the references for classes it reads,
 * so it doesn't need to re-ASM-byte-code-read them.
 *
 * @author scott
 */
public class CircularDependenciesDiscovery implements I_ClassDependenciesDiscovery {
	private I_Tests4J_Log log;
	private I_ClassFilter classFilter;
	private I_ClassAssociationsCache cache;
	/**
	 * this contains the initial references
	 */
	private Map<I_ClassAliasLocal, I_ClassAssociationsLocal> refMap = new HashMap<I_ClassAliasLocal, I_ClassAssociationsLocal>();
	// NOTE(review): the two sets below are only cleared, never populated, in this
	// class as written; kept for compatibility in case external code reads them
	// reflectively or a subclass relies on them — confirm before removing.
	private Set<I_ClassParentsLocal> initalRefsToIdentify = new HashSet<I_ClassParentsLocal>();
	private Set<I_ClassParentsLocal> fullRefsFound = new HashSet<I_ClassParentsLocal>();
	private I_ClassDependenciesDiscovery fullDependenciesDiscovery;

	public CircularDependenciesDiscovery() {}

	/**
	 * Finds (from the cache) or loads and computes the class associations for
	 * {@code c}, including circular-reference detection.
	 *
	 * @diagram_sync with DiscoveryOverview.seq on 8/17/2014
	 * @param c the class to discover
	 * @return the (possibly cached) associations for {@code c}
	 * @throws IOException if byte-code reading fails
	 * @throws ClassNotFoundException if a referenced class cannot be loaded
	 */
	public I_ClassAssociationsLocal findOrLoad(Class<?> c) throws IOException, ClassNotFoundException {
		if (log.isLogEnabled(CircularDependenciesDiscovery.class)) {
			log.log(".discoverAndLoad " + c.getName());
		}
		String className = c.getName();
		// reset per-call working state (this class is not thread safe)
		refMap.clear();
		initalRefsToIdentify.clear();
		fullRefsFound.clear();

		I_ClassAssociationsLocal crefs = cache.getDependencies(className);
		if (crefs != null) {
			return crefs;
		}
		if (classFilter.isFiltered(c)) {
			// filtered classes skip circular-reference computation entirely
			I_ClassAssociationsLocal toRet = new ClassAssociationsLocal(fullDependenciesDiscovery.findOrLoad(c));
			cache.putDependenciesIfAbsent(toRet);
			return toRet;
		}
		// seed refMap with the class itself plus every direct dependency
		I_ClassAssociationsLocal preCircleRefs = fullDependenciesDiscovery.findOrLoad(c);
		refMap.put(new ClassAliasLocal(preCircleRefs), preCircleRefs);
		Set<I_ClassParentsLocal> refs = preCircleRefs.getDependenciesLocal();
		for (I_ClassParentsLocal ref: refs) {
			I_ClassAssociationsLocal preCircleDelegate = fullDependenciesDiscovery.findOrLoad(ref.getTarget());
			refMap.put(new ClassAliasLocal(preCircleDelegate), preCircleDelegate);
		}
		ClassAssociationsLocal toRet = calcCircles(preCircleRefs);
		cache.putDependenciesIfAbsent(toRet); // (stray empty statement ';;' removed)
		return toRet;
	}

	/**
	 * Marks every entry in refMap (other than the class itself) that refers
	 * back to {@code preCircleRefs}' target as a circular reference.
	 *
	 * @diagram_sync with DiscoveryOverview.seq on 8/17/2014
	 * @param preCircleRefs the associations before circle detection
	 * @return an immutable copy with circular references recorded
	 */
	private ClassAssociationsLocal calcCircles(I_ClassAssociationsLocal preCircleRefs) {
		ClassAssociationsLocalMutant crlm = new ClassAssociationsLocalMutant(preCircleRefs);

		Collection<I_ClassAssociationsLocal> entries = refMap.values();
		Set<I_ClassAssociationsLocal> copy = new HashSet<I_ClassAssociationsLocal>(entries);
		// NOTE(review): removal/containment below rely on equals() compatibility
		// between ClassAliasLocal, I_ClassAssociationsLocal and the mutant type —
		// presumably all compare by class alias; confirm against those models.
		copy.remove(new ClassAliasLocal(crlm.getTarget()));

		for (I_ClassAssociationsLocal cr: copy) {
			Set<I_ClassParentsLocal> refs = cr.getDependenciesLocal();
			if (refs != null) {
				if (refs.contains(crlm)) {
					crlm.addCircularReferences(cr);
				}
			}
		}
		return new ClassAssociationsLocal(crlm);
	}

	public I_Tests4J_Log getLog() {
		return log;
	}

	public I_ClassFilter getClassFilter() {
		return classFilter;
	}

	public I_ClassAssociationsCache getCache() {
		return cache;
	}

	public I_ClassDependenciesDiscovery getFullDependenciesDiscovery() {
		return fullDependenciesDiscovery;
	}

	public void setLog(I_Tests4J_Log log) {
		this.log = log;
	}

	public void setClassFilter(I_ClassFilter classFilter) {
		this.classFilter = classFilter;
	}

	public void setCache(I_ClassAssociationsCache cache) {
		this.cache = cache;
	}

	public void setFullDependenciesDiscovery(I_ClassDependenciesDiscovery classDependenciesDiscovery) {
		this.fullDependenciesDiscovery = classDependenciesDiscovery;
	}
}
apache-2.0
benbenw/jmeter
src/functions/src/main/java/org/apache/jmeter/functions/TimeShift.java
9758
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jmeter.functions;

import java.time.Duration;
import java.time.Instant;
import java.time.Year;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoField;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Locale;

import org.apache.commons.lang3.LocaleUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.engine.util.CompoundVariable;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.samplers.Sampler;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

/**
 * timeShifting Function permit to shift a date
 * <p>
 * Parameters:
 * <ul>
 * <li>format date @see
 * https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html
 * (optional - defaults to epoch time in millisecond)</li>
 * <li>date to shift formatted
 * as first param (optional - defaults now)</li>
 * <li>amount of (seconds, minutes, hours, days ) to add (optional - default nothing is add)</li>
 * <li>a string of the locale for the format ( optional )</li>
 * <li>variable name ( optional )</li>
 * </ul>
 * Returns:
 * <p>a formatted date with the specified number of (seconds, minutes,
 * hours, days or months ) added. Value is also saved in the variable for
 * later re-use.
 *
 * @since 3.3
 */
public class TimeShift extends AbstractFunction {
    private static final Logger log = LoggerFactory.getLogger(TimeShift.class);

    // Function name as typed in test plans.
    private static final String KEY = "__timeShift"; // $NON-NLS-1$

    // Parameter descriptions shown in the function helper GUI; order matches setParameters.
    private static final List<String> desc = Arrays.asList(JMeterUtils.getResString("time_format_shift"),
            JMeterUtils.getResString("date_to_shift"), JMeterUtils.getResString("value_to_shift"),
            JMeterUtils.getResString("locale_format"), JMeterUtils.getResString("function_name_paropt"));

    // Ensure that these are set, even if no parameters are provided
    private String format = ""; //$NON-NLS-1$
    private CompoundVariable dateToShiftCompound; // $NON-NLS-1$
    private CompoundVariable amountToShiftCompound; // $NON-NLS-1$
    private Locale locale = JMeterUtils.getLocale(); // $NON-NLS-1$
    private String variableName = ""; //$NON-NLS-1$
    private ZoneId systemDefaultZoneID = ZoneId.systemDefault();

    /**
     * Cache key pairing a date pattern with a locale, since a DateTimeFormatter
     * depends on both.
     */
    private static final class LocaleFormatObject {

        private String format;
        private Locale locale;

        public LocaleFormatObject(String format, Locale locale) {
            this.format = format;
            this.locale = locale;
        }

        public String getFormat() {
            return format;
        }

        public Locale getLocale() {
            return locale;
        }

        @Override
        public int hashCode() {
            return format.hashCode() + locale.hashCode();
        }

        @Override
        public boolean equals(Object other) {
            if (!(other instanceof LocaleFormatObject)) {
                return false;
            }

            // NOTE(review): locales are compared via getDisplayName(), which is
            // itself locale-sensitive; presumably equivalent locales always render
            // the same display name here — confirm if cache misses are observed.
            LocaleFormatObject otherError = (LocaleFormatObject) other;
            return format.equals(otherError.getFormat())
                    && locale.getDisplayName().equals(otherError.getLocale().getDisplayName());
        }

        /**
         * @see java.lang.Object#toString()
         */
        @Override
        public String toString() {
            return "LocaleFormatObject [format=" + format + ", locale=" + locale + "]";
        }
    }

    /** Date time format cache handler **/
    private Cache<LocaleFormatObject, DateTimeFormatter> dateTimeFormatterCache = null;

    public TimeShift() {
        super();
    }

    /**
     * Shifts the configured date by the configured amount and returns it
     * formatted. Fallbacks: an invalid format pattern returns ""; an unparsable
     * date or amount is logged and that step is skipped (the date defaults to
     * now, the shift to zero). {@inheritDoc}
     */
    @Override
    public String execute(SampleResult previousResult, Sampler currentSampler) throws InvalidVariableException {
        String amountToShift = amountToShiftCompound.execute().trim();
        String dateToShift = dateToShiftCompound.execute().trim();
        // Default base date: now in the system default zone (used when no date given
        // or when parsing the supplied date fails).
        ZonedDateTime zonedDateTimeToShift = ZonedDateTime.now(systemDefaultZoneID);

        DateTimeFormatter formatter = null;
        if (!StringUtils.isEmpty(format)) {
            try {
                LocaleFormatObject lfo = new LocaleFormatObject(format, locale);
                // Formatter instances are cached per (pattern, locale) pair.
                formatter = dateTimeFormatterCache.get(lfo, this::createFormatter);
            } catch (IllegalArgumentException ex) {
                log.error("Format date pattern '{}' is invalid "
                        + "(see https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html)",
                        format, ex); // $NON-NLS-1$
                return "";
            }
        }

        if (!dateToShift.isEmpty()) {
            try {
                if (formatter != null) {
                    zonedDateTimeToShift = ZonedDateTime.parse(dateToShift, formatter);
                } else {
                    // No format given: the date parameter is epoch milliseconds.
                    zonedDateTimeToShift = ZonedDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(dateToShift)),
                            systemDefaultZoneID);
                }
            } catch (DateTimeParseException | NumberFormatException ex) {
                log.error("Failed to parse the date '{}' to shift with formatter '{}'", dateToShift, formatter, ex); // $NON-NLS-1$
            }
        }

        // Check amount value to shift (ISO-8601 duration, e.g. "PT10M" or "P2D").
        if (!StringUtils.isEmpty(amountToShift)) {
            try {
                Duration duration = Duration.parse(amountToShift);
                zonedDateTimeToShift = zonedDateTimeToShift.plus(duration);
            } catch (DateTimeParseException ex) {
                log.error(
                        "Failed to parse the amount duration '{}' to shift "
                                + "(see https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html#parse-java.lang.CharSequence-) ",
                        amountToShift, ex); // $NON-NLS-1$
            }
        }

        String dateString;
        if (formatter != null) {
            dateString = zonedDateTimeToShift.format(formatter);
        } else {
            // No format: emit epoch milliseconds, mirroring the input convention.
            dateString = String.valueOf(zonedDateTimeToShift.toInstant().toEpochMilli());
        }

        if (!StringUtils.isEmpty(variableName)) {
            JMeterVariables vars = getVariables();
            if (vars != null) {// vars will be null on TestPlan
                vars.put(variableName, dateString);
            }
        }
        return dateString;
    }

    /**
     * Builds a formatter for the given pattern/locale with lenient defaults so
     * partial patterns still parse: unspecified time fields default to 0, the
     * day/month to 1, the year to the current year, and the offset to the
     * system's current offset.
     */
    private DateTimeFormatter createFormatter(LocaleFormatObject format) {
        log.debug("Create a new instance of DateTimeFormatter for format '{}' in the cache", format);
        return new DateTimeFormatterBuilder().appendPattern(format.getFormat())
                .parseDefaulting(ChronoField.NANO_OF_SECOND, 0)
                .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0)
                .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0)
                .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0)
                .parseDefaulting(ChronoField.HOUR_OF_DAY, 0)
                .parseDefaulting(ChronoField.DAY_OF_MONTH, 1)
                .parseDefaulting(ChronoField.MONTH_OF_YEAR, 1)
                .parseDefaulting(ChronoField.YEAR_OF_ERA, Year.now().getValue())
                .parseDefaulting(ChronoField.OFFSET_SECONDS, ZonedDateTime.now().getOffset().getTotalSeconds())
                .toFormatter(format.getLocale());
    }

    /**
     * Accepts 4 or 5 parameters: format, date, amount, [locale,] variable name.
     * With 4 parameters the locale is omitted and the default JMeter locale is
     * used. {@inheritDoc}
     */
    @Override
    public void setParameters(Collection<CompoundVariable> parameters) throws InvalidVariableException {
        checkParameterCount(parameters, 4, 5);
        Object[] values = parameters.toArray();

        format = ((CompoundVariable) values[0]).execute().trim();
        dateToShiftCompound = (CompoundVariable) values[1];
        amountToShiftCompound = (CompoundVariable) values[2];
        if (values.length == 4) {
            variableName = ((CompoundVariable) values[3]).execute().trim();
        } else {
            String localeAsString = ((CompoundVariable) values[3]).execute().trim();
            if (!localeAsString.trim().isEmpty()) {
                locale = LocaleUtils.toLocale(localeAsString);
            }
            variableName = ((CompoundVariable) values[4]).execute().trim();
        }

        // Create the cache (lazily, on first configuration of this function instance)
        if (dateTimeFormatterCache == null) {
            dateTimeFormatterCache = Caffeine.newBuilder()
                    .maximumSize(100).build();
        }
    }

    /** {@inheritDoc} */
    @Override
    public String getReferenceKey() {
        return KEY;
    }

    /** {@inheritDoc} */
    @Override
    public List<String> getArgumentDesc() {
        return desc;
    }
}
apache-2.0
ashward/buddycloud-server-java
src/test/java/org/buddycloud/channelserver/packetprocessor/iq/namespace/pubsub/set/PublishTest.java
17181
package org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.set; import java.util.ArrayList; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import junit.framework.Assert; import org.buddycloud.channelserver.Configuration; import org.buddycloud.channelserver.channel.ChannelManager; import org.buddycloud.channelserver.channel.validate.AtomEntry; import org.buddycloud.channelserver.db.exception.NodeStoreException; import org.buddycloud.channelserver.packetHandler.iq.IQTestHandler; import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub; import org.buddycloud.channelserver.pubsub.affiliation.Affiliations; import org.buddycloud.channelserver.pubsub.model.NodeMembership; import org.buddycloud.channelserver.pubsub.model.NodeSubscription; import org.buddycloud.channelserver.pubsub.model.impl.GlobalItemIDImpl; import org.buddycloud.channelserver.pubsub.model.impl.NodeItemImpl; import org.buddycloud.channelserver.pubsub.model.impl.NodeMembershipImpl; import org.buddycloud.channelserver.pubsub.model.impl.NodeSubscriptionImpl; import org.buddycloud.channelserver.pubsub.subscription.Subscriptions; import org.buddycloud.channelserver.utils.XMLConstants; import org.dom4j.Element; import org.dom4j.tree.BaseElement; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.xmpp.packet.IQ; import org.xmpp.packet.JID; import org.xmpp.packet.Message; import org.xmpp.packet.Packet; import org.xmpp.packet.PacketError; import org.xmpp.resultsetmanagement.ResultSetImpl; public class PublishTest extends IQTestHandler { private IQ request; private ChannelManager channelManager; private Publish publish; private JID jid; private Element element; private BlockingQueue<Packet> queue = new LinkedBlockingQueue<Packet>(); private String node = "/user/romeo@shakespeare.lit/posts"; private String server = "channels.shakespeare.lit"; private AtomEntry 
validateEntry; private Element entry; @Before public void setUp() throws Exception { channelManager = Mockito.mock(ChannelManager.class); validateEntry = Mockito.mock(AtomEntry.class); Configuration.getInstance().putProperty(Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER, Boolean.TRUE.toString()); queue = new LinkedBlockingQueue<Packet>(); publish = new Publish(queue, channelManager); jid = new JID("juliet@shakespeare.lit/balcony"); request = readStanzaAsIq("/iq/pubsub/publish/request.stanza"); publish.setServerDomain("shakespeare.lit"); publish.setChannelManager(channelManager); publish.setEntryValidator(validateEntry); entry = request.getChildElement().element("publish").element("item").element("entry").createCopy(); element = new BaseElement("publish"); Mockito.when(channelManager.nodeExists(node)).thenReturn(true); NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.publisher, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership); Mockito.when(channelManager.getNodeSubscriptionListeners(Mockito.eq(node))).thenReturn( new ResultSetImpl<NodeSubscription>(new ArrayList<NodeSubscription>())); validateEntry.setPayload(request.getChildElement().element("publish").element("item").createCopy()); Mockito.when(validateEntry.getGlobalItemId()).thenReturn( new GlobalItemIDImpl(new JID(request.getTo().toBareJID()), node, entry.elementText("id")).toString()); Mockito.when(validateEntry.getLocalItemId()).thenCallRealMethod(); Mockito.when(validateEntry.isValid()).thenReturn(true); Mockito.when(validateEntry.getPayload()).thenReturn(entry); } @Test public void passingRetractAsElementNameReturnsTrue() { Element element = new BaseElement("publish"); Assert.assertTrue(publish.accept(element)); } @Test public void passingNotRetractAsElementNameReturnsFalse() { Element element = new BaseElement("not-publish"); Assert.assertFalse(publish.accept(element)); } @Test public void 
passingNoNodeResultsInErrorStanza() throws Exception { IQ request = this.request.createCopy(); request.getChildElement().element("publish").attribute("node").detach(); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.modify, error.getType()); Assert.assertEquals(XMLConstants.NODE_ID_REQUIRED, error.getApplicationConditionName()); } @Test public void nodeStoreExceptionReturnsErrorStanza() throws Exception { Mockito.doThrow(new NodeStoreException()).when(channelManager).nodeExists(Mockito.eq(node)); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Condition.internal_server_error, error.getCondition()); Assert.assertEquals(PacketError.Type.wait, error.getType()); } @Test public void providingNodeWhichDoesntExistReturnsError() throws Exception { Mockito.when(channelManager.nodeExists(node)).thenReturn(false); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.cancel, error.getType()); Assert.assertEquals(PacketError.Condition.item_not_found, error.getCondition()); } @Test public void requestToRemoteNodeResultsInForwardedPacket() throws Exception { Configuration.getInstance().remove(Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER); Configuration.getInstance().putProperty(Configuration.CONFIGURATION_SERVER_DOMAIN, "shakespeare.lit"); Assert.assertEquals(new JID("channels.shakespeare.lit"), request.getTo()); request.getElement().element("pubsub").element("publish").addAttribute("node", "/user/romeo@barracks.lit/posts"); publish.process(element, jid, request, null); Assert.assertEquals(1, queue.size()); Packet response = queue.poll(); Assert.assertEquals(new JID("barracks.lit"), 
response.getTo()); } @Test public void unsubscribedUserCanNotPublish() throws Exception { NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.none, Affiliations.publisher, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.auth, error.getType()); Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition()); } @Test public void pendingSubscriptionCanNotPublish() throws Exception { NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.pending, Affiliations.publisher, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.auth, error.getType()); Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition()); } @Test public void noAffiliationCanNotPublish() throws Exception { NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.none, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.auth, error.getType()); Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition()); } @Test public void memberAffiliationCanNotPublish() throws Exception { NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.member, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), 
Mockito.eq(jid))).thenReturn(membership); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.auth, error.getType()); Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition()); } @Test public void outcastAffiliationCanNotPublish() throws Exception { NodeMembership membership = new NodeMembershipImpl(node, jid, Subscriptions.subscribed, Affiliations.outcast, null); Mockito.when(channelManager.getNodeMembership(Mockito.eq(node), Mockito.eq(jid))).thenReturn(membership); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.auth, error.getType()); Assert.assertEquals(PacketError.Condition.forbidden, error.getCondition()); } @Test public void noItemElementReturnsError() throws Exception { IQ request = this.request.createCopy(); request.getChildElement().element("publish").element("item").detach(); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.modify, error.getType()); Assert.assertEquals(PacketError.Condition.bad_request, error.getCondition()); Assert.assertEquals(XMLConstants.ITEM_REQUIRED_ELEM, error.getApplicationConditionName()); } @Test public void invalidEntryReturnsError() throws Exception { String errorMessage = "errorMessage"; Mockito.when(validateEntry.isValid()).thenReturn(false); Mockito.when(validateEntry.getErrorMessage()).thenReturn(errorMessage); publish.process(element, jid, request, null); Packet response = queue.poll(); PacketError error = response.getError(); Assert.assertNotNull(error); Assert.assertEquals(PacketError.Type.modify, error.getType()); Assert.assertEquals(PacketError.Condition.bad_request, error.getCondition()); 
Assert.assertEquals(errorMessage, error.getApplicationConditionName()); } @Test public void itemIsSavedAsExpected() throws Exception { IQ request = this.request.createCopy(); publish.process(element, jid, request, null); Mockito.verify(channelManager, Mockito.times(1)).addNodeItem(Mockito.any(NodeItemImpl.class)); } @Test public void expectedSuccessResponseReceived() throws Exception { IQ request = this.request.createCopy(); publish.process(element, jid, request, null); IQ response = (IQ) queue.poll(); Assert.assertEquals(IQ.Type.result, response.getType()); Assert.assertEquals(request.getFrom(), response.getTo()); Assert.assertEquals(request.getTo(), response.getFrom()); Element pubsub = response.getElement().element("pubsub"); Assert.assertEquals(JabberPubsub.NAMESPACE_URI, pubsub.getNamespaceURI()); Element publish = pubsub.element("publish"); Assert.assertEquals(node, publish.attributeValue("node")); Element item = publish.element("item"); Assert.assertNotNull(item); Assert.assertTrue(item.attributeValue("id").length() > 0); Assert.assertTrue(GlobalItemIDImpl.isGlobalId(item.attributeValue("id"))); } @Test public void sendsOutExpectedNotifications() throws Exception { NodeSubscription subscriber1 = new NodeSubscriptionImpl(node, new JID("romeo@shakespeare.lit"), Subscriptions.subscribed, null); // Expect not to see this user (subscription: 'pending') NodeSubscription subscriber2 = new NodeSubscriptionImpl(node, new JID("titania@shakespeare.lit"), Subscriptions.pending, null); NodeSubscription subscriber3 = new NodeSubscriptionImpl(node, new JID("faustus@marlowe.lit"), new JID("channels.marlowe.lit"), Subscriptions.subscribed, null); ArrayList<NodeSubscription> subscribers = new ArrayList<NodeSubscription>(); subscribers.add(subscriber1); subscribers.add(subscriber2); subscribers.add(subscriber3); Mockito.when(channelManager.getNodeSubscriptionListeners(Mockito.eq(node))).thenReturn(new ResultSetImpl<NodeSubscription>(subscribers)); IQ request = 
this.request.createCopy(); publish.process(element, jid, request, null); Assert.assertEquals(5, queue.size()); queue.poll(); Message notification = (Message) queue.poll(); Assert.assertEquals(Message.Type.headline, notification.getType()); Assert.assertEquals(subscriber1.getUser(), notification.getTo()); Assert.assertEquals(server, notification.getFrom().toString()); Element event = notification.getElement().element("event"); Assert.assertEquals(JabberPubsub.NS_PUBSUB_EVENT, event.getNamespaceURI()); Element items = event.element("items"); Assert.assertEquals(node, items.attributeValue("node")); Element item = items.element("item"); Assert.assertTrue(item.attributeValue("id").length() > 0); Assert.assertTrue(GlobalItemIDImpl.isGlobalId(item.attributeValue("id"))); Element responseEntry = item.element("entry"); Assert.assertEquals(entry.asXML(), responseEntry.asXML()); notification = (Message) queue.poll(); Assert.assertEquals(subscriber3.getListener(), notification.getTo()); notification = (Message) queue.poll(); Assert.assertEquals(new JID("user1@server1"), notification.getTo()); notification = (Message) queue.poll(); Assert.assertEquals(new JID("user2@server1"), notification.getTo()); } @Test public void inReplyToIdIsSavedToDatabase() throws Exception { IQ request = readStanzaAsIq("/iq/pubsub/publish/reply.stanza"); Mockito.when(validateEntry.getPayload()).thenReturn(request.getChildElement().element("publish").element("item").element("entry")); Mockito.when(validateEntry.getInReplyTo()).thenReturn( GlobalItemIDImpl.toLocalId(request.getChildElement().element("publish").element("item").element("entry").element("in-reply-to") .attributeValue("ref"))); publish.process(element, jid, request, null); Assert.assertEquals(IQ.Type.result, ((IQ) queue.poll()).getType()); ArgumentCaptor<NodeItemImpl> argument = ArgumentCaptor.forClass(NodeItemImpl.class); Mockito.verify(channelManager, Mockito.times(1)).addNodeItem(argument.capture()); 
Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095", argument.getValue().getInReplyTo()); Assert.assertEquals(node, argument.getValue().getNodeId()); } @Test public void replyUpdatesThreadParentDate() throws Exception { IQ request = readStanzaAsIq("/iq/pubsub/publish/reply.stanza"); Mockito.when(validateEntry.getPayload()).thenReturn(request.getChildElement().element("publish").element("item").element("entry")); Mockito.when(validateEntry.getInReplyTo()).thenReturn("fc362eb42085f017ed9ccd9c4004b095"); publish.process(element, jid, request, null); Assert.assertEquals(IQ.Type.result, ((IQ) queue.poll()).getType()); ArgumentCaptor<String> inReplyTo = ArgumentCaptor.forClass(String.class); ArgumentCaptor<String> passedNode = ArgumentCaptor.forClass(String.class); Mockito.verify(channelManager, Mockito.times(1)).updateThreadParent(passedNode.capture(), inReplyTo.capture()); Assert.assertEquals("fc362eb42085f017ed9ccd9c4004b095", inReplyTo.getValue()); Assert.assertEquals(node, passedNode.getValue()); } @Test public void doesNotUpdateParentThreadIfNotReply() throws Exception { IQ request = this.request.createCopy(); publish.process(element, jid, request, null); Mockito.verify(channelManager, Mockito.times(0)).updateThreadParent(Mockito.anyString(), Mockito.anyString()); } }
apache-2.0
neykov/incubator-brooklyn
utils/common/src/main/java/brooklyn/util/javalang/StackTraceSimplifier.java
6792
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package brooklyn.util.javalang; import java.io.PrintWriter; import java.io.StringWriter; import java.util.Arrays; import java.util.Collection; import java.util.LinkedList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import brooklyn.util.text.Strings; import com.google.common.collect.ImmutableSet; /** * Utility class for cleaning up stacktraces. 
*/ public class StackTraceSimplifier { private static final Logger log = LoggerFactory.getLogger(StackTraceSimplifier.class); /** comma-separated prefixes (not regexes) */ public static final String DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME = "brooklyn.util.javalang.StackTraceSimplifier.blacklist"; /** @deprecated since 0.6.0 use {@link #DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME} */ @Deprecated public static final String LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME = "groovy.sanitized.stacktraces"; private static final Collection<String> DEFAULT_BLACKLIST; static { ImmutableSet.Builder<String> blacklist = ImmutableSet.builder(); blacklist.addAll(Arrays.asList( System.getProperty(DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME, "java.," + "javax.," + "sun.," + "groovy.," + "org.codehaus.groovy.," + "gjdk.groovy.," ).split("(\\s|,)+"))); String legacyDefaults = System.getProperty(LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME); if (Strings.isNonBlank(legacyDefaults)) { log.warn("Detected ude of legacy system property "+LEGACY_DEFAULT_BLACKLIST_SYSTEM_PROPERTY_NAME); blacklist.addAll(Arrays.asList(legacyDefaults.split("(\\s|,)+"))); } DEFAULT_BLACKLIST = blacklist.build(); } private static final StackTraceSimplifier DEFAULT_INSTACE = newInstance(); private final Collection<String> blacklist; protected StackTraceSimplifier() { this(true); } protected StackTraceSimplifier(boolean includeDefaultBlacklist, String ...packages) { ImmutableSet.Builder<String> blacklistB = ImmutableSet.builder(); if (includeDefaultBlacklist) blacklistB.addAll(DEFAULT_BLACKLIST); blacklistB.add(packages); blacklist = blacklistB.build(); } public static StackTraceSimplifier newInstance() { return new StackTraceSimplifier(); } public static StackTraceSimplifier newInstance(String ...additionalBlacklistPackagePrefixes) { return new StackTraceSimplifier(true, additionalBlacklistPackagePrefixes); } public static StackTraceSimplifier newInstanceExcludingOnly(String ...blacklistPackagePrefixes) { return new 
StackTraceSimplifier(false, blacklistPackagePrefixes); } /** @return whether the given element is useful, that is, not in the blacklist */ public boolean isUseful(StackTraceElement el) { for (String s: blacklist){ if (el.getClassName().startsWith(s)) return false;; // gets underscores in some contexts ? if (el.getClassName().replace('_', '.').startsWith(s)) return false; } return true; } /** @return new list containing just the {@link #isUseful(StackTraceElement)} stack trace elements */ public List<StackTraceElement> clean(Iterable<StackTraceElement> st) { List<StackTraceElement> result = new LinkedList<StackTraceElement>(); for (StackTraceElement element: st){ if (isUseful(element)){ result.add(element); } } return result; } /** @return new array containing just the {@link #isUseful(StackTraceElement)} stack trace elements */ public StackTraceElement[] clean(StackTraceElement[] st) { List<StackTraceElement> result = clean(Arrays.asList(st)); return result.toArray(new StackTraceElement[result.size()]); } /** @return first {@link #isUseful(StackTraceElement)} stack trace elements, or null */ public StackTraceElement firstUseful(StackTraceElement[] st) { return nthUseful(0, st); } /** @return (n+1)th {@link #isUseful(StackTraceElement)} stack trace elements (ie 0 is {@link #firstUseful(StackTraceElement[])}), or null */ public StackTraceElement nthUseful(int n, StackTraceElement[] st) { for (StackTraceElement element: st){ if (isUseful(element)) { if (n==0) return element; n--; } } return null; } /** {@link #clean(StackTraceElement[])} the given throwable instance, returning the same instance for convenience */ public <T extends Throwable> T cleaned(T t) { t.setStackTrace(clean(t.getStackTrace())); return t; } // ---- statics /** static convenience for {@link #isUseful(StackTraceElement)} */ public static boolean isStackTraceElementUseful(StackTraceElement el) { return DEFAULT_INSTACE.isUseful(el); } /** static convenience for {@link #clean(Iterable)} */ public 
static List<StackTraceElement> cleanStackTrace(Iterable<StackTraceElement> st) { return DEFAULT_INSTACE.clean(st); } /** static convenience for {@link #clean(StackTraceElement[])} */ public static StackTraceElement[] cleanStackTrace(StackTraceElement[] st) { return DEFAULT_INSTACE.clean(st); } /** static convenience for {@link #cleaned(Throwable)} */ public static <T extends Throwable> T cleanedStackTrace(T t) { return DEFAULT_INSTACE.cleaned(t); } public static String toString(Throwable t) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); t.printStackTrace(pw); return sw.getBuffer().toString(); } }
apache-2.0
wardev/orekit
src/main/java/org/orekit/propagation/events/DihedralFieldOfViewDetector.java
9006
/* Copyright 2002-2014 CS Systèmes d'Information * Licensed to CS Systèmes d'Information (CS) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * CS licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.orekit.propagation.events; import org.apache.commons.math3.geometry.euclidean.threed.Vector3D; import org.apache.commons.math3.util.FastMath; import org.orekit.errors.OrekitException; import org.orekit.propagation.SpacecraftState; import org.orekit.propagation.events.handlers.EventHandler; import org.orekit.propagation.events.handlers.StopOnDecreasing; import org.orekit.utils.PVCoordinatesProvider; /** Finder for body entering/exiting dihedral FOV events. * <p>This class finds dihedral field of view events (i.e. body entry and exit in FOV).</p> * <p>The default implementation behavior is to {@link * org.orekit.propagation.events.handlers.EventHandler.Action#CONTINUE continue} * propagation at entry and to {@link * org.orekit.propagation.events.handlers.EventHandler.Action#STOP stop} propagation * at exit. 
This can be changed by calling * {@link #withHandler(EventHandler)} after construction.</p> * @see org.orekit.propagation.Propagator#addEventDetector(EventDetector) * @see CircularFieldOfViewDetector * @author V&eacute;ronique Pommier-Maurussane */ public class DihedralFieldOfViewDetector extends AbstractDetector<DihedralFieldOfViewDetector> { /** Serializable UID. */ private static final long serialVersionUID = 20131118L; /** Position/velocity provider of the considered target. */ private final PVCoordinatesProvider targetPVProvider; /** Direction of the FOV center. */ private final Vector3D center; /** FOV dihedral axis 1. */ private final Vector3D axis1; /** FOV normal to first center plane. */ private final Vector3D normalCenterPlane1; /** FOV dihedral half aperture angle 1. */ private final double halfAperture1; /** FOV dihedral axis 2. */ private final Vector3D axis2; /** FOV normal to second center plane. */ private final Vector3D normalCenterPlane2; /** FOV dihedral half aperture angle 2. */ private final double halfAperture2; /** Build a new instance. 
* <p>The maximal interval between distance to FOV boundary checks should * be smaller than the half duration of the minimal pass to handle, * otherwise some short passes could be missed.</p> * @param maxCheck maximal interval in seconds * @param pvTarget Position/velocity provider of the considered target * @param center Direction of the FOV center * @param axis1 FOV dihedral axis 1 * @param halfAperture1 FOV dihedral half aperture angle 1 * @param axis2 FOV dihedral axis 2 * @param halfAperture2 FOV dihedral half aperture angle 2 */ public DihedralFieldOfViewDetector(final double maxCheck, final PVCoordinatesProvider pvTarget, final Vector3D center, final Vector3D axis1, final double halfAperture1, final Vector3D axis2, final double halfAperture2) { this(maxCheck, 1.0e-3, DEFAULT_MAX_ITER, new StopOnDecreasing<DihedralFieldOfViewDetector>(), pvTarget, center, axis1, halfAperture1, axis2, halfAperture2); } /** Private constructor with full parameters. * <p> * This constructor is private as users are expected to use the builder * API with the various {@code withXxx()} methods to set up the instance * in a readable manner without using a huge amount of parameters. 
* </p> * @param maxCheck maximum checking interval (s) * @param threshold convergence threshold (s) * @param maxIter maximum number of iterations in the event time search * @param handler event handler to call at event occurrences * @param pvTarget Position/velocity provider of the considered target * @param center Direction of the FOV center * @param axis1 FOV dihedral axis 1 * @param halfAperture1 FOV dihedral half aperture angle 1 * @param axis2 FOV dihedral axis 2 * @param halfAperture2 FOV dihedral half aperture angle 2 * @since 6.1 */ private DihedralFieldOfViewDetector(final double maxCheck, final double threshold, final int maxIter, final EventHandler<DihedralFieldOfViewDetector> handler, final PVCoordinatesProvider pvTarget, final Vector3D center, final Vector3D axis1, final double halfAperture1, final Vector3D axis2, final double halfAperture2) { super(maxCheck, threshold, maxIter, handler); this.targetPVProvider = pvTarget; this.center = center; // Computation of the center plane normal for dihedra 1 this.axis1 = axis1; this.normalCenterPlane1 = Vector3D.crossProduct(axis1, center); // Computation of the center plane normal for dihedra 2 this.axis2 = axis2; this.normalCenterPlane2 = Vector3D.crossProduct(axis2, center); this.halfAperture1 = halfAperture1; this.halfAperture2 = halfAperture2; } /** {@inheritDoc} */ @Override protected DihedralFieldOfViewDetector create(final double newMaxCheck, final double newThreshold, final int newMaxIter, final EventHandler<DihedralFieldOfViewDetector> newHandler) { return new DihedralFieldOfViewDetector(newMaxCheck, newThreshold, newMaxIter, newHandler, targetPVProvider, center, axis1, halfAperture1, axis2, halfAperture2); } /** Get the position/velocity provider of the target . * @return the position/velocity provider of the target */ public PVCoordinatesProvider getPVTarget() { return targetPVProvider; } /** Get the direction of FOV center. 
* @return the direction of FOV center */ public Vector3D getCenter() { return center; } /** Get the direction of FOV 1st dihedral axis. * @return the direction of FOV 1st dihedral axis */ public Vector3D getAxis1() { return axis1; } /** Get the half aperture angle of FOV 1st dihedra. * @return the half aperture angle of FOV 1st dihedras */ public double getHalfAperture1() { return halfAperture1; } /** Get the half aperture angle of FOV 2nd dihedra. * @return the half aperture angle of FOV 2nd dihedras */ public double getHalfAperture2() { return halfAperture2; } /** Get the direction of FOV 2nd dihedral axis. * @return the direction of FOV 2nd dihedral axis */ public Vector3D getAxis2() { return axis2; } /** {@inheritDoc} * g function value is the target signed distance to the closest FOV boundary. * It is positive inside the FOV, and negative outside. */ public double g(final SpacecraftState s) throws OrekitException { // Get position of target at current date in spacecraft frame. final Vector3D targetPosInert = new Vector3D(1, targetPVProvider.getPVCoordinates(s.getDate(), s.getFrame()).getPosition(), -1, s.getPVCoordinates().getPosition()); final Vector3D targetPosSat = s.getAttitude().getRotation().applyTo(targetPosInert); // Compute the four angles from the four FOV boundaries. final double angle1 = FastMath.atan2(Vector3D.dotProduct(targetPosSat, normalCenterPlane1), Vector3D.dotProduct(targetPosSat, center)); final double angle2 = FastMath.atan2(Vector3D.dotProduct(targetPosSat, normalCenterPlane2), Vector3D.dotProduct(targetPosSat, center)); // g function value is distance to the FOV boundary, computed as a dihedral angle. // It is positive inside the FOV, and negative outside. return FastMath.min(halfAperture1 - FastMath.abs(angle1) , halfAperture2 - FastMath.abs(angle2)); } }
apache-2.0
blue7zz/BlueDemo
permission/src/test/java/top/blue7/permission/ExampleUnitTest.java
398
package top.blue7.permission;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    /** Sanity check: basic integer arithmetic works on the host JVM. */
    @Test
    public void addition_isCorrect() throws Exception {
        final int sum = 2 + 2;
        assertEquals(4, sum);
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-connect/src/main/java/com/amazonaws/services/connect/model/transform/CreateUseCaseResultJsonUnmarshaller.java
3047
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.connect.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.connect.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * CreateUseCaseResult JSON Unmarshaller
 *
 * <p>Code-generated: converts a JSON response stream into a
 * {@link CreateUseCaseResult}, reading the "UseCaseId" and "UseCaseArn"
 * fields. Do not hand-edit; changes would be lost on regeneration.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateUseCaseResultJsonUnmarshaller implements Unmarshaller<CreateUseCaseResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls one CreateUseCaseResult object from the context's JSON stream.
     * Returns an empty result when the current JSON value is null.
     */
    public CreateUseCaseResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        CreateUseCaseResult createUseCaseResult = new CreateUseCaseResult();

        // Depth bookkeeping: fields belonging to this object live exactly one
        // level below the depth at which unmarshalling started.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole object yields an empty (default) result.
        if (token == VALUE_NULL) {
            return createUseCaseResult;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("UseCaseId", targetDepth)) {
                    context.nextToken();
                    createUseCaseResult.setUseCaseId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("UseCaseArn", targetDepth)) {
                    context.nextToken();
                    createUseCaseResult.setUseCaseArn(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to the depth where this
                // object started (i.e. the enclosing object/array has ended).
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return createUseCaseResult;
    }

    private static CreateUseCaseResultJsonUnmarshaller instance;

    /**
     * Lazily-initialized singleton accessor (generated pattern; the benign
     * race on {@code instance} at worst creates an extra stateless object).
     */
    public static CreateUseCaseResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateUseCaseResultJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
sdgdsffdsfff/appleframework
apple-distributed/src/test/java/com/appleframework/distributed/id/snowflake/IdProviderTest.java
640
package com.appleframework.distributed.id.snowflake;

/**
 * Manual smoke test for snowflake-style distributed ID generation.
 *
 * <p>Creates two {@link IdProvider}s from one factory (backed by ZooKeeper at
 * {@code localhost:2181} — requires a running ZooKeeper to work) and prints 1000
 * IDs from each, interleaved, so uniqueness/monotonicity can be eyeballed.
 *
 * <p>Fix: removed the redundant imports of {@code IdProvider} and
 * {@code IdProviderFactory} — both classes live in this same package, so the
 * imports were no-ops.
 */
public class IdProviderTest {

    public static void main(String[] args) throws Exception {
        // Both providers share the same factory (and its ZooKeeper connection).
        IdProviderFactory idProviderFactory = new IdProviderFactory("localhost:2181");
        final IdProvider idProvider1 = idProviderFactory.getProvider();
        final IdProvider idProvider2 = idProviderFactory.getProvider();
        for (int i = 0; i < 1000; i++) {
            System.out.println(idProvider1.getId());
            System.out.println(idProvider2.getId());
        }
    }
}
apache-2.0
bingoogolapple/J2EENote
algorithmhome/src/main/java/com/bingoogol/algorithmhome/dao/UserInfoDao.java
177
package com.bingoogol.algorithmhome.dao;

/**
 * DAO for adjusting user balances during a trade.
 *
 * <p>Fix: dropped the redundant {@code public} modifiers — interface methods are
 * implicitly {@code public abstract}, and restating it is flagged by most linters.
 * Callers and implementations are unaffected.
 */
public interface UserInfoDao {

    /**
     * Credits {@code price} to the seller's balance.
     *
     * @param sellerid id of the seller account
     * @param price amount to add
     * @return presumably the number of rows affected by the update — TODO confirm
     *         against the implementing class
     */
    int plusPrice(String sellerid, int price);

    /**
     * Debits {@code price} from the buyer's balance.
     *
     * @param buyerid id of the buyer account
     * @param price amount to subtract
     * @return presumably the number of rows affected by the update — TODO confirm
     *         against the implementing class
     */
    int minusPrice(String buyerid, int price);
}
apache-2.0
jluisred/HotSpots
src/fr/eurecom/hotspots/web/main.java
1105
package fr.eurecom.hotspots.web; import java.io.File; import java.io.IOException; import java.util.List; import org.apache.commons.io.FileUtils; import com.googlecode.mp4parser.authoring.tracks.TextTrackImpl.Line; import fr.eurecom.hotspots.core.HotSpotGenerator; import fr.eurecom.hotspots.datastructures.Timeline; public class main { public static void main(String[] args) { String subtitleString = null; String chapterString = null; //Parse Chapters from TED try { subtitleString = FileUtils.readFileToString(new File("./alanna_shaikh/alanna_shaikh.srt")); chapterString = FileUtils.readFileToString(new File ("./alanna_shaikh/alanna_shaikh.ch")); System.out.println(chapterString); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } HotSpotGenerator hpG = new HotSpotGenerator ("http://linkedtv.eurecom.fr/video/6c75499e-d840-4914-ad2b-d5ff9511c7f7", false, subtitleString, chapterString, "6c75499e-d840-4914-ad2b-d5ff9511c7f7"); String resultsjson = hpG.generate(); System.out.println(resultsjson); } }
apache-2.0
LorenzReinhart/ONOSnew
providers/ietfte/tunnel/src/main/java/org/onosproject/provider/te/tunnel/TeTunnelRestconfProvider.java
14517
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.provider.te.tunnel;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onosproject.incubator.net.tunnel.Tunnel;
import org.onosproject.incubator.net.tunnel.TunnelDescription;
import org.onosproject.incubator.net.tunnel.TunnelId;
import org.onosproject.incubator.net.tunnel.TunnelProvider;
import org.onosproject.incubator.net.tunnel.TunnelProviderRegistry;
import org.onosproject.net.DeviceId;
import org.onosproject.net.ElementId;
import org.onosproject.net.Path;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.protocol.restconf.RestConfSBController;
import org.onosproject.protocol.restconf.RestconfNotificationEventListener;
import org.onosproject.provider.te.utils.DefaultJsonCodec;
import org.onosproject.provider.te.utils.YangCompositeEncodingImpl;
import org.onosproject.tetopology.management.api.TeTopology;
import org.onosproject.tetopology.management.api.TeTopologyKey;
import org.onosproject.tetopology.management.api.TeTopologyService;
import org.onosproject.tetunnel.api.TeTunnelProviderService;
import org.onosproject.tetunnel.api.TeTunnelService;
import org.onosproject.tetunnel.api.tunnel.DefaultTeTunnel;
import org.onosproject.tetunnel.api.tunnel.TeTunnel;
import org.onosproject.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.te.rev20160705.IetfTe;
import org.onosproject.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.te.rev20160705.ietfte.tunnelsgrouping.Tunnels;
import org.onosproject.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.te.types.rev20160705.IetfTeTypes;
import org.onosproject.yms.ych.YangCodecHandler;
import org.onosproject.yms.ych.YangCompositeEncoding;
import org.onosproject.yms.ych.YangProtocolEncodingFormat;
import org.onosproject.yms.ymsm.YmsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.List;
import java.util.Optional;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.onosproject.provider.te.utils.CodecTools.jsonToString;
import static org.onosproject.provider.te.utils.CodecTools.toJson;
import static org.onosproject.tetopology.management.api.TeTopology.BIT_MERGED;
import static org.onosproject.teyang.utils.tunnel.TunnelConverter.buildIetfTe;
import static org.onosproject.teyang.utils.tunnel.TunnelConverter.yang2TeTunnel;
import static org.onosproject.yms.ych.YangProtocolEncodingFormat.JSON;
import static org.onosproject.yms.ych.YangResourceIdentifierType.URI;
import static org.onosproject.yms.ydt.YmsOperationType.EDIT_CONFIG_REQUEST;
import static org.onosproject.yms.ydt.YmsOperationType.QUERY_REPLY;

/**
 * Provider which uses RESTCONF to do cross-domain tunnel creation/deletion/
 * update/deletion and so on operations on the domain networks.
 *
 * <p>On activation it registers itself as a {@link TunnelProvider}, pulls the
 * current tunnel state from every known RESTCONF device, and subscribes to
 * device notifications so subsequent tunnel changes are pushed into the
 * TE tunnel store via {@link TeTunnelProviderService}.
 */
@Component(immediate = true)
public class TeTunnelRestconfProvider extends AbstractProvider
        implements TunnelProvider {

    private final Logger log = LoggerFactory.getLogger(getClass());

    private static final String SCHEMA = "ietf";
    private static final String IETF = "ietf";
    private static final String TE = "te";
    private static final int DEFAULT_INDEX = 1;
    private static final String TUNNELS = "tunnels";
    // RESTCONF resource path for the ietf-te tunnels container.
    private static final String TUNNELS_URL = IETF + ":" + TE + "/" + TUNNELS;
    private static final String IETF_NOTIFICATION_URI = "netconf";
    private static final String MEDIA_TYPE_JSON = "json";

    private static final String SHOULD_IN_ONE = "Tunnel should be setup in one topo";
    private static final String PROVIDER_ID = "org.onosproject.provider.ietf";
    private static final String RESTCONF_ROOT = "/onos/restconf";
    private static final String TE_TUNNEL_KEY = "TeTunnelKey";

    //private final RestconfNotificationEventListener listener =
    //        new InternalTunnelNotificationListener();

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected RestConfSBController controller;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected YmsService ymsService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected TeTunnelService tunnelService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected TeTunnelProviderService providerService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected TeTopologyService topologyService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected TunnelProviderRegistry tunnelProviderRegistry;

    // YANG codec configured for the ietf-te model; set up in activate().
    private YangCodecHandler codecHandler;

    @Activate
    public void activate() {
        tunnelProviderRegistry.register(this);
        codecHandler = ymsService.getYangCodecHandler();
        codecHandler.addDeviceSchema(IetfTe.class);
        codecHandler.addDeviceSchema(IetfTeTypes.class);
        // Override the default JSON codec with the provider-specific one.
        codecHandler.registerOverriddenCodec(new DefaultJsonCodec(ymsService),
                                             YangProtocolEncodingFormat.JSON);
        collectInitialTunnels();
        subscribe();
        log.info("Started");
    }

    @Deactivate
    public void deactivate() {
        tunnelProviderRegistry.unregister(this);
        unsubscribe();
        log.info("Stopped");
    }

    public TeTunnelRestconfProvider() {
        super(new ProviderId(SCHEMA, PROVIDER_ID));
    }

    // Query every known device once at startup and seed the tunnel store.
    private void collectInitialTunnels() {
        for (DeviceId deviceId : controller.getDevices().keySet()) {
            ObjectNode jsonNodes = executeGetRequest(deviceId);
            if (jsonNodes == null) {
                continue;
            }
            ObjectNode tunnelsNode = (ObjectNode) jsonNodes.get(TUNNELS);
            if (tunnelsNode == null) {
                continue;
            }
            Tunnels teTunnels = getYangTunnelsObject(tunnelsNode);
            if (teTunnels == null) {
                continue;
            }
            updateTeTunnels(teTunnels);
        }
    }

    // Subscribe for tunnel-change notifications on every known device.
    // Per-device failures are logged and skipped so one bad device does not
    // prevent the rest from being subscribed.
    private void subscribe() {
        for (DeviceId deviceId : controller.getDevices().keySet()) {
            try {
                if (!controller.isNotificationEnabled(deviceId)) {
                    controller.enableNotifications(deviceId, IETF_NOTIFICATION_URI,
                                                   "application/json",
                                                   new InternalTunnelNotificationListener());
                } else {
                    controller.addNotificationListener(deviceId,
                                                       new InternalTunnelNotificationListener());
                }
            } catch (Exception e) {
                log.error("Failed to subscribe for {} : {}", deviceId,
                          e.getMessage());
            }
        }
    }

    // NOTE(review): unsubscribe passes a NEW listener instance; whether
    // removeNotificationListener matches by identity or by device only is
    // determined by the controller implementation — confirm removal works.
    private void unsubscribe() {
        controller.getDevices()
                .keySet()
                .forEach(deviceId -> controller
                        .removeNotificationListener(deviceId,
                                                    new InternalTunnelNotificationListener()));
    }

    @Override
    public void setupTunnel(Tunnel tunnel, Path path) {
        TeTunnel teTunnel = tunnelService.getTeTunnel(tunnel.tunnelId());
        long tid = teTunnel.srcNode().topologyId();
        // Cross-topology tunnels are not supported by this entry point.
        checkState(tid == teTunnel.dstNode().topologyId(), SHOULD_IN_ONE);
        setupTunnel(getOwnDevice(tid), tunnel, path);
    }

    @Override
    public void setupTunnel(ElementId srcElement, Tunnel tunnel, Path path) {
        // The TE tunnel key is carried in the tunnel's annotations; without it
        // we cannot map back to the TE tunnel object.
        if (!tunnel.annotations().keys().contains(TE_TUNNEL_KEY)) {
            log.warn("No tunnel key info in tunnel {}", tunnel);
            return;
        }
        String teTunnelKey = tunnel.annotations().value(TE_TUNNEL_KEY);
        Optional<TeTunnel> optTunnel = tunnelService.getTeTunnels()
                .stream()
                .filter(t -> t.teTunnelKey().toString().equals(teTunnelKey))
                .findFirst();
        if (!optTunnel.isPresent()) {
            log.warn("No te tunnel map to tunnel {}", tunnel);
            return;
        }
        // Encode the TE tunnel as an ietf-te edit-config request body.
        IetfTe ietfTe = buildIetfTe(optTunnel.get(), true);

        YangCompositeEncoding encoding = codecHandler.
                encodeCompositeOperation(RESTCONF_ROOT, null, ietfTe,
                                         JSON, EDIT_CONFIG_REQUEST);
        String identifier = encoding.getResourceIdentifier();
        String resourceInformation = encoding.getResourceInformation();

        if (srcElement == null) {
            log.error("Can't find remote device for tunnel : {}", tunnel);
            return;
        }
        controller.post((DeviceId) srcElement, identifier,
                        new ByteArrayInputStream(resourceInformation.getBytes()),
                        MEDIA_TYPE_JSON, ObjectNode.class);
    }

    @Override
    public void releaseTunnel(Tunnel tunnel) {
        //TODO implement release tunnel method
    }

    @Override
    public void releaseTunnel(ElementId srcElement, Tunnel tunnel) {
        //TODO implement release tunnel with src method
    }

    @Override
    public void updateTunnel(Tunnel tunnel, Path path) {
        //TODO implement update tunnel method
    }

    @Override
    public void updateTunnel(ElementId srcElement, Tunnel tunnel, Path path) {
        //TODO implement update tunnel with src method
    }

    @Override
    public TunnelId tunnelAdded(TunnelDescription tunnel) {
        //TODO implement tunnel add method when te tunnel app merged to core
        return null;
    }

    @Override
    public void tunnelRemoved(TunnelDescription tunnel) {
        //TODO implement tunnel remove method when te tunnel app merged to core
    }

    @Override
    public void tunnelUpdated(TunnelDescription tunnel) {
        //TODO implement tunnel update method when te tunnel app merged to core
    }

    @Override
    public Tunnel tunnelQueryById(TunnelId tunnelId) {
        return null;
    }

    private ObjectNode executeGetRequest(DeviceId deviceId) {
        //the request url is ietf-te:te/tunnels
        //the response node will begin with tunnels
        //be careful here to when get the tunnels data
        InputStream resultStream =
                controller.get(deviceId, TUNNELS_URL, MEDIA_TYPE_JSON);
        return toJson(resultStream);
    }

    // Decode a "tunnels" JSON subtree into the generated YANG Tunnels object.
    // Returns null (and logs) when the YMS decoder produces nothing.
    private Tunnels getYangTunnelsObject(ObjectNode tunnelsNode) {
        checkNotNull(tunnelsNode, "Input object node should not be null");

        YangCompositeEncoding yce =
                new YangCompositeEncodingImpl(URI,
                                              TUNNELS_URL,
                                              jsonToString(tunnelsNode));

        Object yo = codecHandler.decode(yce, JSON, QUERY_REPLY);

        if (yo == null) {
            log.error("YMS decoder returns null");
            return null;
        }
        IetfTe ietfTe = null;
        Tunnels tunnels = null;
        if (yo instanceof List) {
            // NOTE(review): decoder returns a list and the IetfTe payload is
            // assumed to sit at index 1 (DEFAULT_INDEX) — confirm against the
            // YMS decode contract.
            List<Object> list = (List<Object>) yo;
            ietfTe = (IetfTe) list.get(DEFAULT_INDEX);
        }
        if (ietfTe != null && ietfTe.te() != null) {
            tunnels = ietfTe.te().tunnels();
        }
        return tunnels;
    }

    // Convert each YANG tunnel into a DefaultTeTunnel and push it to the store.
    private void updateTeTunnels(Tunnels tunnels) {
        TeTopologyKey key = getTopologyKey();

        tunnels.tunnel().forEach(tunnel -> {
            DefaultTeTunnel teTunnel = yang2TeTunnel(tunnel, key);
            providerService.updateTeTunnel(teTunnel);
        });
    }

    // Returns the key of the first merged topology, or null when none exists.
    private TeTopologyKey getTopologyKey() {
        TeTopologyKey key = null;
        Optional<TeTopology> teTopology = topologyService.teTopologies()
                .teTopologies()
                .values()
                .stream()
                .filter(topology -> topology.flags().get(BIT_MERGED))
                .findFirst();
        if (teTopology.isPresent()) {
            TeTopology topology = teTopology.get();
            key = topology.teTopologyId();
        }
        return key;
    }

    // Looks up the owner device of the topology with the given id; null if absent.
    private DeviceId getOwnDevice(long topologyId) {
        DeviceId deviceId = null;
        Optional<TeTopology> topoOpt = topologyService.teTopologies()
                .teTopologies()
                .values()
                .stream()
                .filter(tp -> tp.teTopologyId().topologyId() == topologyId)
                .findFirst();

        if (topoOpt.isPresent()) {
            deviceId = topoOpt.get().ownerId();
        }
        return deviceId;
    }

    // Translates tunnel-change notifications (JSON strings) from a device into
    // updates of the TE tunnel store. Malformed payloads are logged and dropped.
    private class InternalTunnelNotificationListener implements
            RestconfNotificationEventListener {

        @Override
        public void handleNotificationEvent(DeviceId deviceId,
                                            Object eventJsonString) {
            ObjectNode response = toJson((String) eventJsonString);
            if (response == null) {
                return;
            }
            JsonNode teNode = response.get(TE);
            if (teNode == null) {
                log.error("Illegal te json object from {}", deviceId);
                return;
            }
            JsonNode tunnelsNode = teNode.get(TUNNELS);
            if (tunnelsNode == null) {
                log.error("Illegal tunnel json object from {}", deviceId);
                return;
            }

            Tunnels tunnels = getYangTunnelsObject((ObjectNode) tunnelsNode);
            if (tunnels == null) {
                return;
            }
            updateTeTunnels(tunnels);
        }
    }
}
apache-2.0
aws/aws-sdk-java-v2
services-custom/dynamodb-enhanced/src/main/java/software/amazon/awssdk/enhanced/dynamodb/internal/AttributeConfiguration.java
1882
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package software.amazon.awssdk.enhanced.dynamodb.internal;

import software.amazon.awssdk.annotations.SdkInternalApi;

/**
 * Internal configuration for attribute
 *
 * <p>Immutable holder for two per-attribute flags; build instances via
 * {@link #builder()}.
 */
@SdkInternalApi
public final class AttributeConfiguration {
    private final boolean preserveEmptyObject;
    private final boolean ignoreNulls;

    public AttributeConfiguration(Builder builder) {
        preserveEmptyObject = builder.preserveEmptyObject;
        ignoreNulls = builder.ignoreNulls;
    }

    /** Creates a fresh, all-false builder. */
    public static Builder builder() {
        return new Builder();
    }

    /** Whether an empty nested object should be preserved rather than dropped. */
    public boolean preserveEmptyObject() {
        return preserveEmptyObject;
    }

    /** Whether null attribute values should be ignored. */
    public boolean ignoreNulls() {
        return ignoreNulls;
    }

    /** Mutable companion used to assemble an {@link AttributeConfiguration}. */
    public static final class Builder {
        private boolean preserveEmptyObject;
        private boolean ignoreNulls;

        private Builder() {
        }

        public Builder preserveEmptyObject(boolean preserveEmptyObject) {
            this.preserveEmptyObject = preserveEmptyObject;
            return this;
        }

        public Builder ignoreNulls(boolean ignoreNulls) {
            this.ignoreNulls = ignoreNulls;
            return this;
        }

        public AttributeConfiguration build() {
            return new AttributeConfiguration(this);
        }
    }
}
apache-2.0
dadarom/dubbo
dubbo-remoting/dubbo-remoting-netty4/src/main/java/com/alibaba/dubbo/remoting/transport/netty4/logging/FormattingTuple.java
2138
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.remoting.transport.netty4.logging;

/**
 * Holds the results of formatting done by {@link MessageFormatter}.
 */
class FormattingTuple {

    /** Shared sentinel for "nothing was formatted". */
    static final FormattingTuple NULL = new FormattingTuple(null);

    private final String message;
    private final Throwable throwable;
    private final Object[] argArray;

    FormattingTuple(String message) {
        this(message, null, null);
    }

    FormattingTuple(String message, Object[] argArray, Throwable throwable) {
        this.message = message;
        this.throwable = throwable;
        // When a throwable is present, its slot was the last array element,
        // so the stored argument array excludes it.
        this.argArray = throwable == null ? argArray : trimmedCopy(argArray);
    }

    /**
     * Returns a copy of {@code argArray} without its final element.
     *
     * @throws IllegalStateException if the array is null or empty, since there
     *         is then no trailing element to drop
     */
    static Object[] trimmedCopy(Object[] argArray) {
        if (argArray == null || argArray.length == 0) {
            throw new IllegalStateException("non-sensical empty or null argument array");
        }
        int resultLength = argArray.length - 1;
        Object[] result = new Object[resultLength];
        System.arraycopy(argArray, 0, result, 0, resultLength);
        return result;
    }

    public String getMessage() {
        return message;
    }

    public Object[] getArgArray() {
        return argArray;
    }

    public Throwable getThrowable() {
        return throwable;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-devicefarm/src/main/java/com/amazonaws/services/devicefarm/model/transform/CreateDevicePoolRequestProtocolMarshaller.java
2719
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.devicefarm.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;

import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.devicefarm.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * CreateDevicePoolRequest Marshaller
 *
 * <p>Generated code: binds a {@link CreateDevicePoolRequest} to the DeviceFarm
 * {@code CreateDevicePool} JSON operation (POST to "/"). Regenerate rather than
 * hand-edit.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CreateDevicePoolRequestProtocolMarshaller implements Marshaller<Request<CreateDevicePoolRequest>, CreateDevicePoolRequest> {

    // Static operation metadata: AWS-JSON protocol, payload in the request body,
    // operation identified by the "DeviceFarm_20150623.CreateDevicePool" target.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("DeviceFarm_20150623.CreateDevicePool").serviceName("AWSDeviceFarm").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public CreateDevicePoolRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the request into an HTTP {@link Request}.
     *
     * @throws SdkClientException if the argument is null or marshalling fails
     *         (the original exception is preserved as the cause)
     */
    public Request<CreateDevicePoolRequest> marshall(CreateDevicePoolRequest createDevicePoolRequest) {

        if (createDevicePoolRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            final ProtocolRequestMarshaller<CreateDevicePoolRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    createDevicePoolRequest);

            protocolMarshaller.startMarshalling();
            CreateDevicePoolRequestMarshaller.getInstance().marshall(createDevicePoolRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
UCL/EIDP-4
eidpdata/src/main/java/uk/ac/ucl/eidp/data/NodeQualifier.java
823
package uk.ac.ucl.eidp.data;

import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;

import java.lang.annotation.Retention;
import java.lang.annotation.Target;

import javax.inject.Qualifier;

/**
 * CDI qualifier distinguishing injectable database-node implementations by
 * their {@link NodeType}. Retained at runtime so the injection framework can
 * read it; applicable to types, methods, fields and parameters.
 *
 * @author David Guzman {@literal d.guzman at ucl.ac.uk}
 */
@Qualifier
@Retention(RUNTIME)
@Target( {TYPE, METHOD, FIELD, PARAMETER} )
public @interface NodeQualifier {

  /**
   * The NodeType value will tell {@link StrategyResolver} how to connect to a database.
   * @return {@link NodeType} the type of database node to connect to (pool, remote EIDP, jdbc)
   */
  NodeType value();
}
apache-2.0
leonarduk/unison
src/main/java/uk/co/sleonard/unison/input/DataHibernatorWorker.java
4719
/**
 * DataHibernatorWorker.java
 *
 * Created on 24 October 2007, 18:10
 *
 * To change this template, choose Tools | Template Manager and open the template in the editor.
 */
package uk.co.sleonard.unison.input;

import java.util.ArrayList;
import java.util.ListIterator;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.log4j.Logger;
import org.hibernate.Session;

import uk.co.sleonard.unison.UNISoNLogger;
import uk.co.sleonard.unison.datahandling.HibernateHelper;

/**
 * The Class DataHibernatorWorker.
 *
 * <p>Background worker that drains {@link NewsArticle}s from a shared queue and
 * persists them via {@link HibernateHelper}, one Hibernate {@link Session} per
 * worker. Up to {@code numberofHibernators} workers run concurrently; each one
 * exits once the queue is empty, and the last to finish reports completion.
 *
 * <p>NOTE(review): the static {@code workers} list is an unsynchronized
 * ArrayList mutated from multiple threads (startHibernators/construct/
 * stopDownload) — looks racy; verify callers serialize access.
 *
 * @author Stephen &lt;github@leonarduk.com&gt;
 * @since v1.0.0
 *
 */
public class DataHibernatorWorker extends SwingWorker {

	/** The logger. */
	private static Logger logger = Logger.getLogger("DataHibernatorWorker");

	/** The number of hibernators (maximum concurrent workers). */
	private static int numberofHibernators = 20;

	/** The log. */
	private static UNISoNLogger log;

	/** The workers. Shared registry of live workers; see thread-safety note above. */
	private static ArrayList<DataHibernatorWorker> workers = new ArrayList<>();

	/** The reader. Source of progress counters updated as articles are stored/skipped. */
	private final NewsGroupReader reader;

	/** The save to database. Loop flag; cleared to make construct() finish. */
	private boolean saveToDatabase = true;

	// Persistence helper doing the actual hibernate work for each article.
	private final HibernateHelper helper;

	// Queue shared by all workers; drained cooperatively.
	private final LinkedBlockingQueue<NewsArticle> queue;

	// One session per worker, reused for every article this worker saves.
	private final Session session;

	/**
	 * Sets the logger.
	 *
	 * @param logger
	 *            the new logger
	 */
	public static void setLogger(final UNISoNLogger logger) {
		DataHibernatorWorker.log = logger;
	}

	/**
	 * Start hibernators. Tops the pool up to numberofHibernators workers; each
	 * worker starts its own thread from the constructor.
	 *
	 * @param helper2
	 * @param queue2
	 * @param session2
	 */
	public synchronized static void startHibernators(final NewsGroupReader nntpReader,
	        final HibernateHelper helper2, final LinkedBlockingQueue<NewsArticle> queue2,
	        final Session session2) {
		while (DataHibernatorWorker.workers.size() < DataHibernatorWorker.numberofHibernators) {
			DataHibernatorWorker.workers
			        .add(new DataHibernatorWorker(nntpReader, helper2, queue2, session2));
		}
	}

	/**
	 * Stop download. Interrupts every live worker; each notices the interrupt in
	 * pollQueue() and shuts down.
	 */
	static void stopDownload() {
		for (final ListIterator<DataHibernatorWorker> iter = DataHibernatorWorker.workers
		        .listIterator(); iter.hasNext();) {
			iter.next().interrupt();
		}
	}

	/**
	 * Creates a new instance of DataHibernatorWorker. Note: starts the worker
	 * thread immediately ("this" escapes the constructor).
	 *
	 * @param reader
	 *            the reader
	 * @param helper2
	 * @param session
	 */
	private DataHibernatorWorker(final NewsGroupReader reader, final HibernateHelper helper2,
	        final LinkedBlockingQueue<NewsArticle> queue, final Session session2) {
		super("DataHibernatorWorker");
		this.helper = helper2;
		this.reader = reader;
		this.queue = queue;
		this.session = session2;
		DataHibernatorWorker.logger
		        .debug("Creating " + this.getClass() + " " + reader.getNumberOfMessages());
		this.start();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see uk.co.sleonard.unison.input.SwingWorker#construct()
	 */
	@Override
	public Object construct() {
		DataHibernatorWorker.logger
		        .debug("construct : " + this.saveToDatabase + " queue " + this.queue.size());
		try {
			// HAve one session per worker rather than per message
			while (this.saveToDatabase) {
				this.pollQueue(this.queue, this.session);

				// pause 5 seconds between drain passes before re-checking the queue
				Thread.sleep(5000);

				// completed save so close down
				if (this.queue.isEmpty()) {
					this.saveToDatabase = false;
				}
			}
			DataHibernatorWorker.workers.remove(this);
			if (DataHibernatorWorker.workers.size() == 0) {
				// Last worker out announces overall completion.
				DataHibernatorWorker.log.alert("Download complete");
			}
		}
		catch (@SuppressWarnings("unused") final InterruptedException e) {
			return "Interrupted";
		}
		return "Completed";
	}

	/**
	 * Poll for message. Non-blocking; returns null when the queue is empty.
	 *
	 * @param queue
	 *            the queue
	 * @return the news article
	 */
	private synchronized NewsArticle pollForMessage(final LinkedBlockingQueue<NewsArticle> queue) {
		final NewsArticle article = queue.poll();
		return article;
	}

	// Drains the queue, persisting each article; honours thread interruption by
	// stopping this worker and rethrowing InterruptedException.
	private void pollQueue(final LinkedBlockingQueue<NewsArticle> queue, final Session session)
	        throws InterruptedException {
		while (!queue.isEmpty()) {
			if (Thread.interrupted()) {
				this.stopHibernatingData();
				throw new InterruptedException();
			}
			final NewsArticle article = this.pollForMessage(queue);
			if (null != article) {
				DataHibernatorWorker.logger
				        .debug("Hibernating " + article.getArticleId() + " " + queue.size());
				// Count stored vs skipped so the reader can show progress.
				if (this.helper.hibernateData(article, session)) {
					this.reader.incrementMessagesStored();
				}
				else {
					this.reader.incrementMessagesSkipped();
				}
				this.reader.showDownloadStatus();
			}
		}
	}

	/**
	 * Stop hibernating data.
	 */
	private void stopHibernatingData() {
		DataHibernatorWorker.logger.warn("StopHibernatingData");
		this.saveToDatabase = false;
	}
}
apache-2.0
yangjiandong/MobileBase.G
MobileBase/src/main/java/com/ek/mobileapp/model/Entity.java
380
package com.ek.mobileapp.model;

import java.io.Serializable;

/**
 * Base class for identifiable, cacheable model objects.
 *
 * <p>Fix: added an explicit {@code serialVersionUID}. The class is
 * {@link Serializable} but relied on the compiler-derived UID, so any
 * recompilation could silently break deserialization of previously
 * serialized instances. (If streams serialized under the old implicit UID
 * must remain readable, set this constant to that computed value instead.)
 */
public abstract class Entity implements Serializable {

    private static final long serialVersionUID = 1L;

    // Numeric identity; note there is deliberately no public setter.
    protected int id;

    public int getId() {
        return id;
    }

    // Key under which this entity is stored in the cache layer.
    protected String cacheKey;

    public String getCacheKey() {
        return cacheKey;
    }

    public void setCacheKey(String cacheKey) {
        this.cacheKey = cacheKey;
    }
}
apache-2.0
zpao/buck
test/com/facebook/buck/parser/DefaultUnconfiguredTargetNodeFactoryTest.java
6692
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.parser;

import static org.junit.Assert.assertEquals;

import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.cell.TestCellBuilder;
import com.facebook.buck.core.exceptions.DependencyStack;
import com.facebook.buck.core.model.ConfigurationBuildTargetFactoryForTests;
import com.facebook.buck.core.model.RuleType;
import com.facebook.buck.core.model.UnconfiguredBuildTargetFactoryForTests;
import com.facebook.buck.core.model.UnconfiguredBuildTargetView;
import com.facebook.buck.core.model.targetgraph.impl.Package;
import com.facebook.buck.core.model.targetgraph.raw.UnconfiguredTargetNode;
import com.facebook.buck.core.parser.buildtargetparser.ParsingUnconfiguredBuildTargetViewFactory;
import com.facebook.buck.core.plugin.impl.BuckPluginManagerFactory;
import com.facebook.buck.core.rules.knowntypes.TestKnownRuleTypesProvider;
import com.facebook.buck.core.rules.knowntypes.provider.KnownRuleTypesProvider;
import com.facebook.buck.core.select.Selector;
import com.facebook.buck.core.select.SelectorKey;
import com.facebook.buck.core.select.SelectorList;
import com.facebook.buck.core.select.impl.SelectorFactory;
import com.facebook.buck.core.select.impl.SelectorListFactory;
import com.facebook.buck.parser.api.ImmutablePackageMetadata;
import com.facebook.buck.parser.syntax.ImmutableListWithSelects;
import com.facebook.buck.parser.syntax.ImmutableSelectorValue;
import com.facebook.buck.rules.coercer.JsonTypeConcatenatingCoercerFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.List;
import java.util.Optional;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Tests that {@link DefaultUnconfiguredTargetNodeFactory} turns raw parsed
 * attribute maps (including {@code select()}-style values) into a fully
 * populated {@link UnconfiguredTargetNode}.
 */
public class DefaultUnconfiguredTargetNodeFactoryTest {

  private DefaultUnconfiguredTargetNodeFactory factory;
  private Cell cell;

  @Rule public ExpectedException thrown = ExpectedException.none();

  @Before
  public void setUp() {
    KnownRuleTypesProvider knownRuleTypesProvider =
        TestKnownRuleTypesProvider.create(BuckPluginManagerFactory.createPluginManager());
    cell = new TestCellBuilder().build();
    factory =
        new DefaultUnconfiguredTargetNodeFactory(
            knownRuleTypesProvider,
            new BuiltTargetVerifier(),
            cell.getCellPathResolver(),
            new SelectorListFactory(
                new SelectorFactory(new ParsingUnconfiguredBuildTargetViewFactory())));
  }

  @Test
  public void testCreatePopulatesNode() {
    UnconfiguredBuildTargetView buildTarget =
        UnconfiguredBuildTargetFactoryForTests.newInstance("//a/b:c");

    // Raw attributes as the parser would emit them: "resources" carries a
    // select() value in its syntax form (ImmutableListWithSelects).
    ImmutableMap<String, Object> inputAttributes =
        ImmutableMap.<String, Object>builder()
            .put("buck.type", "java_library")
            .put("name", "c")
            .put("buck.base_path", "a/b")
            .put("deps", ImmutableList.of("//a/b:d", "//a/b:e"))
            .put(
                "resources",
                ImmutableListWithSelects.of(
                    ImmutableList.of(
                        ImmutableSelectorValue.of(
                            ImmutableMap.of(
                                "//c:a",
                                ImmutableList.of("//a/b:file1", "//a/b:file2"),
                                "//c:b",
                                ImmutableList.of("//a/b:file3", "//a/b:file4")),
                            "")),
                    ImmutableList.class))
            .put("visibility", ImmutableList.of("//a/..."))
            .put("within_view", ImmutableList.of("//b/..."))
            .build();

    // Expected attributes after factory processing: the select() syntax form is
    // resolved into a typed SelectorList keyed by configuration build targets.
    ImmutableMap<String, Object> expectAttributes =
        ImmutableMap.<String, Object>builder()
            .put("buck.type", "java_library")
            .put("name", "c")
            .put("buck.base_path", "a/b")
            .put("deps", ImmutableList.of("//a/b:d", "//a/b:e"))
            .put(
                "resources",
                new SelectorList<>(
                    JsonTypeConcatenatingCoercerFactory.createForType(List.class),
                    ImmutableList.of(
                        new Selector<>(
                            ImmutableMap.of(
                                new SelectorKey(
                                    ConfigurationBuildTargetFactoryForTests.newInstance("//c:a")),
                                ImmutableList.of("//a/b:file1", "//a/b:file2"),
                                new SelectorKey(
                                    ConfigurationBuildTargetFactoryForTests.newInstance("//c:b")),
                                ImmutableList.of("//a/b:file3", "//a/b:file4")),
                            ImmutableSet.of(),
                            ""))))
            .put("visibility", ImmutableList.of("//a/..."))
            .put("within_view", ImmutableList.of("//b/..."))
            .build();

    UnconfiguredTargetNode unconfiguredTargetNode =
        factory.create(
            cell,
            cell.getRoot().resolve("a/b/BUCK"),
            buildTarget,
            DependencyStack.root(),
            inputAttributes,
            getPackage());

    assertEquals(
        RuleType.of("java_library", RuleType.Kind.BUILD), unconfiguredTargetNode.getRuleType());
    assertEquals(buildTarget.getData(), unconfiguredTargetNode.getBuildTarget());

    assertEquals(expectAttributes, unconfiguredTargetNode.getAttributes());
    // Visibility/within-view come from the node's own attributes, not from the
    // package metadata supplied by getPackage().
    assertEquals(
        "//a/...",
        Iterables.getFirst(unconfiguredTargetNode.getVisibilityPatterns(), null)
            .getRepresentation());
    assertEquals(
        "//b/...",
        Iterables.getFirst(unconfiguredTargetNode.getWithinViewPatterns(), null)
            .getRepresentation());
  }

  // Builds a minimal Package for the BUCK file under test, with its own
  // visibility ("//a/...") and within_view ("//d/...") metadata.
  Package getPackage() {
    ImmutablePackageMetadata pkg =
        ImmutablePackageMetadata.of(ImmutableList.of("//a/..."), ImmutableList.of("//d/..."));
    return PackageFactory.create(cell, cell.getRoot().resolve("a/b/BUCK"), pkg, Optional.empty());
  }
}
apache-2.0
nherbaut/jdev2015T6A01
dvd2c-box/src/main/java/com/enseirb/telecom/dngroup/dvd2c/service/impl/ThridPartyStorageServiceImpl.java
2150
package com.enseirb.telecom.dngroup.dvd2c.service.impl;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.inject.Inject;
import javax.ws.rs.core.UriBuilder;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.enseirb.telecom.dngroup.dvd2c.modeldb.Document;
import com.enseirb.telecom.dngroup.dvd2c.modeldb.ThirdPartyConfiguration;
import com.enseirb.telecom.dngroup.dvd2c.repository.DocumentRepository;
import com.enseirb.telecom.dngroup.dvd2c.repository.ThirdPartyStorageConfigRepository;
import com.enseirb.telecom.dngroup.dvd2c.service.ThridPartyStorageService;

/**
 * Default {@link ThridPartyStorageService} implementation backed by the
 * document and third-party-configuration repositories.
 */
@Service
public class ThridPartyStorageServiceImpl implements ThridPartyStorageService {

	private static final Logger LOGGER = LoggerFactory
			.getLogger(ThridPartyStorageServiceImpl.class);

	@Inject
	DocumentRepository docRepo;
	@Inject
	ThirdPartyStorageConfigRepository repo;

	/**
	 * Builds one redirect URI per registered third party that can handle the
	 * document's type. Each URI is {@code <baseUrl>/<documentId>}.
	 *
	 * @param doc the document to redirect to
	 * @return the (possibly empty) list of candidate third-party URIs
	 */
	private List<URI> generateRedirectUri(Document doc) {
		List<URI> res = new ArrayList<URI>();
		for (ThirdPartyConfiguration conf : repo.findAll()) {
			if (thirdPartyDeployable(conf, doc.getType())) {
				res.add(UriBuilder.fromPath(conf.getBaseUrl())
						.path("" + doc.getId()).build());
			}
		}
		return res;
	}

	/**
	 * Looks up the document by id and delegates to
	 * {@link #generateRedirectUri(Document)}.
	 *
	 * @param contentId numeric document id as a string
	 * @return redirect URIs, or an empty list when the document is unknown
	 */
	@Override
	public List<URI> generateRedirectUri(String contentId) {
		Document doc = docRepo.findOne(Integer.valueOf(contentId));
		if (doc != null) {
			return generateRedirectUri(doc);
		} else {
			// Collections.emptyList() is generically typed, so the raw
			// EMPTY_LIST constant and its @SuppressWarnings("unchecked")
			// are no longer needed.
			return Collections.emptyList();
		}
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see
	 * com.enseirb.telecom.dngroup.dvd2c.service.ThridPartyStorage#register(
	 * java.lang.String, java.lang.String)
	 */
	@Override
	public void register(String baseUrL, String name) {
		// Base URL is used as the uniqueness key: register only once.
		if (repo.findByBaseUrl(baseUrL) == null) {
			ThirdPartyConfiguration conf = new ThirdPartyConfiguration();
			conf.setBaseUrl(baseUrL);
			conf.setName(name);
			repo.save(conf);
		} else {
			LOGGER.debug("third party already registered");
		}
	}

	// NOTE(review): always returns true, so every registered third party
	// currently receives every document type — presumably a placeholder for
	// real type-based filtering; confirm before relying on it.
	private boolean thirdPartyDeployable(ThirdPartyConfiguration conf, String type) {
		return true;
	}
}
apache-2.0
rao2100/SpringBootStarter
src/test/java/com/rao2100/starter/utils/SystemdJournalUtilsTest.java
1156
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.rao2100.starter.utils;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;

/**
 * JUnit 4 scaffold for {@link SystemdJournalUtils}. The lifecycle methods are
 * empty IDE-generated stubs; testSend is a smoke test only (no assertions).
 *
 * @author openetdev
 */
public class SystemdJournalUtilsTest {

    public SystemdJournalUtilsTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of send method, of class SystemdJournalUtils.
     * Smoke test: passes as long as send() does not throw.
     */
    @Test
    public void testSend() {
        System.out.println("send");
        SystemdJournalUtils.send();
    }

    /**
     * Test of read method, of class SystemdJournalUtils.
     * NOTE(review): the actual call is commented out, so this test currently
     * verifies nothing — confirm whether read() is safe to enable.
     */
    @Test
    public void testRead() {
        System.out.println("read");
//        SystemdJournalUtils.read();
    }
}
apache-2.0
Saber-Tecnologias/acervo-app
app/src/main/java/br/ufpe/sabertecnologias/acervoapp/ui/callbacks/GruposCallback.java
351
package br.ufpe.sabertecnologias.acervoapp.ui.callbacks;

import java.util.ArrayList;

import br.ufpe.sabertecnologias.acervoapp.modelo.dados.Grupo;

/**
 * Callback contract between the group-list UI and its host.
 *
 * Created by joaotrindade on 25/10/16.
 */
public interface GruposCallback{

    // Presumably navigates to the detail screen for the given group —
    // TODO confirm against the implementing activity/fragment.
    public void abrirGrupo(Grupo g);

    // Presumably leaves the current group screen — TODO confirm.
    public void exit();

    // Pushes the current list of groups to the group controller.
    void notifyGrupoController(ArrayList<Grupo> mGrupos);
}
apache-2.0
nblair/sometime
sometime-war/src/main/java/org/jasig/schedassist/web/security/DelegateCalendarAccountUserDetailsImpl.java
5819
/**
 * Licensed to Jasig under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Jasig licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.schedassist.web.security;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.jasig.schedassist.impl.owner.NotRegisteredException;
import org.jasig.schedassist.model.ICalendarAccount;
import org.jasig.schedassist.model.IDelegateCalendarAccount;
import org.jasig.schedassist.model.IScheduleOwner;
import org.springframework.security.core.GrantedAuthority;

/**
 * {@link CalendarAccountUserDetails} implementation for {@link IDelegateCalendarAccount}s.
 *
 * @author Nicholas Blair, nblair@doit.wisc.edu
 * @version $Id: DelegateCalendarAccountUserDetailsImpl.java 2306 2010-07-28 17:20:12Z npblair $
 */
public class DelegateCalendarAccountUserDetailsImpl implements CalendarAccountUserDetails {

	private static final long serialVersionUID = 53706L;

	private static final String EMPTY = "";

	private final IDelegateCalendarAccount delegateCalendarAccount;
	private IScheduleOwner scheduleOwner;

	/**
	 * Creates details for a delegate account that has not (yet) registered
	 * as a schedule owner.
	 *
	 * @param delegateCalendarAccount the delegate account
	 */
	public DelegateCalendarAccountUserDetailsImpl(IDelegateCalendarAccount delegateCalendarAccount) {
		this(delegateCalendarAccount, null);
	}

	/**
	 * @param delegateCalendarAccount the delegate account
	 * @param delegateScheduleOwner the registered owner, or null if not registered
	 */
	public DelegateCalendarAccountUserDetailsImpl(
			IDelegateCalendarAccount delegateCalendarAccount,
			IScheduleOwner delegateScheduleOwner) {
		this.delegateCalendarAccount = delegateCalendarAccount;
		this.scheduleOwner = delegateScheduleOwner;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#getAuthorities()
	 */
	public Collection<GrantedAuthority> getAuthorities() {
		List<GrantedAuthority> authorities = new ArrayList<GrantedAuthority>();
		// A registered schedule owner supersedes mere eligibility, so only one
		// authority is ever granted. (The previous implementation added
		// DELEGATE_REGISTER and then removed it again when an owner was
		// present; this produces the same final set without the add/remove.)
		if (null != this.scheduleOwner) {
			authorities.add(SecurityConstants.DELEGATE_OWNER);
		} else if (null != this.delegateCalendarAccount
				&& this.delegateCalendarAccount.isEligible()) {
			authorities.add(SecurityConstants.DELEGATE_REGISTER);
		}
		return Collections.unmodifiableList(authorities);
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#getPassword()
	 */
	public String getPassword() {
		// Delegate logon is handled elsewhere; no password is stored here.
		return EMPTY;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#getUsername()
	 */
	public String getUsername() {
		// NOTE(review): unlike getAuthorities/isEnabled, this does not guard
		// against a null delegateCalendarAccount — confirm callers never reach
		// here with a null account.
		return this.delegateCalendarAccount.getUsername();
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#isAccountNonExpired()
	 */
	public boolean isAccountNonExpired() {
		return true;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#isAccountNonLocked()
	 */
	public boolean isAccountNonLocked() {
		return true;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#isCredentialsNonExpired()
	 */
	public boolean isCredentialsNonExpired() {
		return true;
	}

	/*
	 * (non-Javadoc)
	 * @see org.springframework.security.userdetails.UserDetails#isEnabled()
	 */
	public boolean isEnabled() {
		// Equivalent to the former ternary (x != null ? x.isEligible() : false).
		return null != this.delegateCalendarAccount
				&& this.delegateCalendarAccount.isEligible();
	}

	/*
	 * (non-Javadoc)
	 * @see org.jasig.schedassist.web.security.CalendarAccountUserDetails#getActiveDisplayName()
	 */
	public String getActiveDisplayName() {
		StringBuilder display = new StringBuilder();
		display.append(this.delegateCalendarAccount.getDisplayName());
		display.append(" (managed by ");
		display.append(this.delegateCalendarAccount.getAccountOwner().getUsername());
		display.append(")");
		return display.toString();
	}

	/*
	 * (non-Javadoc)
	 * @see org.jasig.schedassist.web.security.CalendarAccountUserDetails#getCalendarAccount()
	 */
	@Override
	public ICalendarAccount getCalendarAccount() {
		return getDelegateCalendarAccount();
	}

	/**
	 * @return the {@link IDelegateCalendarAccount}
	 */
	public IDelegateCalendarAccount getDelegateCalendarAccount() {
		return this.delegateCalendarAccount;
	}

	/*
	 * (non-Javadoc)
	 * @see org.jasig.schedassist.web.security.CalendarAccountUserDetails#getScheduleOwner()
	 */
	@Override
	public IScheduleOwner getScheduleOwner() throws NotRegisteredException {
		// Guard clause replaces the if/else; same outcome.
		if (null == this.scheduleOwner) {
			throw new NotRegisteredException(this.delegateCalendarAccount + " is not registered");
		}
		return this.scheduleOwner;
	}

	/*
	 * (non-Javadoc)
	 * @see org.jasig.schedassist.web.security.CalendarAccountUserDetails#isDelegate()
	 */
	@Override
	public final boolean isDelegate() {
		return true;
	}

	/*
	 * (non-Javadoc)
	 * @see org.jasig.schedassist.web.security.CalendarAccountUserDetails#updateScheduleOwner(org.jasig.schedassist.model.IScheduleOwner)
	 */
	@Override
	public void updateScheduleOwner(IScheduleOwner owner) {
		this.scheduleOwner = owner;
	}
}
apache-2.0
punkhorn/camel-upstream
components/camel-spring/src/test/java/org/apache/camel/spring/MainTest.java
2166
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.spring;

import java.util.List;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.spring.example.MyProcessor;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verifies that {@link Main} can bootstrap a Camel context, run a simple
 * file-to-mock route end to end, and shut down cleanly.
 */
public class MainTest extends Assert {
    private static final Logger LOG = LoggerFactory.getLogger(MainTest.class);

    @Test
    public void testMain() throws Exception {
        // lets make a simple route
        Main main = new Main();
        main.addRouteBuilder(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Poll every file under src/test/data (noop=true leaves the
                // files in place) and forward through MyProcessor to a mock.
                from("file://src/test/data?initialDelay=0&delay=10&noop=true").process(new MyProcessor()).to("mock:results");
            }
        });
        main.start();

        CamelContext camelContext = main.getCamelContext();

        MockEndpoint endpoint = camelContext.getEndpoint("mock:results", MockEndpoint.class);
        // in case we add more files in src/test/data
        endpoint.expectedMinimumMessageCount(2);
        endpoint.assertIsSatisfied();
        List<Exchange> list = endpoint.getReceivedExchanges();
        LOG.debug("Received: " + list);
        main.stop();
    }
}
apache-2.0
Tankernn/TankernnGameEngine
src/main/java/eu/tankernn/gameEngine/particles/ParticleRenderer.java
4913
package eu.tankernn.gameEngine.particles;

import java.nio.FloatBuffer;
import java.util.List;
import java.util.Map;

import org.lwjgl.BufferUtils;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL31;
import org.lwjgl.util.vector.Matrix4f;
import org.lwjgl.util.vector.Vector3f;

import eu.tankernn.gameEngine.entities.Camera;
import eu.tankernn.gameEngine.loader.Loader;
import eu.tankernn.gameEngine.renderEngine.Vao;
import eu.tankernn.gameEngine.renderEngine.Vbo;

/**
 * Instanced renderer for billboarded particles. One quad is drawn
 * MAX_INSTANCES times per texture group; per-instance data (model-view matrix,
 * texture-atlas offsets, blend factor) is streamed into a single VBO.
 */
public class ParticleRenderer {
	// Corner positions of the unit billboard quad (triangle strip).
	private static final float[] VERTICES = {-0.5f, 0.5f, -0.5f, -0.5f, 0.5f, 0.5f, 0.5f, -0.5f};
	private static final int MAX_INSTANCES = 10000;
	// Floats per instance: 16 (matrix) + 4 (two tex offsets) + 1 (blend).
	private static final int INSTANCE_DATA_LENGTH = 21;

	private static final FloatBuffer buffer = BufferUtils.createFloatBuffer(MAX_INSTANCES * INSTANCE_DATA_LENGTH);

	private Vao quad;
	private ParticleShader shader;

	private Vbo vbo;
	// Write cursor into the per-frame instance-data array.
	private int pointer = 0;

	protected ParticleRenderer(Loader loader, Matrix4f projectionMatrix) {
		this.vbo = Vbo.create(GL15.GL_ARRAY_BUFFER, GL15.GL_STREAM_DRAW, INSTANCE_DATA_LENGTH * MAX_INSTANCES);
		quad = loader.loadToVAO(VERTICES, 2);
		// Attributes 1-4: the four matrix columns; 5: tex offsets; 6: blend.
		for (int i = 0; i < 5; i++)
			quad.addInstacedAttribute(vbo, i + 1, 4, INSTANCE_DATA_LENGTH, i * 4);
		quad.addInstacedAttribute(vbo, 6, 1, INSTANCE_DATA_LENGTH, 20);
		shader = new ParticleShader();
		shader.start();
		shader.projectionMatrix.loadMatrix(projectionMatrix);
		shader.stop();
	}

	/**
	 * Renders all particles, batched by texture.
	 *
	 * @param particles particle lists keyed by their texture
	 * @param camera camera providing the view matrix
	 */
	protected void render(Map<ParticleTexture, List<IParticle>> particles, Camera camera) {
		Matrix4f viewMatrix = camera.getViewMatrix();
		prepare();
		// Iterate entries directly instead of keySet() + get() per key.
		for (Map.Entry<ParticleTexture, List<IParticle>> entry : particles.entrySet()) {
			bindTexture(entry.getKey());
			List<IParticle> particleList = entry.getValue();
			pointer = 0;
			float[] vboData = new float[particleList.size() * INSTANCE_DATA_LENGTH];
			for (IParticle p : particleList) {
				updateModelViewMatrix(p.getPosition(), p.getRotation(), p.getScale(), viewMatrix, vboData);
				updateTexCoordInfo(p, vboData);
			}
			vbo.updateData(vboData, buffer);
			GL31.glDrawArraysInstanced(GL11.GL_TRIANGLE_STRIP, 0, quad.getIndexCount(), particleList.size());
		}
		finishRendering();
	}

	// NOTE(review): finalize() is deprecated for cleanup; an explicit
	// dispose()/cleanUp() call would be more reliable — kept for compatibility.
	@Override
	protected void finalize() {
		shader.finalize();
	}

	// Appends the two atlas offsets and the blend factor for one particle.
	private void updateTexCoordInfo(IParticle p, float[] data) {
		data[pointer++] = p.getTexOffset1().x;
		data[pointer++] = p.getTexOffset1().y;
		data[pointer++] = p.getTexOffset2().x;
		data[pointer++] = p.getTexOffset2().y;
		data[pointer++] = p.getBlend();
	}

	private void bindTexture(ParticleTexture texture) {
		int blendType = texture.usesAdditiveBlending() ? GL11.GL_ONE : GL11.GL_ONE_MINUS_SRC_ALPHA;
		GL11.glBlendFunc(GL11.GL_SRC_ALPHA, blendType);
		texture.getTexture().bindToUnit(0);
		shader.numberOfRows.loadFloat(texture.getNumberOfRows());
	}

	private void updateModelViewMatrix(Vector3f position, float rotation, float scale, Matrix4f viewMatrix, float[] vboData) {
		Matrix4f modelMatrix = new Matrix4f();
		Matrix4f.translate(position, modelMatrix, modelMatrix);
		// Sets rotation of model matrix to transpose of rotation of view
		// matrix, so the quad always faces the camera (billboarding).
		modelMatrix.m00 = viewMatrix.m00;
		modelMatrix.m01 = viewMatrix.m10;
		modelMatrix.m02 = viewMatrix.m20;
		modelMatrix.m10 = viewMatrix.m01;
		modelMatrix.m11 = viewMatrix.m11;
		modelMatrix.m12 = viewMatrix.m21;
		modelMatrix.m20 = viewMatrix.m02;
		modelMatrix.m21 = viewMatrix.m12;
		modelMatrix.m22 = viewMatrix.m22;
		Matrix4f.rotate((float) Math.toRadians(rotation), new Vector3f(0, 0, 1), modelMatrix, modelMatrix);
		Matrix4f.rotate((float) Math.toRadians(180), new Vector3f(1, 0, 0), modelMatrix, modelMatrix);
		Matrix4f.scale(new Vector3f(scale, scale, scale), modelMatrix, modelMatrix);
		Matrix4f modelViewMatrix = Matrix4f.mul(viewMatrix, modelMatrix, null);
		storeMatrixData(modelViewMatrix, vboData);
	}

	// Writes the 16 matrix floats in column order into the instance array.
	private void storeMatrixData(Matrix4f matrix, float[] vboData) {
		vboData[pointer++] = matrix.m00;
		vboData[pointer++] = matrix.m01;
		vboData[pointer++] = matrix.m02;
		vboData[pointer++] = matrix.m03;
		vboData[pointer++] = matrix.m10;
		vboData[pointer++] = matrix.m11;
		vboData[pointer++] = matrix.m12;
		vboData[pointer++] = matrix.m13;
		vboData[pointer++] = matrix.m20;
		vboData[pointer++] = matrix.m21;
		vboData[pointer++] = matrix.m22;
		vboData[pointer++] = matrix.m23;
		vboData[pointer++] = matrix.m30;
		vboData[pointer++] = matrix.m31;
		vboData[pointer++] = matrix.m32;
		vboData[pointer++] = matrix.m33;
	}

	// Enables alpha blending and disables depth writes so particles overlap
	// without occluding each other.
	private void prepare() {
		shader.start();
		quad.bind(0, 1, 2, 3, 4, 5, 6);
		GL11.glEnable(GL11.GL_BLEND);
		GL11.glDisable(GL11.GL_CULL_FACE);
		GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
		GL11.glDepthMask(false);
	}

	// Restores the GL state changed by prepare().
	private void finishRendering() {
		GL11.glDepthMask(true);
		GL11.glDisable(GL11.GL_BLEND);
		GL11.glEnable(GL11.GL_CULL_FACE);
		quad.unbind(0, 1, 2, 3, 4, 5, 6);
		shader.stop();
	}
}
apache-2.0
mynamecsl/ppweather
src/com/ppweather/app/activity/WeatherActivity.java
5935
package com.ppweather.app.activity;

import com.ppweather.app.R;
import com.ppweather.app.service.AutoUpdateService;
import com.ppweather.app.util.HttpCallbackListener;
import com.ppweather.app.util.HttpUtil;
import com.ppweather.app.util.Utility;

import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

/**
 * Screen that shows the cached weather for the selected county and lets the
 * user switch city or refresh the forecast. When launched with a
 * "county_code" extra, it resolves county code -> weather code -> weather
 * info via two sequential HTTP requests, then caches and displays the result.
 */
public class WeatherActivity extends Activity implements OnClickListener{

	// Container of the weather widgets; hidden while a sync is in flight.
	private LinearLayout weatherInfoLayout;
	/**
	 * Shows the city name.
	 */
	private TextView cityNameText;
	/**
	 * Shows the publish time of the forecast.
	 */
	private TextView publishText;
	/**
	 * Shows the weather description.
	 */
	private TextView weatherDespText;
	/**
	 * Shows the first temperature value.
	 */
	private TextView temp1Text;
	/**
	 * Shows the second temperature value.
	 */
	private TextView temp2Text;
	/**
	 * Shows the current date.
	 */
	private TextView currentDateText;
	/**
	 * Button: switch city.
	 */
	private Button switchCity;
	/**
	 * Button: refresh the weather.
	 */
	private Button refreshWeather;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		requestWindowFeature(Window.FEATURE_NO_TITLE);
		setContentView(R.layout.weather_layout);
		weatherInfoLayout = (LinearLayout)findViewById(R.id.weather_info_layout);
		cityNameText = (TextView)findViewById(R.id.city_name);
		publishText = (TextView)findViewById(R.id.publish_text);
		weatherDespText = (TextView)findViewById(R.id.weather_desp);
		temp1Text = (TextView)findViewById(R.id.temp1);
		temp2Text = (TextView)findViewById(R.id.temp2);
		currentDateText = (TextView)findViewById(R.id.current_date);
		switchCity = (Button)findViewById(R.id.switch_city);
		refreshWeather = (Button)findViewById(R.id.refresh_weather);
		switchCity.setOnClickListener(this);
		refreshWeather.setOnClickListener(this);
		String countyCode = getIntent().getStringExtra("county_code");
		if (!TextUtils.isEmpty(countyCode)) {
			// A county code was passed in: hide the stale UI and query its weather.
			publishText.setText("同步中。。。");
			Log.d("ssss", "countyCode = " + countyCode);
			weatherInfoLayout.setVisibility(View.INVISIBLE);
			cityNameText.setVisibility(View.INVISIBLE);
			queryWeatherCode(countyCode);
		} else {
			// No county code: show the locally cached weather directly.
			showWeather();
		}
	}

	/**
	 * Looks up the weather code that corresponds to the given county code.
	 * @param countyCode
	 */
	private void queryWeatherCode(String countyCode) {
		String address = "http://www.weather.com.cn/data/list3/city" + countyCode + ".xml";
		queryFromServer(address, "countyCode");
	}

	/**
	 * Fetches the weather info for the given weather code.
	 * @param weatherCode
	 */
	private void queryWeatherInfo(String weatherCode) {
		String address = "http://www.weather.com.cn/data/cityinfo/" + weatherCode + ".html";
		queryFromServer(address, "weatherCode");
	}

	/**
	 * Issues the HTTP request on a background thread; "type" selects how the
	 * response is interpreted ("countyCode" or "weatherCode").
	 */
	private void queryFromServer(final String address, final String type) {
		HttpUtil.sendHttpRequest(address, new HttpCallbackListener() {
			@Override
			public void onFinish(final String response) {
				if ("countyCode".equals(type)) {
					if (!TextUtils.isEmpty(response)) {
						// Response format is "<county>|<weatherCode>".
						String[] array = response.split("\\|");
						if (array != null && array.length == 2) {
							String weatherCode = array[1];
							queryWeatherInfo(weatherCode);
						}
					}
				} else if("weatherCode".equals(type)) {
					Log.d("ssss", "weatherCode run ");
					// Persist the parsed weather info, then render on the UI thread.
					Utility.handleWeatherResponse(WeatherActivity.this, response);
					runOnUiThread(new Runnable() {
						@Override
						public void run() {
							showWeather();
						}
					});
				}
			}

			@Override
			public void onError(Exception e) {
				// Only surface a failure message; the exception itself is dropped.
				runOnUiThread(new Runnable() {
					@Override
					public void run() {
						publishText.setText("同步失败");
					}
				});
			}
		});
	}

	/**
	 * Reads the stored weather info from SharedPreferences and renders it.
	 */
	private void showWeather() {
		SharedPreferences prefs =PreferenceManager.getDefaultSharedPreferences(this);
		cityNameText.setText(prefs.getString("city_name", ""));
		Log.d("ssss", "cityNameText = " + cityNameText.getText());
		temp1Text.setText(prefs.getString("temp1", ""));
		temp2Text.setText(prefs.getString("temp2", ""));
		weatherDespText.setText(prefs.getString("weather_desp", ""));
		publishText.setText("今天" + prefs.getString("publish_time", "") + "发布");
		currentDateText.setText(prefs.getString("current_date", ""));
		weatherInfoLayout.setVisibility(View.VISIBLE);
		cityNameText.setVisibility(View.VISIBLE);
		// Start AutoUpdateService here so that once a city has been chosen and
		// synced, the service keeps running in the background and refreshes
		// the weather every 8 hours.
		Intent intent = new Intent(this,AutoUpdateService.class);
		startService(intent);
	}

	@Override
	public void onClick(View v) {
		switch (v.getId()) {
		case R.id.switch_city:
			Intent intent = new Intent(this, ChooseAreaActivity.class);
			intent.putExtra("from_weather_activity", true);
			startActivity(intent);
			finish();
			break;
		case R.id.refresh_weather:
			publishText.setText("同步中...");
			// Read the stored weather code from SharedPreferences.
			SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
			String weatherCode = prefs.getString("weather_code", "");
			if (!TextUtils.isEmpty(weatherCode)) {
				queryWeatherInfo(weatherCode);
			}
			// NOTE(review): no break here, so this case falls through to
			// default — harmless since default only breaks, but likely
			// unintended; confirm.
		default:
			break;
		}
	}
}
apache-2.0
arrow-acs/acn-sdk-android
sdk/src/main/java/com/konexios/api/models/AuditLogModel.java
4939
/*
 * Copyright (c) 2017-2019 Arrow Electronics, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License 2.0
 * which accompanies this distribution, and is available at
 * http://apache.org/licenses/LICENSE-2.0
 *
 * Contributors:
 *     Arrow Electronics, Inc.
 *     Konexios, Inc.
 */
package com.konexios.api.models;

import android.os.Parcel;
import android.os.Parcelable;

import androidx.annotation.NonNull;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

import java.util.Objects;

/**
 * Parcelable, Gson-serializable audit-log entry. The free-form
 * {@code parameters} element is carried across Parcel boundaries as a JSON
 * string.
 */
public class AuditLogModel implements Parcelable {
    @SuppressWarnings("unused")
    public static final Parcelable.Creator<AuditLogModel> CREATOR = new Parcelable.Creator<AuditLogModel>() {
        @NonNull
        @Override
        public AuditLogModel createFromParcel(@NonNull Parcel in) {
            return new AuditLogModel(in);
        }

        @NonNull
        @Override
        public AuditLogModel[] newArray(int size) {
            return new AuditLogModel[size];
        }
    };
    @SerializedName("createdBy")
    @Expose
    private String createdBy;
    @SerializedName("createdString")
    @Expose
    private String createdString;
    @SerializedName("objectHid")
    @Expose
    private String objectHid;
    @SerializedName("parameters")
    @Expose
    private JsonElement parameters;
    @SerializedName("productName")
    @Expose
    private String productName;
    @SerializedName("type")
    @Expose
    private String type;

    public AuditLogModel() {
    }

    protected AuditLogModel(@NonNull Parcel in) {
        createdBy = in.readString();
        // readValue mirrors the writeValue used in writeToParcel (handles null).
        createdString = (String) in.readValue(String.class.getClassLoader());
        objectHid = in.readString();
        // NOTE(review): writeToParcel serializes parameters as a JSON string;
        // getAsJsonObject() will throw if the element was not a JSON object
        // (e.g. an array set via setParameters) — confirm parameters is
        // always an object.
        JsonParser parser = new JsonParser();
        parameters = parser.parse(in.readString()).getAsJsonObject();
        productName = in.readString();
        type = in.readString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        AuditLogModel that = (AuditLogModel) o;
        return Objects.equals(createdBy, that.createdBy) &&
                Objects.equals(createdString, that.createdString) &&
                Objects.equals(objectHid, that.objectHid) &&
                Objects.equals(parameters, that.parameters) &&
                Objects.equals(productName, that.productName) &&
                Objects.equals(type, that.type);
    }

    @Override
    public int hashCode() {
        return Objects.hash(createdBy, createdString, objectHid, parameters, productName, type);
    }

    /**
     * @return The createdBy
     */
    public String getCreatedBy() {
        return createdBy;
    }

    /**
     * @param createdBy The createdBy
     */
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    /**
     * @return The createdString
     */
    public String getCreatedString() {
        return createdString;
    }

    /**
     * @param createdString The createdString
     */
    // Fixed: the parameter was previously named "String", shadowing the
    // java.lang.String type inside this method, and the javadoc tag was wrong.
    public void setCreatedString(String createdString) {
        this.createdString = createdString;
    }

    /**
     * @return The objectHid
     */
    public String getObjectHid() {
        return objectHid;
    }

    /**
     * @param objectHid The objectHid
     */
    public void setObjectHid(String objectHid) {
        this.objectHid = objectHid;
    }

    /**
     * @return The parameters, never null (lazily initialized to an empty object)
     */
    public JsonElement getParameters() {
        if (parameters == null) {
            parameters = new JsonObject();
        }
        return parameters;
    }

    /**
     * @param parameters The parameters
     */
    public void setParameters(JsonElement parameters) {
        this.parameters = parameters;
    }

    /**
     * @return The productName
     */
    public String getProductName() {
        return productName;
    }

    /**
     * @param productName The productName
     */
    public void setProductName(String productName) {
        this.productName = productName;
    }

    /**
     * @return The type
     */
    public String getType() {
        return type;
    }

    /**
     * @param type The type
     */
    public void setType(String type) {
        this.type = type;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(@NonNull Parcel dest, int flags) {
        dest.writeString(createdBy);
        // writeValue pairs with the readValue in the Parcel constructor.
        dest.writeValue(createdString);
        dest.writeString(objectHid);
        // parameters crosses the Parcel boundary as a JSON string.
        String str = new Gson().toJson(getParameters());
        dest.writeString(str);
        dest.writeString(productName);
        dest.writeString(type);
    }
}
apache-2.0
huahuajjh/card_recycle
src/main/java/com/tqmars/cardrecycle/application/User/dto/CreateUserInput.java
2014
package com.tqmars.cardrecycle.application.User.dto; /** * Created by jjh on 1/14/17. */ public class CreateUserInput { private String account; private String pwd; private String qq; private String tel; private String businessId; private String businessPwd; private String smsCode; private String withdrawPwd; public String getWithdrawPwd() { return withdrawPwd; } public void setWithdrawPwd(String withdrawPwd) { this.withdrawPwd = withdrawPwd; } public String getSmsCode() { return smsCode; } public void setSmsCode(String smsCode) { this.smsCode = smsCode; } public String getBusinessId() { return businessId; } public void setBusinessId(String businessId) { this.businessId = businessId; } public String getBusinessPwd() { return businessPwd; } public void setBusinessPwd(String businessPwd) { this.businessPwd = businessPwd; } public String getAccount() { return account; } public void setAccount(String account) { this.account = account; } public String getPwd() { return pwd; } public void setPwd(String pwd) { this.pwd = pwd; } public String getQq() { return qq; } public void setQq(String qq) { this.qq = qq; } public String getTel() { return tel; } public void setTel(String tel) { this.tel = tel; } @Override public String toString() { return "CreateUserInput{" + "account='" + account + '\'' + ", pwd='" + pwd + '\'' + ", qq='" + qq + '\'' + ", tel='" + tel + '\'' + ", businessId='" + businessId + '\'' + ", businessPwd='" + businessPwd + '\'' + ", smsCode='" + smsCode + '\'' + ", withdrawPwd='" + withdrawPwd + '\'' + '}'; } }
apache-2.0
frank29259/java
Data_structure/Stack/Friend/postfix.java
756
import java.util.ArrayDeque;
import java.util.Deque;

/**
 * Evaluates postfix (reverse Polish) expressions made of single-digit
 * operands and the operators + - * /.
 */
public class postfix {

	/**
	 * Evaluates the given postfix expression.
	 *
	 * The logic was extracted from main() into a reusable method, and the
	 * project-local IntStack was replaced by the standard ArrayDeque.
	 *
	 * @param post expression such as "52+32*-"; each non-operator character
	 *             is treated as a single-digit operand
	 * @return the value of the expression (integer division for '/')
	 */
	public static int evaluate(String post) {
		Deque<Integer> stack = new ArrayDeque<Integer>();
		for (int i = 0; i < post.length(); i++) {
			char op = post.charAt(i);
			if (op == '+' || op == '-' || op == '*' || op == '/') {
				// Pop order matters: the second pop is the left operand.
				int right = stack.pop();
				int left = stack.pop();
				stack.push(apply(op, left, right));
			} else {
				stack.push(Character.getNumericValue(op));
			}
		}
		return stack.pop();
	}

	// Applies a single binary operator to its operands.
	private static int apply(char op, int left, int right) {
		switch (op) {
			case '+': return left + right;
			case '-': return left - right;
			case '*': return left * right;
			default:  return left / right; // '/': integer division, as before
		}
	}

	public static void main(String[] args) {
		// Same demo expression and output as the original main().
		String post = "52+32*-";
		System.out.println(evaluate(post));
	}
}
apache-2.0
universal-development/myip-app
webapp/src/main/java/com/unidev/myip/web/IndexController.java
1013
package com.unidev.myip.web;

import com.unidev.myip.MyIPService;
import com.unidev.platform.web.WebUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;

import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Map;

/**
 * Frontend controller that renders the index page, showing the caller's
 * IP address and the HTTP headers of the current request.
 */
@Controller
public class IndexController {

    @Autowired
    private HttpServletRequest request;

    @Autowired
    private MyIPService myIPService;

    /**
     * Handles {@code GET /}.
     *
     * @return the "index" view populated with the client IP and request headers
     */
    @RequestMapping("/")
    public ModelAndView index() {
        // Resolve the client address and header list from the current request.
        String clientIp = myIPService.extractClinetIp(request);
        List<Map.Entry<String, Object>> requestHeaders = myIPService.extractHeaders(request);

        ModelAndView view = new ModelAndView("index");
        view.addObject("ip", clientIp);
        view.addObject("headers", requestHeaders);
        return view;
    }
}
apache-2.0
hilemz/yahoo-finance-api
yahoo-finance-api/src/main/java/pl/hilemz/yahoofinanceapi/ParseUtils.java
7707
/* * Copyright 2015 hilemz * * http://www.wykop.pl/ludzie/hilemz/ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package pl.hilemz.yahoofinanceapi; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; /** * <p> * Date: 22.09.2015 * </p> * * <p> * Complex set of tools useful for parsing data provided by Yahoo Finance API. * </p> * * @author hilemz */ public class ParseUtils { /** * This method parse date from String object to LocalDate object. * * @param dateValue * Data value in format "MM/dd/yyyy". * @return Parsed date to LocalDate object. */ public static LocalDate toLocalDate(String dateValue) { return LocalDate.parse(trimQuotes(dateValue), DateTimeFormatter.ofPattern("M/dd/yyyy")); } /** * This method parse time from String object to LocalTime object. * * @param timeValue * Time value in format "h:ma" - "4:00pm". * @return Parsed time to LocalTime object. */ public static LocalTime toLocalTime(String timeValue) { return LocalTime.parse(trimQuotes(timeValue).toUpperCase(), DateTimeFormatter.ofPattern("h:ma")); } /** * This method parse date and time from String objects to LocalDateTime object. * * @param dateValue * Data value in format "MM/dd/yyyy". * @param timeValue * Time value in format "K:ma" - "4:00pm". * @return Parsed date and time to LocalDateTime object. 
*/ public static LocalDateTime toLocalDateTime(String dateValue, String timeValue) { return LocalDateTime.of(toLocalDate(dateValue), toLocalTime(timeValue)); } /** * This method parse date and time from String objects to UNIX EPOCH timestamp. * * @param dateValue * Data value in format "MM/dd/yyyy". * @param timeValue * Time value in format "K:ma" - "4:00pm". * @return Parsed date and time to UNIX EPOCH timestamp object. */ public static long toTimestamp(String dateValue, String timeValue) { return toLocalDateTime(dateValue, timeValue).toInstant(ZoneOffset.UTC).toEpochMilli(); } /** * <p> * This method converts {@link DataType#CHANGE_PERCENT} String object to Tuple of double objects. Example value that * can be passed: * </p> * * <p> * "13.65 - 2.12%" * </p> * * @param value * Value for convertions. * @return Converted value. */ public static Tuple<Double, Double> changePercentToTuple(String value) { String[] elements = splitValue(value); if (elements.length != 2 || !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+%|[0-9]+%\")$")) { throw new IllegalArgumentException("Illegal arguments. Value should be in format: \"13.65 - 2.12%\"."); } return new Tuple<>(Double.parseDouble(elements[0]), Double.parseDouble(trimPercent(elements[1]))); } /** * <p> * This method converts {@link DataType#LAST_TRADE_WITH_TIME} String object to Tuple of LocalTime and double * objects. Example value that can be passed: * </p> * * <p> * "4:00pm - <b>629.25</b>" * </p> * * @param value * Value for convertions. * @return Converted value. */ public static Tuple<LocalTime, Double> tradeWithTimeToTuple(String value) { String[] elements = splitValue(value); if (elements.length != 2 || !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+</b>|[0-9]+</b>\")$")) { throw new IllegalArgumentException("Illegal arguments. 
Value should be in format: \"4" + ":00pm - <b>629.25</b>\"."); } return new Tuple<>(toLocalTime(elements[0]), Double.parseDouble(trimBold(elements[1]))); } /** * <p> * This method converts {@link DataType#DAYS_RANGE} String object to Tuple of double objects. Example value that can * be passed: * </p> * * <p> * "627.02 - 640.00" * </p> * * @param value * Value for convertions. * @return Converted value. */ public static Tuple<Double, Double> priceRangeToTuple(String value) { String[] elements = splitValue(value); if (elements.length != 2 || !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+|[0-9]+\")$")) { throw new IllegalArgumentException("Illegal arguments. Value should be in format: \"627.02 - 640.00\"."); } return new Tuple<>(Double.parseDouble(elements[0]), Double.parseDouble(elements[1])); } /** * This method converts big number passed by Yahoo Finance API with symbols at the end. It supports M and B suffix. * * @param value * Value for convertions. * @return Converted value. */ public static long bigNumberToLong(String value) { if (!isParsable(value)) { throw new IllegalArgumentException("Illegal argument value."); } if (!value.matches("(([0-9]*.[0-9]*)|([0-9]*))[a-zA-Z]{1}$")) { return Long.parseLong(value); } char suffix = value.charAt(value.length() - 1); value = value.substring(0, value.length() - 1); if (suffix == 'B') { return (long) (Double.parseDouble(value) * 1000000000); } else if (suffix == 'M') { return (long) (Double.parseDouble(value) * 1000000); } throw new IllegalArgumentException("Number couldn't be resolved."); } /** * This method converts 3.45% String object value to double object. In result return double value in form - 3.45. * * @param value * Value for convertions. * @return Converted value. 
*/ public static double percentValueToDouble(String value) { if (!isParsable(value)) { throw new IllegalArgumentException("Illegal argument value."); } return Double.parseDouble(trimPercent(value)); } /** * This method splits passed value with delimiter "-". * * @param value * Value to split. * @return Splitted value. */ public static String[] splitValue(String value) { return trimQuotes(value).split(" - "); } /** * This method trim " from passed value. * * @param value * Value to trim. * @return Trimmed value. */ public static String trimQuotes(String value) { if (!isParsable(value)) { throw new IllegalArgumentException("Illegal argument value."); } return value.replaceAll("^\"|\"$", ""); } /** * This method trim % symbol from passed value. * * @param value * Value to trim. * @return Trimmed value. */ public static String trimPercent(String value) { if (!isParsable(value)) { throw new IllegalArgumentException("Illegal argument value."); } return value.replaceAll("%", ""); } /** * This method trim <code><b></b></code> from passed value. * * @param value * Value to trim. * @return Trimmed value. */ public static String trimBold(String value) { if (!isParsable(value)) { throw new IllegalArgumentException("Illegal argument value."); } return value.replaceAll("<b>|</b>", ""); } /** * This method verificate if passed value is appropriate for parsing. * * @param value * Value to check. * @return Result of verification. */ public static boolean isParsable(String value) { return !(value == null || value.equals("N/A") || value.equals("") || value.equals("\"\"")); } }
apache-2.0
dufangyu1990/NewFarmTool
OkHttpFinal/src/main/java/cn/finalteam/okhttpfinal/StringHttpRequestCallback.java
918
/*
 * Copyright (C) 2015 pengjianbo(pengjianbosoft@gmail.com), Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.finalteam.okhttpfinal;

/**
 * Description: HTTP request callback that delivers the raw response body as a {@link String}.
 * Author: pengjianbo
 * Date: 15/12/10 8:13 PM
 */
public class StringHttpRequestCallback extends BaseHttpRequestCallback<String> {

    public StringHttpRequestCallback() {
        super();
        // Tell the base callback to hand the response body through as a plain String.
        type = String.class;
    }
}
apache-2.0
nouribygi/Masnavi
app/src/main/java/com/nouribygi/masnavi/database/MasnaviDataProvider.java
3332
package com.nouribygi.masnavi.database; import android.app.SearchManager; import android.content.ContentProvider; import android.content.ContentResolver; import android.content.ContentValues; import android.content.UriMatcher; import android.database.Cursor; import android.net.Uri; import com.nouribygi.masnavi.util.MasnaviSettings; public class MasnaviDataProvider extends ContentProvider { public static String AUTHORITY = "com.nouribygi.masnavi.database.MasnaviDataProvider"; public static final Uri SEARCH_URI = Uri.parse("content://" + AUTHORITY + "/masnavi/search"); public static final String VERSES_MIME_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE + "/vnd.com.nouribygi.masnavi"; public static final String AYAH_MIME_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE + "/vnd.com.nouribygi.masnavi"; // UriMatcher stuff private static final int SEARCH_VERSES = 0; private static final int GET_VERSE = 1; private static final int SEARCH_SUGGEST = 2; private static final UriMatcher sURIMatcher = buildUriMatcher(); private DatabaseHandler mDatabase = null; private static UriMatcher buildUriMatcher() { UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH); matcher.addURI(AUTHORITY, "masnavi/search", SEARCH_VERSES); matcher.addURI(AUTHORITY, "masnavi/search/*", SEARCH_VERSES); matcher.addURI(AUTHORITY, "masnavi/search/*/*", SEARCH_VERSES); matcher.addURI(AUTHORITY, "masnavi/verse/#/#", GET_VERSE); matcher.addURI(AUTHORITY, "masnavi/verse/*/#/#", GET_VERSE); matcher.addURI(AUTHORITY, SearchManager.SUGGEST_URI_PATH_QUERY, SEARCH_SUGGEST); matcher.addURI(AUTHORITY, SearchManager.SUGGEST_URI_PATH_QUERY + "/*", SEARCH_SUGGEST); return matcher; } @Override public boolean onCreate() { mDatabase = DatabaseHandler.getInstance(getContext()); return true; } @Override public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) { String query = ""; if (selectionArgs.length >= 1) query = selectionArgs[0]; int bookCode = 
MasnaviSettings.getSelectedBook(getContext()); return mDatabase.search(query, bookCode); } @Override public String getType(Uri uri) { switch (sURIMatcher.match(uri)) { case SEARCH_VERSES: return VERSES_MIME_TYPE; case GET_VERSE: return AYAH_MIME_TYPE; case SEARCH_SUGGEST: return SearchManager.SUGGEST_MIME_TYPE; default: throw new IllegalArgumentException("Unknown URL " + uri); } } @Override public Uri insert(Uri uri, ContentValues values) { throw new UnsupportedOperationException(); } @Override public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) { throw new UnsupportedOperationException(); } @Override public int delete(Uri uri, String selection, String[] selectionArgs) { throw new UnsupportedOperationException(); } }
apache-2.0
Notronix/JaLAPI
src/main/java/com/notronix/lw/impl/method/orders/GetOrderViewsMethod.java
615
package com.notronix.lw.impl.method.orders;

import com.google.gson.Gson;
import com.notronix.lw.api.model.UserOrderView;
import com.notronix.lw.impl.method.AbstractLinnworksAPIMethod;

import java.util.Arrays;
import java.util.List;

/**
 * Linnworks API method that retrieves the user's saved order views.
 */
public class GetOrderViewsMethod extends AbstractLinnworksAPIMethod<List<UserOrderView>> {

    /** @return the relative endpoint path for this API call */
    @Override
    public String getURI() {
        return "Orders/GetOrderViews";
    }

    /**
     * Deserializes the JSON payload into a list of order views.
     *
     * @param gson        the JSON mapper to use
     * @param jsonPayload the raw response body (a JSON array of views)
     * @return the parsed order views
     */
    @Override
    public List<UserOrderView> getResponse(Gson gson, String jsonPayload) {
        UserOrderView[] views = gson.fromJson(jsonPayload, UserOrderView[].class);
        return Arrays.asList(views);
    }
}
apache-2.0
ChamaraPhilipsuom/product-is
modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/identity/integration/test/analytics/authentication/AnalyticsLoginTestCase.java
34987
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.identity.integration.test.analytics.authentication; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.context.ConfigurationContextFactory; import org.apache.catalina.startup.Tomcat; import org.apache.commons.codec.binary.Base64; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.HttpClient; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Factory; import org.testng.annotations.Test; import org.wso2.carbon.automation.engine.context.TestUserMode; import org.wso2.carbon.databridge.commons.Event; import org.wso2.carbon.h2.osgi.utils.CarbonUtils; import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationConfig; import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationRequestConfig; import 
org.wso2.carbon.identity.application.common.model.xsd.Property; import org.wso2.carbon.identity.application.common.model.xsd.ServiceProvider; import org.wso2.carbon.identity.sso.saml.stub.types.SAMLSSOServiceProviderDTO; import org.wso2.carbon.integration.common.utils.exceptions.AutomationUtilException; import org.wso2.carbon.integration.common.utils.mgt.ServerConfigurationManager; import org.wso2.identity.integration.common.clients.application.mgt.ApplicationManagementServiceClient; import org.wso2.identity.integration.common.clients.sso.saml.SAMLSSOConfigServiceClient; import org.wso2.identity.integration.common.clients.usermgt.remote.RemoteUserStoreManagerServiceClient; import org.wso2.identity.integration.common.utils.ISIntegrationTest; import org.wso2.identity.integration.test.analytics.commons.ThriftServer; import org.wso2.identity.integration.test.util.Utils; import org.wso2.identity.integration.test.utils.CommonConstants; import javax.xml.xpath.XPathExpressionException; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; public class AnalyticsLoginTestCase extends ISIntegrationTest { private static final Log log = LogFactory.getLog(AnalyticsLoginTestCase.class); // SAML Application attributes private static final String USER_AGENT = "Apache-HttpClient/4.2.5 (java 1.5)"; private static final String APPLICATION_NAME = "SAML-SSO-TestApplication"; private static final String INBOUND_AUTH_TYPE = "samlsso"; private static final String ATTRIBUTE_CS_INDEX_VALUE = "1239245949"; private static final String ATTRIBUTE_CS_INDEX_NAME = "attrConsumServiceIndex"; public static final String TENANT_DOMAIN_PARAM = "tenantDomain"; private static final String SAML_SSO_URL = "https://localhost:9853/samlsso"; private static final String ACS_URL = "http://localhost:8490/%s/home.jsp"; private static final String 
COMMON_AUTH_URL = "https://localhost:9853/commonauth"; private static final String SAML_SSO_LOGIN_URL = "http://localhost:8490/%s/samlsso?SAML2.HTTPBinding=%s"; private static final String SAML_SSO_INDEX_URL = "http://localhost:8490/%s/"; private static final String SAML_SSO_LOGOUT_URL = "http://localhost:8490/%s/logout?SAML2.HTTPBinding=%s"; private static final String NAMEID_FORMAT = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"; private static final String LOGIN_URL = "/carbon/admin/login.jsp"; private static final String profileName = "default"; private static final String sessionStreamId = "org.wso2.is.analytics.stream.OverallSession:1.0.0"; private static final String authenticationStreamId = "org.wso2.is.analytics.stream.OverallAuthentication:1.0.0"; private ApplicationManagementServiceClient applicationManagementServiceClient; private SAMLSSOConfigServiceClient ssoConfigServiceClient; private RemoteUserStoreManagerServiceClient remoteUSMServiceClient; private SAMLConfig config; private Tomcat tomcatServer; private ThriftServer thriftServer; private ServerConfigurationManager serverConfigurationManager; HttpClient sharedHttpClient = new DefaultHttpClient(); private String resultPage; private enum HttpBinding { HTTP_REDIRECT("HTTP-Redirect"), HTTP_POST("HTTP-POST"); String binding; HttpBinding(String binding) { this.binding = binding; } } private enum ClaimType { LOCAL, CUSTOM, NONE } private enum User { SUPER_TENANT_USER("samlAnalyticsuser1", "samlAnalyticsuser1", "carbon.super", "samlAnalyticsuser1", "samlAnalyticsuser1@abc.com", "samlnickuser1"), TENANT_USER("samlAnalyticsuser2@wso2.com", "samlAnalyticsuser2", "wso2.com", "samlAnalyticsuser2", "samlAnalyticsuser2@abc.com", "samlnickuser2"); private String username; private String password; private String tenantDomain; private String tenantAwareUsername; private String email; private String nickname; User(String username, String password, String tenantDomain, String tenantAwareUsername, String 
email, String nickname) { this.username = username; this.password = password; this.tenantDomain = tenantDomain; this.tenantAwareUsername = tenantAwareUsername; this.email = email; this.nickname = nickname; } public String getUsername() { return username; } public String getPassword() { return password; } public String getTenantDomain() { return tenantDomain; } public String getTenantAwareUsername() { return tenantAwareUsername; } public String getEmail() { return email; } public String getNickname() { return nickname; } } ; private enum App { SUPER_TENANT_APP_WITH_SIGNING("travelocity.com", true), TENANT_APP_WITHOUT_SIGNING("travelocity.com-saml-tenantwithoutsigning", false); private String artifact; private boolean signingEnabled; App(String artifact, boolean signingEnabled) { this.artifact = artifact; this.signingEnabled = signingEnabled; } public String getArtifact() { return artifact; } public boolean isSigningEnabled() { return signingEnabled; } } private static class SAMLConfig { private TestUserMode userMode; private User user; private HttpBinding httpBinding; private ClaimType claimType; private App app; private SAMLConfig(TestUserMode userMode, User user, HttpBinding httpBinding, ClaimType claimType, App app) { this.userMode = userMode; this.user = user; this.httpBinding = httpBinding; this.claimType = claimType; this.app = app; } public TestUserMode getUserMode() { return userMode; } public App getApp() { return app; } public User getUser() { return user; } public ClaimType getClaimType() { return claimType; } public HttpBinding getHttpBinding() { return httpBinding; } @Override public String toString() { return "SAMLConfig[" + ", userMode=" + userMode.name() + ", user=" + user.getUsername() + ", httpBinding=" + httpBinding + ", claimType=" + claimType + ", app=" + app.getArtifact() + ']'; } } @Factory(dataProvider = "samlConfigProvider") public AnalyticsLoginTestCase(SAMLConfig config) { if (log.isDebugEnabled()) { log.debug("SAML SSO Test initialized 
for " + config); } this.config = config; } @BeforeClass(alwaysRun = true) public void testInit() throws Exception { super.init(); changeIdentityXml(); super.init(config.getUserMode()); thriftServer = new ThriftServer("Wso2EventTestCase", 8021, true); thriftServer.start(8021); log.info("Thrift Server is Started on port 8462"); ConfigurationContext configContext = ConfigurationContextFactory.createConfigurationContextFromFileSystem(null, null); applicationManagementServiceClient = new ApplicationManagementServiceClient(sessionCookie, backendURL, configContext); ssoConfigServiceClient = new SAMLSSOConfigServiceClient(backendURL, sessionCookie); remoteUSMServiceClient = new RemoteUserStoreManagerServiceClient(backendURL, sessionCookie); createUser(); createApplication(); //Starting tomcat log.info("Starting Tomcat"); tomcatServer = Utils.getTomcat(getClass()); URL resourceUrl = getClass().getResource(File.separator + "samples" + File.separator + config.getApp() .getArtifact() + ".war"); Utils.startTomcat(tomcatServer, "/" + config.getApp().getArtifact(), resourceUrl.getPath()); } @AfterClass(alwaysRun = true) public void testClear() throws Exception { deleteUser(); deleteApplication(); ssoConfigServiceClient = null; applicationManagementServiceClient = null; remoteUSMServiceClient = null; thriftServer.stop(); replaceIdentityXml(); //Stopping tomcat tomcatServer.stop(); tomcatServer.destroy(); Thread.sleep(1000); } @Test(description = "Add service provider", groups = "wso2.is", priority = 1) public void testAddSP() throws Exception { Boolean isAddSuccess = ssoConfigServiceClient.addServiceProvider(createSsoServiceProviderDTO()); Assert.assertTrue(isAddSuccess, "Adding a service provider has failed for " + config); SAMLSSOServiceProviderDTO[] samlssoServiceProviderDTOs = ssoConfigServiceClient .getServiceProviders().getServiceProviders(); Assert.assertEquals(samlssoServiceProviderDTOs[0].getIssuer(), config.getApp().getArtifact(), "Adding a service provider has failed 
for " + config); } @Test(alwaysRun = true, description = "Testing SAML SSO login", groups = "wso2.is", dependsOnMethods = {"testAddSP"}) public void testSAMLSSOIsPassiveLogin() { try { HttpClient httpClient = sharedHttpClient; HttpResponse response; response = Utils.sendGetRequest(String.format(SAML_SSO_INDEX_URL, config.getApp().getArtifact(), config .getHttpBinding().binding), USER_AGENT, httpClient); String samlResponse = Utils.extractDataFromResponse(response, "name='SAMLResponse'", 5); samlResponse = new String(Base64.decodeBase64(samlResponse)); Assert.assertTrue(samlResponse.contains("Destination=\"" + String.format(ACS_URL, config.getApp() .getArtifact()) + "\"")); } catch (Exception e) { Assert.fail("SAML SSO Login test failed for " + config, e); } } @Test(alwaysRun = true, description = "Testing SAML SSO login", groups = "wso2.is", dependsOnMethods = {"testSAMLSSOIsPassiveLogin"}) public void testSAMLSSOLogin() { try { HttpResponse response; HttpClient httpClient = sharedHttpClient; response = Utils.sendGetRequest(String.format(SAML_SSO_LOGIN_URL, config.getApp().getArtifact(), config .getHttpBinding().binding), USER_AGENT, httpClient); if (config.getHttpBinding() == HttpBinding.HTTP_POST) { String samlRequest = Utils.extractDataFromResponse(response, CommonConstants.SAML_REQUEST_PARAM, 5); response = sendSAMLMessage(SAML_SSO_URL, CommonConstants.SAML_REQUEST_PARAM, samlRequest); EntityUtils.consume(response.getEntity()); response = Utils.sendRedirectRequest(response, USER_AGENT, ACS_URL, config.getApp().getArtifact(), httpClient); } String sessionKey = Utils.extractDataFromResponse(response, CommonConstants.SESSION_DATA_KEY, 1); response = Utils.sendPOSTMessage(sessionKey, COMMON_AUTH_URL, USER_AGENT, ACS_URL, config.getApp() .getArtifact(), config.getUser().getUsername(), config.getUser().getPassword(), httpClient); EntityUtils.consume(response.getEntity()); response = Utils.sendRedirectRequest(response, USER_AGENT, ACS_URL, 
config.getApp().getArtifact(), httpClient); String samlResponse = Utils.extractDataFromResponse(response, CommonConstants.SAML_RESPONSE_PARAM, 5); response = sendSAMLMessage(String.format(ACS_URL, config.getApp().getArtifact()), CommonConstants .SAML_RESPONSE_PARAM, samlResponse); Thread.sleep(2000); Assert.assertEquals(thriftServer.getPreservedEventList().size(), 3); Event sessionEvent = null; Event authStepEvent = null; Event overallAuthEvent = null; for (Event event : thriftServer.getPreservedEventList()) { String streamId = event.getStreamId(); if (sessionStreamId.equalsIgnoreCase(streamId)) { sessionEvent = event; } if (authenticationStreamId.equalsIgnoreCase(streamId)) { Object[] eventStreamData = event.getPayloadData(); if ((Boolean) eventStreamData[2]) { overallAuthEvent = event; } else if ((Boolean) eventStreamData[16]) { authStepEvent = event; } } } assertSessionEvent(sessionEvent); Object[] eventStreamData = overallAuthEvent.getPayloadData(); eventStreamData = authStepEvent.getPayloadData(); // authenticationSuccess Assert.assertEquals(eventStreamData[2], false); // userName Assert.assertEquals(eventStreamData[3], "samlAnalyticsuser1"); // userStoreDomain Assert.assertEquals(eventStreamData[4], "PRIMARY"); // tenantDomain Assert.assertEquals(eventStreamData[5], "carbon.super"); // inboundAuthType Assert.assertEquals(eventStreamData[8], "samlsso"); // serviceprovider Assert.assertEquals(eventStreamData[9], "SAML-SSO-TestApplication"); // remembermeEnabled Assert.assertEquals(eventStreamData[10], false); // forceAuthEnabled Assert.assertEquals(eventStreamData[11], false); // rolesCommaSeperated Assert.assertEquals(eventStreamData[13], "Internal/everyone"); // authenticationStep Assert.assertEquals(eventStreamData[14], "1"); // isFirstLogin Assert.assertEquals(eventStreamData[18], true); extractDataFromResponse(response); } catch (Exception e) { Assert.fail("SAML SSO Login Analytics test failed for " + config, e); } finally { 
thriftServer.resetPreservedEventList(); } } @Test(alwaysRun = true, description = "Testing SAML SSO login", groups = "wso2.is", dependsOnMethods = {"testSAMLSSOLogin"}) public void testSAMLSSOLoginWithExistingSession() { try { HttpResponse response; HttpClient httpClient = sharedHttpClient; response = Utils.sendGetRequest(String.format(SAML_SSO_LOGIN_URL, config.getApp().getArtifact(), config .getHttpBinding().binding), USER_AGENT, httpClient); if (config.getHttpBinding() == HttpBinding.HTTP_POST) { String samlRequest = Utils.extractDataFromResponse(response, CommonConstants.SAML_REQUEST_PARAM, 5); response = sendSAMLMessage(SAML_SSO_URL, CommonConstants.SAML_REQUEST_PARAM, samlRequest); EntityUtils.consume(response.getEntity()); response = Utils.sendRedirectRequest(response, USER_AGENT, ACS_URL, config.getApp().getArtifact(), httpClient); } Event sessionEvent = null; for (Event event : thriftServer.getPreservedEventList()) { String streamId = event.getStreamId(); if (sessionStreamId.equalsIgnoreCase(streamId)) { sessionEvent = event; } } assertSessionUpdateEvent(sessionEvent); extractDataFromResponse(response); } catch (Exception e) { Assert.fail("SAML SSO Login Analytics test failed for " + config, e); } finally { thriftServer.resetPreservedEventList(); } } @Test(alwaysRun = true, description = "Testing SAML SSO login fail", groups = "wso2.is", dependsOnMethods = {"testSAMLSSOLogout"}) public void testSAMLSSOLoginFail() { try { HttpResponse response; HttpClient httpClient = new DefaultHttpClient(); response = Utils.sendGetRequest(String.format(SAML_SSO_LOGIN_URL, config.getApp().getArtifact(), config .getHttpBinding().binding), USER_AGENT, httpClient); if (config.getHttpBinding() == HttpBinding.HTTP_POST) { String samlRequest = Utils.extractDataFromResponse(response, CommonConstants.SAML_REQUEST_PARAM, 5); response = sendSAMLMessage(SAML_SSO_URL, CommonConstants.SAML_REQUEST_PARAM, samlRequest); EntityUtils.consume(response.getEntity()); response = 
Utils.sendRedirectRequest(response, USER_AGENT, ACS_URL, config.getApp().getArtifact(), httpClient); } String sessionKey = Utils.extractDataFromResponse(response, CommonConstants.SESSION_DATA_KEY, 1); response = Utils.sendPOSTMessage(sessionKey, COMMON_AUTH_URL, USER_AGENT, ACS_URL, config.getApp() .getArtifact(), "dummy", config.getUser().getPassword(), httpClient); EntityUtils.consume(response.getEntity()); response = Utils.sendRedirectRequest(response, USER_AGENT, ACS_URL, config.getApp().getArtifact(), httpClient); String samlResponse = Utils.extractDataFromResponse(response, CommonConstants.SAML_RESPONSE_PARAM, 5); response = sendSAMLMessage(String.format(ACS_URL, config.getApp().getArtifact()), CommonConstants .SAML_RESPONSE_PARAM, samlResponse); Thread.sleep(2000); Event event = (Event) thriftServer.getPreservedEventList().get(0); Object[] eventStreamData = event.getPayloadData(); // authenticationSuccess Assert.assertEquals(eventStreamData[2], false); // userName Assert.assertEquals(eventStreamData[3], "dummy"); // userStoreDomain Assert.assertEquals(eventStreamData[4], "PRIMARY"); // tenantDomain Assert.assertEquals(eventStreamData[5], "carbon.super"); // inboundAuthType Assert.assertEquals(eventStreamData[8], "samlsso"); // serviceprovider Assert.assertEquals(eventStreamData[9], "SAML-SSO-TestApplication"); // remembermeEnabled Assert.assertEquals(eventStreamData[10], false); // forceAuthEnabled Assert.assertEquals(eventStreamData[11], false); // rolesCommaSeperated Assert.assertEquals(eventStreamData[13], "NOT_AVAILABLE"); // authenticationStep Assert.assertEquals(eventStreamData[14], "1"); extractDataFromResponse(response); } catch (Exception e) { Assert.fail("SAML SSO Login Analytics test failed for " + config, e); } finally { thriftServer.resetPreservedEventList(); } } @Test(alwaysRun = true, description = "Testing SAML SSO logout", groups = "wso2.is", dependsOnMethods = {"testSAMLSSOLoginWithExistingSession"}) public void testSAMLSSOLogout() throws 
Exception { try { HttpResponse response; HttpClient httpClient = sharedHttpClient; response = Utils.sendGetRequest(String.format(SAML_SSO_LOGOUT_URL, config.getApp().getArtifact(), config .getHttpBinding().binding), USER_AGENT, httpClient); if (config.getHttpBinding() == HttpBinding.HTTP_POST) { String samlRequest = Utils.extractDataFromResponse(response, CommonConstants.SAML_REQUEST_PARAM, 5); response = sendSAMLMessage(SAML_SSO_URL, CommonConstants.SAML_REQUEST_PARAM, samlRequest); } String samlResponse = Utils.extractDataFromResponse(response, CommonConstants.SAML_RESPONSE_PARAM, 5); response = sendSAMLMessage(String.format(ACS_URL, config.getApp().getArtifact()), CommonConstants .SAML_RESPONSE_PARAM, samlResponse); assertSessionTerminationEvent(thriftServer.getPreservedEventList().get(0)); extractDataFromResponse(response); } catch (Exception e) { Assert.fail("SAML SSO Logout test failed for " + config, e); } finally { thriftServer.resetPreservedEventList(); } } @DataProvider(name = "samlConfigProvider") public static SAMLConfig[][] samlConfigProvider() { return new SAMLConfig[][]{ {new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_REDIRECT, ClaimType.NONE, App.SUPER_TENANT_APP_WITH_SIGNING)}, }; } private HttpResponse sendSAMLMessage(String url, String samlMsgKey, String samlMsgValue) throws IOException { HttpClient httpClient = sharedHttpClient; List<NameValuePair> urlParameters = new ArrayList<NameValuePair>(); HttpPost post = new HttpPost(url); post.setHeader("User-Agent", USER_AGENT); urlParameters.add(new BasicNameValuePair(samlMsgKey, samlMsgValue)); if (config.getUserMode() == TestUserMode.TENANT_ADMIN || config.getUserMode() == TestUserMode.TENANT_USER) { urlParameters.add(new BasicNameValuePair(TENANT_DOMAIN_PARAM, config.getUser().getTenantDomain())); } post.setEntity(new UrlEncodedFormEntity(urlParameters)); return httpClient.execute(post); } private void createApplication() throws Exception { ServiceProvider 
serviceProvider = new ServiceProvider(); serviceProvider.setApplicationName(APPLICATION_NAME); serviceProvider.setDescription("This is a test Service Provider"); applicationManagementServiceClient.createApplication(serviceProvider); serviceProvider = applicationManagementServiceClient.getApplication(APPLICATION_NAME); InboundAuthenticationRequestConfig requestConfig = new InboundAuthenticationRequestConfig(); requestConfig.setInboundAuthType(INBOUND_AUTH_TYPE); requestConfig.setInboundAuthKey(config.getApp().getArtifact()); Property attributeConsumerServiceIndexProp = new Property(); attributeConsumerServiceIndexProp.setName(ATTRIBUTE_CS_INDEX_NAME); attributeConsumerServiceIndexProp.setValue(ATTRIBUTE_CS_INDEX_VALUE); requestConfig.setProperties(new Property[]{attributeConsumerServiceIndexProp}); InboundAuthenticationConfig inboundAuthenticationConfig = new InboundAuthenticationConfig(); inboundAuthenticationConfig.setInboundAuthenticationRequestConfigs( new InboundAuthenticationRequestConfig[]{requestConfig}); serviceProvider.setInboundAuthenticationConfig(inboundAuthenticationConfig); applicationManagementServiceClient.updateApplicationData(serviceProvider); } private void deleteApplication() throws Exception { applicationManagementServiceClient.deleteApplication(APPLICATION_NAME); } private void createUser() { log.info("Creating User " + config.getUser().getUsername()); try { // creating the user remoteUSMServiceClient.addUser(config.getUser().getTenantAwareUsername(), config.getUser().getPassword(), null, null, profileName, true); } catch (Exception e) { Assert.fail("Error while creating the user", e); } } private void deleteUser() { log.info("Deleting User " + config.getUser().getUsername()); try { remoteUSMServiceClient.deleteUser(config.getUser().getTenantAwareUsername()); } catch (Exception e) { Assert.fail("Error while deleting the user", e); } } private SAMLSSOServiceProviderDTO createSsoServiceProviderDTO() { SAMLSSOServiceProviderDTO 
samlssoServiceProviderDTO = new SAMLSSOServiceProviderDTO(); samlssoServiceProviderDTO.setIssuer(config.getApp().getArtifact()); samlssoServiceProviderDTO.setAssertionConsumerUrls(new String[]{String.format(ACS_URL, config.getApp().getArtifact())}); samlssoServiceProviderDTO.setDefaultAssertionConsumerUrl(String.format(ACS_URL, config.getApp().getArtifact())); samlssoServiceProviderDTO.setAttributeConsumingServiceIndex(ATTRIBUTE_CS_INDEX_VALUE); samlssoServiceProviderDTO.setNameIDFormat(NAMEID_FORMAT); samlssoServiceProviderDTO.setDoSignAssertions(config.getApp().isSigningEnabled()); samlssoServiceProviderDTO.setDoSignResponse(config.getApp().isSigningEnabled()); samlssoServiceProviderDTO.setDoSingleLogout(true); samlssoServiceProviderDTO.setLoginPageURL(LOGIN_URL); if (config.getClaimType() != ClaimType.NONE) { samlssoServiceProviderDTO.setEnableAttributeProfile(true); samlssoServiceProviderDTO.setEnableAttributesByDefault(true); } return samlssoServiceProviderDTO; } public void changeIdentityXml() { log.info("Changing identity.xml file to enable analytics"); String carbonHome = CarbonUtils.getCarbonHome(); String analyticsEnabledIdentityXml = getISResourceLocation() + File.separator + "analytics" + File.separator + "config" + File.separator + "identit_analytics_enabled.xml"; File defaultIdentityXml = new File(carbonHome + File.separator + "repository" + File.separator + "conf" + File.separator + "identity" + File.separator + "identity.xml"); try { serverConfigurationManager = new ServerConfigurationManager(isServer); File configuredNotificationProperties = new File(analyticsEnabledIdentityXml); serverConfigurationManager = new ServerConfigurationManager(isServer); serverConfigurationManager.applyConfigurationWithoutRestart(configuredNotificationProperties, defaultIdentityXml, true); copyAuthenticationDataPublisher(); serverConfigurationManager.restartForcefully(); } catch (AutomationUtilException e) { log.error("Error while changing configurations in 
identity.xml"); } catch (XPathExpressionException e) { log.error("Error while changing configurations in identity.xml"); } catch (MalformedURLException e) { log.error("Error while changing configurations in identity.xml"); } catch (IOException e) { log.error("Error while changing configurations in identity.xml"); } } public void copyAuthenticationDataPublisher() { log.info("Changing AuthenticationDataPublisher.xml file to change default port"); String carbonHome = CarbonUtils.getCarbonHome(); String authnDataPublisherWithOffset = getISResourceLocation() + File.separator + "analytics" + File.separator + "config" + File.separator + "IsAnalytics-Publisher-wso2event-AuthenticationData.xml"; File defaultAuthenticationDataPublisher = new File(carbonHome + File.separator + "repository" + File.separator + "deployment" + File.separator + "server" + File.separator + "eventpublishers" + File.separator + "IsAnalytics-Publisher-wso2event-AuthenticationData.xml"); String sessionDataPublisherWithOffset = getISResourceLocation() + File.separator + "analytics" + File.separator + "config" + File.separator + "IsAnalytics-Publisher-wso2event-SessionData.xml"; File defaultSessionDataPublisher = new File(carbonHome + File.separator + "repository" + File.separator + "deployment" + File.separator + "server" + File.separator + "eventpublishers" + File.separator + "IsAnalytics-Publisher-wso2event-SessionData.xml"); try { File configuredAuthnPublisherFile = new File(authnDataPublisherWithOffset); File configuredSessionPublisherFile = new File(sessionDataPublisherWithOffset); serverConfigurationManager = new ServerConfigurationManager(isServer); serverConfigurationManager.applyConfigurationWithoutRestart(configuredAuthnPublisherFile, defaultAuthenticationDataPublisher, true); serverConfigurationManager.applyConfigurationWithoutRestart(configuredSessionPublisherFile, defaultSessionDataPublisher, true); } catch (AutomationUtilException e) { log.error("Error while changing publisher 
configurations"); } catch (XPathExpressionException e) { log.error("Error while changing publisher configurations"); } catch (MalformedURLException e) { log.error("Error while changing publisher configurations"); } catch (IOException e) { log.error("Error while changing publisher configurations"); } } public void replaceIdentityXml() { log.info("Changing identity.xml file to enable analytics"); String carbonHome = CarbonUtils.getCarbonHome(); String defaultIdentityXml = getISResourceLocation() + File.separator + "analytics" + File.separator + "config" + File.separator + "identit_original.xml"; File defaultIdentityXmlLocation = new File(carbonHome + File.separator + "repository" + File.separator + "conf" + File.separator + "identity" + File.separator + "identity.xml"); try { serverConfigurationManager = new ServerConfigurationManager(isServer); File configuredNotificationProperties = new File(defaultIdentityXml); serverConfigurationManager = new ServerConfigurationManager(isServer); serverConfigurationManager.applyConfigurationWithoutRestart(configuredNotificationProperties, defaultIdentityXmlLocation, true); copyAuthenticationDataPublisher(); serverConfigurationManager.restartForcefully(); } catch (AutomationUtilException e) { log.error("Error while changing configurations in identity.xml to default configurations"); } catch (XPathExpressionException e) { log.error("Error while changing configurations in identity.xml to default configurations"); } catch (MalformedURLException e) { log.error("Error while changing configurations in identity.xml to default configurations"); } catch (IOException e) { log.error("Error while changing configurations in identity.xml to default configurations"); } } public void assertSessionEvent(Event sessionEvent) { Object[] sessionObjects = sessionEvent.getPayloadData(); Assert.assertEquals(sessionObjects[1], sessionObjects[2]); Assert.assertEquals(sessionObjects[4], 1); Assert.assertEquals(sessionObjects[5], "samlAnalyticsuser1"); 
Assert.assertEquals(sessionObjects[6], "PRIMARY"); Assert.assertTrue((Long) sessionObjects[2] < (Long) sessionObjects[12]); } public void assertSessionUpdateEvent(Event sessionEvent) { Object[] sessionObjects = sessionEvent.getPayloadData(); // Assert.assertTrue((Long)sessionObjects[1] < (Long)sessionObjects[2]); Assert.assertEquals(sessionObjects[4], 2); Assert.assertEquals(sessionObjects[5], "samlAnalyticsuser1"); Assert.assertEquals(sessionObjects[6], "PRIMARY"); // Assert.assertTrue((Long)sessionObjects[2] < (Long)sessionObjects[10]); } public void assertSessionTerminationEvent(Event sessionEvent) { Object[] sessionObjects = sessionEvent.getPayloadData(); // Assert.assertTrue((Long) sessionObjects[1] < (Long) sessionObjects[2]); Assert.assertEquals(sessionObjects[4], 0); Assert.assertEquals(sessionObjects[5], "samlAnalyticsuser1"); Assert.assertEquals(sessionObjects[6], "PRIMARY"); // Assert.assertTrue((Long)sessionObjects[2] < (Long)sessionObjects[10]); } private String extractDataFromResponse(HttpResponse response) throws IOException { BufferedReader rd = new BufferedReader( new InputStreamReader(response.getEntity().getContent())); StringBuilder result = new StringBuilder(); String line; while ((line = rd.readLine()) != null) { result.append(line); } rd.close(); return result.toString(); } }
apache-2.0
KritikalFabric/corefabric.io
src/contrib/java/com/cisco/qte/jdtn/bp/EidMap.java
14806
/**
 Copyright (c) 2011, Cisco Systems, Inc.
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions are met:
     * Redistributions of source code must retain the above copyright notice,
       this list of conditions and the following disclaimer.
     * Redistributions in binary form must reproduce the above copyright notice,
       this list of conditions and the following disclaimer in the documentation
       and/or other materials provided with the distribution.
     * Neither the name of the Cisco Systems, Inc. nor the names of its
       contributors may be used to endorse or promote products derived from this
       software without specific prior written permission.

 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.cisco.qte.jdtn.bp;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.cisco.qte.jdtn.component.AbstractStartableComponent;
import com.cisco.qte.jdtn.general.GeneralManagement;
import com.cisco.qte.jdtn.general.JDtnException;
import com.cisco.qte.jdtn.general.Utils;
import com.cisco.qte.jdtn.general.XmlRDParser;
import com.cisco.qte.jdtn.general.XmlRdParserException;

/**
 * Mappings from IPN: to DTN: EndPointIds.  Maintains two-way mappings
 * (equivalences) between a set of IPN: EndPointId and DTN: EndPointIds.
 * Mutating operations are synchronized on this instance.
 */
public class EidMap extends AbstractStartableComponent {
	private static final Logger _logger =
		Logger.getLogger(EidMap.class.getCanonicalName());

	private static EidMap _instance = null;

	// Two parallel maps, always updated together, one per lookup direction.
	private final HashMap<EndPointId, IpnEndpointId> _dtnToIpnMap =
		new HashMap<EndPointId, IpnEndpointId>();
	private final HashMap<IpnEndpointId, EndPointId> _ipnToDtnMap =
		new HashMap<IpnEndpointId, EndPointId>();

	/**
	 * Get singleton Instance.  Synchronized: the original lazy initialization
	 * was not thread-safe and could create two instances under concurrency.
	 * @return Singleton instance
	 */
	public static synchronized EidMap getInstance() {
		if (_instance == null) {
			_instance = new EidMap();
		}
		return _instance;
	}

	/**
	 * Protected access constructor
	 */
	protected EidMap() {
		super("EidMap");
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("EidMap()");
		}
	}

	/**
	 * Start this component; installs the default 'dtn:none' <=> 'ipn:0.0' mapping.
	 */
	@Override
	protected void startImpl() {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("startImpl()");
		}
		addDefaultMapping();
	}

	/**
	 * Stop this component; removes the default mapping.
	 */
	@Override
	protected void stopImpl() {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("stopImpl()");
		}
		removeDefaultMapping();
	}

	/**
	 * Set to default state; clears all mappings and re-installs the default.
	 * Synchronized: it mutates both maps, like the other (already synchronized)
	 * mutators.
	 */
	public synchronized void setDefaults() {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("setDefaults()");
		}
		_dtnToIpnMap.clear();
		_ipnToDtnMap.clear();
		addDefaultMapping();
	}

	/**
	 * Parse from config file.  It is assumed that the parser is sitting on the
	 * &lt;EidMap&gt; element.  We parse all contained &lt;EidMapEntry&gt;
	 * sub-elements, adding a Dtn <-> Ipn EID Mapping for each.  We also
	 * parse the ending &lt;/EidMap&gt; tag.
	 * @param parser The config file parser
	 * @throws XmlRdParserException on general parsing errors
	 * @throws IOException On general I/O errors
	 * @throws JDtnException on JDTN specific errors
	 */
	public void parse(XmlRDParser parser)
	throws XmlRdParserException, IOException, JDtnException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("parse()");
		}
		// General structure of EidMap info:
		// <EidMap>
		//   <EidMapEntry dtnEid='dtnEid' ipnEid='ipnEid' />
		//   ...
		// </EidMap>
		// Parse each <EidMapEntry>
		XmlRDParser.EventType event = Utils.nextNonTextEvent(parser);
		while (event == XmlRDParser.EventType.START_ELEMENT) {
			if (!parser.getElementTag().equals("EidMapEntry")) {
				throw new BPException("Expecting <EidMapEntry>");
			}

			// Get 'dtnEid' attribute; must be a 'dtn' scheme Eid
			String dtnEidStr = Utils.getStringAttribute(parser, "dtnEid");
			if (dtnEidStr == null) {
				throw new BPException("Missing attribute 'dtnEid'");
			}
			EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr);
			if (!dtnEid.getScheme().equals(EndPointId.DEFAULT_SCHEME)) {
				throw new BPException("First argument not 'dtn' Eid");
			}

			// Get 'ipnEid' attribute; must be an 'ipn' scheme Eid
			String ipnEidStr = Utils.getStringAttribute(parser, "ipnEid");
			if (ipnEidStr == null) {
				throw new BPException("Missing attribute 'ipnEid'");
			}
			EndPointId ipnEid = EndPointId.createEndPointId(ipnEidStr);
			if (!ipnEid.getScheme().equals(IpnEndpointId.SCHEME_NAME) ||
				!(ipnEid instanceof IpnEndpointId)) {
				throw new BPException("Second argument not 'ipn' Eid");
			}

			// Add the mapping
			addMapping(dtnEid, (IpnEndpointId)ipnEid);

			// Parse </EidMapEntry>
			event = Utils.nextNonTextEvent(parser);
			if (event != XmlRDParser.EventType.END_ELEMENT ||
				!parser.getElementTag().equals("EidMapEntry")) {
				throw new BPException("Expecting </EidMapEntry>");
			}
			event = Utils.nextNonTextEvent(parser);
		}

		// Parse </EidMap>
		if (event != XmlRDParser.EventType.END_ELEMENT ||
			!parser.getElementTag().equals("EidMap")) {
			throw new JDtnException("Expecting '</EidMap>'");
		}
	}

	/**
	 * Write EidMap to config file.  We only do this if there are entries
	 * in the map.  Synchronized so the map is not mutated mid-iteration.
	 * @param pw PrintWriter to output to
	 */
	public synchronized void writeConfig(PrintWriter pw) {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("writeConfig()");
		}
		// Was 'EidMap.getInstance().size()'; querying this instance directly also
		// behaves correctly for a (protected-constructor) non-singleton instance.
		if (size() > 0) {
			pw.println(" <EidMap>");
			for (Map.Entry<EndPointId, IpnEndpointId> entry : _dtnToIpnMap.entrySet()) {
				EndPointId dtnEid = entry.getKey();
				IpnEndpointId ipnEid = entry.getValue();
				// The default mapping is re-created at startup; don't persist it.
				if (!isDefaultMapping(dtnEid, ipnEid)) {
					pw.println(" <EidMapEntry");
					pw.println(" dtnEid='" + dtnEid.getEndPointIdString() + "'");
					pw.println(" ipnEid='" + ipnEid.getEndPointIdString() + "'");
					pw.println(" />");
				}
			}
			pw.println(" </EidMap>");
		}
	}

	// Add an entry to map 'dtn:none' to 'ipn:0.0'
	private void addDefaultMapping() {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("addDefaultMapping()");
		}
		try {
			addMapping(
				EndPointId.DEFAULT_ENDPOINT_ID_STRING,
				IpnEndpointId.DEFAULT_IPNEID_STR);
		} catch (BPException e) {
			_logger.log(Level.SEVERE, "EidMap default mapping", e);
		}
	}

	// Remove entry mapping 'dtn:none' to 'ipn:0.0'
	private void removeDefaultMapping() {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("removeDefaultMapping()");
		}
		try {
			removeMapping(EndPointId.DEFAULT_ENDPOINT_ID_STRING);
		} catch (BPException e) {
			_logger.log(Level.SEVERE, "EidMap default mapping", e);
		}
	}

	// Determine if given mapping is 'dtn:none' <=> 'ipn:0.0'
	private boolean isDefaultMapping(EndPointId dtnEid, IpnEndpointId ipnEid) {
		return dtnEid.getEndPointIdString().equalsIgnoreCase(EndPointId.DEFAULT_ENDPOINT_ID_STRING) &&
			ipnEid.getEndPointIdString().equalsIgnoreCase(IpnEndpointId.DEFAULT_IPNEID_STR);
	}

	/**
	 * Add a mapping between a 'dtn' Eid and an 'ipn' Eid
	 * @param dtnEidStr String containing the 'dtn' Eid
	 * @param ipnEidStr String containing the 'ipn' Eid
	 * @throws BPException if there is already a mapping for dtnEid <=> ipnEid,
	 * or if dtnEidStr is not a valid 'dtn' scheme EndPointId,
	 * or if ipnEidStr is not a valid 'ipn' scheme EndPointId.
	 */
	public void addMapping(String dtnEidStr, String ipnEidStr)
	throws BPException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("addMapping(<String>" + dtnEidStr + " <=> " + ipnEidStr + ")");
		}
		EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr);
		IpnEndpointId ipnEid = new IpnEndpointId(ipnEidStr);
		addMapping(dtnEid, ipnEid);
	}

	/**
	 * Add a mapping between a 'dtn' Eid and an 'ipn' Eid.  Re-adding an
	 * identical, fully-present mapping is silently ignored.
	 * @param dtnEid The 'dtn' Eid
	 * @param ipnEid The 'ipn' Eid
	 * @throws BPException if there is already a mapping for dtnEid <=> ipnEid,
	 * or if dtnEid is not a 'dtn' scheme EndPointId.
	 */
	public synchronized void addMapping(EndPointId dtnEid, IpnEndpointId ipnEid)
	throws BPException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("addMapping(" + dtnEid.getEndPointIdString() +
				" <=> " + ipnEid.getEndPointIdString() + ")");
		}
		if (!dtnEid.getScheme().equals(EndPointId.DEFAULT_SCHEME)) {
			throw new BPException("First argument is not a 'dtn' EndPointId");
		}
		if (_dtnToIpnMap.containsKey(dtnEid)) {
			if (_ipnToDtnMap.containsKey(ipnEid)) {
				// Full Mapping already exists; silently ignore
				return;
			}
			if (GeneralManagement.isDebugLogging()) {
				_logger.finer("addMapping(" + dtnEid.getEndPointIdString() +
					" <=> " + ipnEid.getEndPointIdString() + ") Entry already exists");
				_logger.finest(dump("", true));
			}
			throw new BPException("There is already a mapping for DTN EID: " +
				dtnEid.getEndPointIdString());
		}
		if (_ipnToDtnMap.containsKey(ipnEid)) {
			throw new BPException("There is already a mapping for IPN EID: " +
				ipnEid.getEndPointIdString());
		}
		// Keep the two directional maps in lock-step.
		_dtnToIpnMap.put(dtnEid, ipnEid);
		_ipnToDtnMap.put(ipnEid, dtnEid);
	}

	/**
	 * Remove a mapping between a 'dtn' Eid and an 'ipn' Eid
	 * @param dtnEidStr The 'dtn' Eid String
	 * @throws BPException If no mapping, or dtnEidStr poorly formatted
	 */
	public synchronized void removeMapping(String dtnEidStr)
	throws BPException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("removeMapping(" + dtnEidStr + ")");
		}
		EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr);
		IpnEndpointId ipnEid = getIpnEid(dtnEid);
		if (ipnEid == null) {
			throw new BPException("No mapping for " + dtnEid.getEndPointIdString());
		}
		removeMapping(dtnEid, ipnEid);
	}

	/**
	 * Remove a mapping between a 'dtn' Eid and an 'ipn' Eid
	 * @param dtnEid The 'dtn' Eid
	 * @param ipnEid The 'ipn' Eid
	 * @throws BPException if there is not a mapping for dtnEid <=> ipnEid,
	 * or if dtnEid is not a 'dtn' scheme EndPointId.
	 */
	public synchronized void removeMapping(EndPointId dtnEid, IpnEndpointId ipnEid)
	throws BPException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("removeMapping(" + dtnEid.getEndPointIdString() +
				" <=> " + ipnEid.getEndPointIdString() + ")");
		}
		if (!_dtnToIpnMap.containsKey(dtnEid)) {
			throw new BPException("There is not a mapping for DTN EID: " +
				dtnEid.getEndPointIdString());
		}
		if (!_ipnToDtnMap.containsKey(ipnEid)) {
			throw new BPException("There is not a mapping for IPN EID: " +
				ipnEid.getEndPointIdString());
		}
		_dtnToIpnMap.remove(dtnEid);
		_ipnToDtnMap.remove(ipnEid);
	}

	/**
	 * Dump this object
	 * @param indent Amount of indentation
	 * @param detailed if want detailed dump
	 * @return String containing dump
	 */
	@Override
	public synchronized String dump(String indent, boolean detailed) {
		StringBuilder sb = new StringBuilder(indent + "EidMap\n");
		for (Map.Entry<EndPointId, IpnEndpointId> entry : _dtnToIpnMap.entrySet()) {
			sb.append(
				indent + "  DtnEid=" + entry.getKey().getEndPointIdString() +
				" <=> IpnEid=" + entry.getValue().getEndPointIdString() + "\n");
		}
		return sb.toString();
	}

	/**
	 * Get the IPN Eid mapped to given DTN Eid
	 * @param dtnEidStr Given DTN Eid String
	 * @return Mapped IPN Eid String or null if none mapped
	 * @throws BPException if dtnEidStr is poorly formed
	 */
	public String getIpnEidStr(String dtnEidStr) throws BPException {
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("getIpnEidStr(" + dtnEidStr + ")");
		}
		EndPointId dtnEid = EndPointId.createEndPointId(dtnEidStr);
		IpnEndpointId ipnEid = getIpnEid(dtnEid);
		if (ipnEid == null) {
			if (GeneralManagement.isDebugLogging()) {
				_logger.finer("getIpnEidStr(" + dtnEidStr + ") = null");
			}
			return null;
		}
		if (GeneralManagement.isDebugLogging()) {
			// Fixed copy-paste bug: this trace previously logged "removeMapping(...)".
			_logger.finer("getIpnEidStr(" + dtnEidStr + ") = " + ipnEid.getEndPointIdString());
		}
		return ipnEid.getEndPointIdString();
	}

	/**
	 * Get the IPN Eid mapped to given DTN Eid
	 * @param dtnEid Given DTN Eid
	 * @return Mapped IPN Eid or null if none mapped
	 */
	public synchronized IpnEndpointId getIpnEid(EndPointId dtnEid) {
		IpnEndpointId ipnEid = _dtnToIpnMap.get(dtnEid);
		if (ipnEid == null) {
			if (GeneralManagement.isDebugLogging()) {
				_logger.finer("getIpnEid(" + dtnEid.getEndPointIdString() + ") = null");
			}
			return null;
		}
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("getIpnEid(" + dtnEid.getEndPointIdString() + ") = " +
				ipnEid.getEndPointIdString());
		}
		return ipnEid;
	}

	/**
	 * Get the DTN Eid mapped to given IPN Eid
	 * @param ipnEidStr Given IPN Eid String
	 * @return Mapped DTN Eid String or null if none mapped
	 * @throws BPException if ipnEidStr is poorly formed
	 */
	public String getDtnEidStr(String ipnEidStr) throws BPException {
		IpnEndpointId ipnEid = new IpnEndpointId(ipnEidStr);
		EndPointId dtnEid = getDtnEid(ipnEid);
		if (dtnEid == null) {
			if (GeneralManagement.isDebugLogging()) {
				_logger.finer("getDtnEidStr(" + ipnEidStr + ") = null");
			}
			return null;
		}
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("getDtnEidStr(" + ipnEidStr + ") = " + dtnEid.getEndPointIdString());
		}
		return dtnEid.getEndPointIdString();
	}

	/**
	 * Get the DTN Eid mapped to given IPN Eid
	 * @param ipnEid Given IPN Eid
	 * @return Mapped DTN Eid or null if none mapped
	 */
	public synchronized EndPointId getDtnEid(IpnEndpointId ipnEid) {
		EndPointId dtnEid = _ipnToDtnMap.get(ipnEid);
		if (dtnEid == null) {
			if (GeneralManagement.isDebugLogging()) {
				_logger.finer("getDtnEid(" + ipnEid.getEndPointIdString() + ") = null");
			}
			return null;
		}
		if (GeneralManagement.isDebugLogging()) {
			_logger.finer("getDtnEid(" + ipnEid.getEndPointIdString() + ") = " +
				dtnEid.getEndPointIdString());
		}
		return dtnEid;
	}

	/**
	 * Get the number of mappings.  Synchronized for a consistent view.
	 * @return Number of mappings
	 */
	public synchronized int size() {
		return _dtnToIpnMap.size();
	}
}
apache-2.0
lumeng689/luapp
practise/src/main/java/org/luapp/practise/leetcode/RemoveNthNodeFromEndOfList.java
1829
package org.luapp.practise.leetcode; /** * Created by lum on 2015/3/16. */ public class RemoveNthNodeFromEndOfList { public static class ListNode { int val; ListNode next; ListNode(int x) { val = x; next = null; } } public static ListNode removeNthFromEnd(ListNode head, int n) { if (head == null) { return head; } // 移动指示针 ListNode nPointer = head; int i = 0; while (nPointer != null && i < n) { nPointer = nPointer.next; i++; } // 如果指定n大于队列长度,报错 if (i != n) { System.out.println("error"); return null; } ListNode pre = head; ListNode lPointer = head; while (nPointer != null) { nPointer = nPointer.next; pre = lPointer; lPointer = lPointer.next; } if(lPointer == head) { head = head.next; } else { pre.next = lPointer.next; } return head; } public static void print(ListNode head) { if (head == null) { System.out.println("null"); } else { System.out.print(head.val); ListNode temp = head.next; while (temp != null) { System.out.print("->" + temp.val); temp = temp.next; } System.out.println(); } } public static void main(String[] args) { ListNode head = new ListNode(1); ListNode temp = head; for (int i = 2; i <= 5; i++) { temp.next = new ListNode(i); temp = temp.next; } print(head); print(removeNthFromEnd(head, -1)); } }
apache-2.0
leapframework/framework
base/core/src/main/java/leap/core/security/UserPrincipal.java
1031
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package leap.core.security;

import java.util.Collections;
import java.util.Map;

/**
 * A {@link Principal} that represents a user account.
 *
 * <p>Implementations supply the user's display name and login name; additional
 * detail properties are optional and default to an empty map.
 */
public interface UserPrincipal extends Principal {

    /**
     * Returns the user's display name.
     */
    String getName();

    /**
     * Returns the user's login name.
     */
    String getLoginName();

    /**
     * Returns the details property.
     *
     * <p>The default implementation returns an immutable empty map; override to
     * expose extra per-user attributes.
     */
    default Map<String, Object> getProperties() {
        return Collections.emptyMap();
    }
}
apache-2.0
cltl/coreference-evaluation
src/main/java/eu/newsreader/conversion/ReduceConllResponse.java
2884
package eu.newsreader.conversion;

import eu.newsreader.util.Util;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;

/**
 * Rewrites each CONLL "response" file in place so that it only contains the
 * sentence ids present in the matching "key" file.  Key and response files are
 * paired by comparing their first sentence.
 *
 * Created by piek on 12/15/14.
 */
public class ReduceConllResponse {

    /**
     * Entry point.
     *
     * Options: {@code --key <folder>} and {@code --response <folder>}; the
     * hard-coded developer paths below are only fallback defaults.
     */
    static public void main (String[] args) {
        try {
            String pathToKeyFolder = "";
            String pathToResponseFolder = "";
            // Developer defaults; overridden by --key / --response below.
            pathToKeyFolder = "/Users/piek/Desktop/NWR/NWR-benchmark/coreference/corpus_CONLL/corpus_airbus/events/key";
            pathToResponseFolder = "/Users/piek/Desktop/NWR/NWR-benchmark/coreference/corpus_CONLL/corpus_airbus/events/response";
            for (int i = 0; i < args.length; i++) {
                String arg = args[i];
                if (arg.equalsIgnoreCase("--key") && args.length > (i + 1)) {
                    pathToKeyFolder = args[i + 1];
                } else if (arg.equalsIgnoreCase("--response") && args.length > (i + 1)) {
                    pathToResponseFolder = args[i + 1];
                }
            }
            ArrayList<File> keyFiles = Util.makeFlatFileList(new File(pathToKeyFolder));
            ArrayList<File> responseFiles = Util.makeFlatFileList(new File(pathToResponseFolder));
            System.out.println("keyFiles = " + keyFiles.size());
            System.out.println("responseFiles = " + responseFiles.size());
            for (int i = 0; i < keyFiles.size(); i++) {
                File keyFile = keyFiles.get(i);
                ArrayList<String> sentenceIds = Util.getSentenceIdsConllFile(keyFile);
                String keyS1 = Util.readFirstSentence(keyFile);
                boolean MATCH = false;
                for (int j = 0; j < responseFiles.size(); j++) {
                    File responseFile = responseFiles.get(j);
                    String responseS1 = Util.readFirstSentence(responseFile);
                    // Pair key and response by identical first sentence.
                    if (keyS1.equals(responseS1)) {
                        String reducedResponse = Util.reduceConllFileForSentenceIds(responseFile, sentenceIds);
                        // try-with-resources: the original leaked the stream if write() threw.
                        // NOTE(review): getBytes() uses the platform default charset, as
                        // before — confirm the corpus is not required to be UTF-8.
                        try (OutputStream responseFos = new FileOutputStream(responseFile)) {
                            responseFos.write(reducedResponse.getBytes());
                        }
                        MATCH = true;
                        break;
                    }
                }
                if (!MATCH) {
                    System.out.println("NO MATCH for keyFile = " + keyFile.getName());
                    System.out.println("sentenceIds = " + sentenceIds.toString());
                }
            }
        } catch (Exception e) {
            // The original swallowed every failure ("//e.printStackTrace();"),
            // making the tool exit silently on any error.  Report it instead.
            e.printStackTrace();
        }
    }
}
apache-2.0
Nithanim/netty-packet-library
library/src/test/java/me/nithanim/netty/packetlib/handler/PacketEncoderTest.java
3734
package me.nithanim.netty.packetlib.handler; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import java.util.Arrays; import me.nithanim.netty.packetlib.packets.Packet; import me.nithanim.netty.packetlib.testpackets.TestPacketEmpty; import me.nithanim.netty.packetlib.testpackets.TestPacketFull; import me.nithanim.netty.packetlib.testpackets.TestPacketReferenceCounted; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; public class PacketEncoderTest { private static PacketEncoder encoder; @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { } @Before public void setUp() { encoder = new PacketEncoder(); } @After public void tearDown() { } @Test public void testEncodeEmpty() throws Exception { Packet packet = new TestPacketEmpty(); ByteBuf expected = packetAsBuffer(packet); ByteBuf encoded = getExactByteBuffer(packet.getPacketSize()); encoder.encode(null, packet, encoded); assertEquals(expected, encoded); } @Test public void testEncodeFull() throws Exception { Packet packet = new TestPacketFull(456253454, 53634); ByteBuf expected = packetAsBuffer(packet); ByteBuf encoded = getExactByteBuffer(packet.getPacketSize()); encoder.encode(null, packet, encoded); System.out.println(Arrays.toString(expected.array())); System.out.println(Arrays.toString(encoded.array())); assertEquals(expected, encoded); } @Test public void testEncodeMultiple() throws Exception { Packet[] packets = new Packet[] { new TestPacketEmpty(), new TestPacketFull(32, 34234), new TestPacketEmpty(), new TestPacketFull(234234235, 214421), new TestPacketEmpty() }; int sizeOfPackets = getSizeOfPacketArray(packets); ByteBuf encoded = Unpooled.buffer(sizeOfPackets); for(Packet packet : packets) { encoder.encode(null, packet, encoded); } ByteBuf expected = Unpooled.buffer(sizeOfPackets); for(Packet packet : packets) { 
expected.writeBytes(packetAsBuffer(packet)); } System.out.println(Arrays.toString(expected.array())); System.out.println(Arrays.toString(encoded.array())); assertEquals(expected, encoded); } @Test public void testReferenceCountDecrease() throws Exception { TestPacketReferenceCounted packet = new TestPacketReferenceCounted(); ByteBuf buffer = getExactByteBuffer(packet.getPacketSize()); assertTrue(packet.refCnt() == 1); encoder.encode(null, packet, buffer); assertTrue(packet.refCnt() == 0); } private int getSizeOfPacketArray(Packet[] packets) { int size = 0; for(Packet packet : packets) { size += packet.getPacketSize(); } return size; } private ByteBuf packetAsBuffer(Packet packet) { ByteBuf buffer = getExactByteBuffer(packet.getPacketSize()); writePacketMetaToByteBuf(packet, buffer); packet.pack(buffer); return buffer; } private void writePacketMetaToByteBuf(Packet packet, ByteBuf buf) { buf.writeByte(packet.getId()); buf.writeShort(packet.getPayloadSize()); } private ByteBuf getExactByteBuffer(int payloadSize) { return Unpooled.buffer(payloadSize, payloadSize); } }
apache-2.0
orientechnologies/orientdb
core/src/main/java/com/orientechnologies/orient/core/storage/cluster/v1/OPaginatedClusterV1.java
69403
/* * Copyright 2010-2013 OrientDB LTD (info--at--orientdb.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.storage.cluster.v1; import static com.orientechnologies.orient.core.config.OGlobalConfiguration.DISK_CACHE_PAGE_SIZE; import com.orientechnologies.common.exception.OException; import com.orientechnologies.common.io.OFileUtils; import com.orientechnologies.common.serialization.types.OByteSerializer; import com.orientechnologies.common.serialization.types.OIntegerSerializer; import com.orientechnologies.common.serialization.types.OLongSerializer; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.compression.OCompression; import com.orientechnologies.orient.core.compression.OCompressionFactory; import com.orientechnologies.orient.core.compression.impl.ONothingCompression; import com.orientechnologies.orient.core.config.OContextConfiguration; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.config.OStorageClusterConfiguration; import com.orientechnologies.orient.core.config.OStoragePaginatedClusterConfiguration; import com.orientechnologies.orient.core.conflict.ORecordConflictStrategy; import com.orientechnologies.orient.core.encryption.OEncryption; import com.orientechnologies.orient.core.encryption.OEncryptionFactory; import com.orientechnologies.orient.core.encryption.impl.ONothingEncryption; import 
com.orientechnologies.orient.core.exception.NotEmptyComponentCanNotBeRemovedException; import com.orientechnologies.orient.core.exception.OPaginatedClusterException; import com.orientechnologies.orient.core.exception.ORecordNotFoundException; import com.orientechnologies.orient.core.id.ORecordId; import com.orientechnologies.orient.core.metadata.OMetadataInternal; import com.orientechnologies.orient.core.storage.OPhysicalPosition; import com.orientechnologies.orient.core.storage.ORawBuffer; import com.orientechnologies.orient.core.storage.OStorage; import com.orientechnologies.orient.core.storage.cache.OCacheEntry; import com.orientechnologies.orient.core.storage.cluster.OClusterPage; import com.orientechnologies.orient.core.storage.cluster.OClusterPageDebug; import com.orientechnologies.orient.core.storage.cluster.OClusterPositionMap; import com.orientechnologies.orient.core.storage.cluster.OClusterPositionMapBucket; import com.orientechnologies.orient.core.storage.cluster.OPaginatedCluster; import com.orientechnologies.orient.core.storage.cluster.OPaginatedClusterDebug; import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage; import com.orientechnologies.orient.core.storage.impl.local.OClusterBrowseEntry; import com.orientechnologies.orient.core.storage.impl.local.OClusterBrowsePage; import com.orientechnologies.orient.core.storage.impl.local.paginated.atomicoperations.OAtomicOperation; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Optional; /** * @author Andrey Lomakin (a.lomakin-at-orientdb.com) * @since 10/7/13 */ public final class OPaginatedClusterV1 extends OPaginatedCluster { private static final int STATE_ENTRY_INDEX = 0; private static final int BINARY_VERSION = 1; private static final int DISK_PAGE_SIZE = DISK_CACHE_PAGE_SIZE.getValueAsInteger(); @SuppressWarnings("deprecation") private static final int LOWEST_FREELIST_BOUNDARY = 
OGlobalConfiguration.PAGINATED_STORAGE_LOWEST_FREELIST_BOUNDARY.getValueAsInteger(); private static final int FREE_LIST_SIZE = DISK_PAGE_SIZE - LOWEST_FREELIST_BOUNDARY; private static final int PAGE_INDEX_OFFSET = 16; private static final int RECORD_POSITION_MASK = 0xFFFF; private static final int ONE_KB = 1024; private volatile OCompression compression = ONothingCompression.INSTANCE; private volatile OEncryption encryption = ONothingEncryption.INSTANCE; private final boolean systemCluster; private final OClusterPositionMapV1 clusterPositionMap; private volatile int id; private long fileId; private ORecordConflictStrategy recordConflictStrategy; private static final class AddEntryResult { private final long pageIndex; private final int pagePosition; private final int recordVersion; private final int recordsSizeDiff; private AddEntryResult( final long pageIndex, final int pagePosition, final int recordVersion, final int recordsSizeDiff) { this.pageIndex = pageIndex; this.pagePosition = pagePosition; this.recordVersion = recordVersion; this.recordsSizeDiff = recordsSizeDiff; } } private static final class FindFreePageResult { private final long pageIndex; private final int freePageIndex; private final boolean allocateNewPage; private FindFreePageResult( final long pageIndex, final int freePageIndex, final boolean allocateNewPage) { this.pageIndex = pageIndex; this.freePageIndex = freePageIndex; this.allocateNewPage = allocateNewPage; } } public OPaginatedClusterV1(final String name, final OAbstractPaginatedStorage storage) { this(name, OPaginatedCluster.DEF_EXTENSION, OClusterPositionMap.DEF_EXTENSION, storage); } public OPaginatedClusterV1( final String name, final String dataExtension, final String cpmExtension, final OAbstractPaginatedStorage storage) { super(storage, name, dataExtension, name + dataExtension); systemCluster = OMetadataInternal.SYSTEM_CLUSTER.contains(name); clusterPositionMap = new OClusterPositionMapV1(storage, getName(), getFullName(), 
cpmExtension); } @Override public void configure(final int id, final String clusterName) throws IOException { acquireExclusiveLock(); try { final OContextConfiguration ctxCfg = storage.getConfiguration().getContextConfiguration(); final String cfgCompression = ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD); @SuppressWarnings("deprecation") final String cfgEncryption = ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD); final String cfgEncryptionKey = ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY); init(id, clusterName, cfgCompression, cfgEncryption, cfgEncryptionKey, null); } finally { releaseExclusiveLock(); } } @Override public boolean exists() { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); return isFileExists(atomicOperation, getFullName()); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public int getBinaryVersion() { return BINARY_VERSION; } @Override public OStoragePaginatedClusterConfiguration generateClusterConfig() { acquireSharedLock(); try { return new OStoragePaginatedClusterConfiguration( id, getName(), null, true, OStoragePaginatedClusterConfiguration.DEFAULT_GROW_FACTOR, OStoragePaginatedClusterConfiguration.DEFAULT_GROW_FACTOR, compression.name(), encryption.name(), null, Optional.ofNullable(recordConflictStrategy) .map(ORecordConflictStrategy::getName) .orElse(null), OStorageClusterConfiguration.STATUS.ONLINE, BINARY_VERSION); } finally { releaseSharedLock(); } } @Override public void configure(final OStorage storage, final OStorageClusterConfiguration config) throws IOException { acquireExclusiveLock(); try { final OContextConfiguration ctxCfg = storage.getConfiguration().getContextConfiguration(); final String cfgCompression = 
ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_COMPRESSION_METHOD); @SuppressWarnings("deprecation") final String cfgEncryption = ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_ENCRYPTION_METHOD); final String cfgEncryptionKey = ctxCfg.getValueAsString(OGlobalConfiguration.STORAGE_ENCRYPTION_KEY); init( config.getId(), config.getName(), cfgCompression, cfgEncryption, cfgEncryptionKey, ((OStoragePaginatedClusterConfiguration) config).conflictStrategy); } finally { releaseExclusiveLock(); } } @Override public void create(final OAtomicOperation atomicOperation) { executeInsideComponentOperation( atomicOperation, operation -> { acquireExclusiveLock(); try { fileId = addFile(atomicOperation, getFullName()); initCusterState(atomicOperation); clusterPositionMap.create(atomicOperation); } finally { releaseExclusiveLock(); } }); } @Override public void open(OAtomicOperation atomicOperation) throws IOException { acquireExclusiveLock(); try { fileId = openFile(atomicOperation, getFullName()); clusterPositionMap.open(atomicOperation); } finally { releaseExclusiveLock(); } } @Override public void close() { close(true); } @Override public void close(final boolean flush) { acquireExclusiveLock(); try { if (flush) { synch(); } readCache.closeFile(fileId, flush, writeCache); clusterPositionMap.close(flush); } finally { releaseExclusiveLock(); } } @Override public void delete(OAtomicOperation atomicOperation) { executeInsideComponentOperation( atomicOperation, operation -> { acquireExclusiveLock(); try { final long entries = getEntries(); if (entries > 0) { throw new NotEmptyComponentCanNotBeRemovedException( getName() + " : Not empty cluster can not be deleted. 
Cluster has " + entries + " records"); } deleteFile(atomicOperation, fileId); clusterPositionMap.delete(atomicOperation); } finally { releaseExclusiveLock(); } }); } @Override public boolean isSystemCluster() { return systemCluster; } @Override public String compression() { acquireSharedLock(); try { return compression.name(); } finally { releaseSharedLock(); } } @Override public String encryption() { acquireSharedLock(); try { return encryption.name(); } finally { releaseSharedLock(); } } @Override public OPhysicalPosition allocatePosition( final byte recordType, final OAtomicOperation operation) { return calculateInsideComponentOperation( operation, atomicOperation -> { acquireExclusiveLock(); try { return createPhysicalPosition(recordType, clusterPositionMap.allocate(operation), -1); } finally { releaseExclusiveLock(); } }); } @Override public OPhysicalPosition createRecord( byte[] content, final int recordVersion, final byte recordType, final OPhysicalPosition allocatedPosition, final OAtomicOperation atomicOperation) { content = compression.compress(content); final byte[] encryptedContent = encryption.encrypt(content); return calculateInsideComponentOperation( atomicOperation, operation -> { acquireExclusiveLock(); try { final int entryContentLength = getEntryContentLength(encryptedContent.length); if (entryContentLength < OClusterPage.MAX_RECORD_SIZE) { final byte[] entryContent = new byte[entryContentLength]; int entryPosition = 0; entryContent[entryPosition] = recordType; entryPosition++; OIntegerSerializer.INSTANCE.serializeNative( encryptedContent.length, entryContent, entryPosition); entryPosition += OIntegerSerializer.INT_SIZE; System.arraycopy( encryptedContent, 0, entryContent, entryPosition, encryptedContent.length); entryPosition += encryptedContent.length; entryContent[entryPosition] = 1; entryPosition++; OLongSerializer.INSTANCE.serializeNative(-1L, entryContent, entryPosition); final AddEntryResult addEntryResult = addEntry(recordVersion, 
entryContent, atomicOperation); updateClusterState(1, addEntryResult.recordsSizeDiff, atomicOperation); final long clusterPosition; if (allocatedPosition != null) { clusterPositionMap.update( allocatedPosition.clusterPosition, new OClusterPositionMapBucket.PositionEntry( addEntryResult.pageIndex, addEntryResult.pagePosition), atomicOperation); clusterPosition = allocatedPosition.clusterPosition; } else { clusterPosition = clusterPositionMap.add( addEntryResult.pageIndex, addEntryResult.pagePosition, atomicOperation); } return createPhysicalPosition( recordType, clusterPosition, addEntryResult.recordVersion); } else { final int entrySize = encryptedContent.length + OIntegerSerializer.INT_SIZE + OByteSerializer.BYTE_SIZE; int fullEntryPosition = 0; final byte[] fullEntry = new byte[entrySize]; fullEntry[fullEntryPosition] = recordType; fullEntryPosition++; OIntegerSerializer.INSTANCE.serializeNative( encryptedContent.length, fullEntry, fullEntryPosition); fullEntryPosition += OIntegerSerializer.INT_SIZE; System.arraycopy( encryptedContent, 0, fullEntry, fullEntryPosition, encryptedContent.length); long prevPageRecordPointer = -1; long firstPageIndex = -1; int firstPagePosition = -1; int version = 0; int from = 0; int to = from + (OClusterPage.MAX_RECORD_SIZE - OByteSerializer.BYTE_SIZE - OLongSerializer.LONG_SIZE); int recordsSizeDiff = 0; do { final byte[] entryContent = new byte[to - from + OByteSerializer.BYTE_SIZE + OLongSerializer.LONG_SIZE]; System.arraycopy(fullEntry, from, entryContent, 0, to - from); if (from > 0) { entryContent[ entryContent.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE] = 0; } else { entryContent[ entryContent.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE] = 1; } OLongSerializer.INSTANCE.serializeNative( -1L, entryContent, entryContent.length - OLongSerializer.LONG_SIZE); final AddEntryResult addEntryResult = addEntry(recordVersion, entryContent, atomicOperation); recordsSizeDiff += 
addEntryResult.recordsSizeDiff; if (firstPageIndex == -1) { firstPageIndex = addEntryResult.pageIndex; firstPagePosition = addEntryResult.pagePosition; version = addEntryResult.recordVersion; } final long addedPagePointer = createPagePointer(addEntryResult.pageIndex, addEntryResult.pagePosition); if (prevPageRecordPointer >= 0) { final long prevPageIndex = getPageIndex(prevPageRecordPointer); final int prevPageRecordPosition = getRecordPosition(prevPageRecordPointer); final OCacheEntry prevPageCacheEntry = loadPageForWrite(atomicOperation, fileId, prevPageIndex, false, true); try { final OClusterPage prevPage = new OClusterPage(prevPageCacheEntry); prevPage.setRecordLongValue( prevPageRecordPosition, -OLongSerializer.LONG_SIZE, addedPagePointer); } finally { releasePageFromWrite(atomicOperation, prevPageCacheEntry); } } prevPageRecordPointer = addedPagePointer; from = to; to = to + (OClusterPage.MAX_RECORD_SIZE - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE); if (to > fullEntry.length) { to = fullEntry.length; } } while (from < to); updateClusterState(1, recordsSizeDiff, atomicOperation); final long clusterPosition; if (allocatedPosition != null) { clusterPositionMap.update( allocatedPosition.clusterPosition, new OClusterPositionMapBucket.PositionEntry(firstPageIndex, firstPagePosition), atomicOperation); clusterPosition = allocatedPosition.clusterPosition; } else { clusterPosition = clusterPositionMap.add(firstPageIndex, firstPagePosition, atomicOperation); } return createPhysicalPosition(recordType, clusterPosition, version); } } finally { releaseExclusiveLock(); } }); } private static int getEntryContentLength(final int grownContentSize) { return grownContentSize + 2 * OByteSerializer.BYTE_SIZE + OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE; } @Override public ORawBuffer readRecord(final long clusterPosition, final boolean prefetchRecords) throws IOException { int pagesToPrefetch = 1; if (prefetchRecords) { pagesToPrefetch = 
OGlobalConfiguration.QUERY_SCAN_PREFETCH_PAGES.getValueAsInteger(); } return readRecord(clusterPosition, pagesToPrefetch); } private ORawBuffer readRecord(final long clusterPosition, final int pageCount) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, pageCount, atomicOperation); if (positionEntry == null) { return null; } return internalReadRecord( clusterPosition, positionEntry.getPageIndex(), positionEntry.getRecordPosition(), pageCount, atomicOperation); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } private ORawBuffer internalReadRecord( final long clusterPosition, final long pageIndex, final int recordPosition, int pageCount, final OAtomicOperation atomicOperation) throws IOException { if (pageCount > 1) { final OCacheEntry stateCacheEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, false); try { final OPaginatedClusterStateV1 state = new OPaginatedClusterStateV1(stateCacheEntry); pageCount = (int) Math.min(state.getFileSize() + 1 - pageIndex, pageCount); } finally { releasePageFromRead(atomicOperation, stateCacheEntry); } } int recordVersion; final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false, pageCount); try { final OClusterPage localPage = new OClusterPage(cacheEntry); recordVersion = localPage.getRecordVersion(recordPosition); } finally { releasePageFromRead(atomicOperation, cacheEntry); } final byte[] fullContent = readFullEntry(clusterPosition, pageIndex, recordPosition, atomicOperation, pageCount); if (fullContent == null) { return null; } int fullContentPosition = 0; final byte recordType = fullContent[fullContentPosition]; fullContentPosition++; final int readContentSize = 
OIntegerSerializer.INSTANCE.deserializeNative(fullContent, fullContentPosition); fullContentPosition += OIntegerSerializer.INT_SIZE; byte[] recordContent = Arrays.copyOfRange(fullContent, fullContentPosition, fullContentPosition + readContentSize); recordContent = encryption.decrypt(recordContent); recordContent = compression.uncompress(recordContent); return new ORawBuffer(recordContent, recordVersion, recordType); } @Override public ORawBuffer readRecordIfVersionIsNotLatest( final long clusterPosition, final int recordVersion) throws IOException, ORecordNotFoundException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, atomicOperation); if (positionEntry == null) { throw new ORecordNotFoundException( new ORecordId(id, clusterPosition), "Record for cluster with id " + id + " and position " + clusterPosition + " is absent."); } final int recordPosition = positionEntry.getRecordPosition(); final long pageIndex = positionEntry.getPageIndex(); int loadedRecordVersion; final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false); try { final OClusterPage localPage = new OClusterPage(cacheEntry); if (localPage.isDeleted(recordPosition)) { throw new ORecordNotFoundException( new ORecordId(id, clusterPosition), "Record for cluster with id " + id + " and position " + clusterPosition + " is absent."); } loadedRecordVersion = localPage.getRecordVersion(recordPosition); } finally { releasePageFromRead(atomicOperation, cacheEntry); } if (loadedRecordVersion > recordVersion) { return readRecord(clusterPosition, false); } return null; } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public boolean deleteRecord(OAtomicOperation operation, final long clusterPosition) { 
return calculateInsideComponentOperation( operation, atomicOperation -> { acquireExclusiveLock(); try { final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, operation); if (positionEntry == null) { return false; } long pageIndex = positionEntry.getPageIndex(); int recordPosition = positionEntry.getRecordPosition(); long nextPagePointer; int removedContentSize = 0; do { boolean cacheEntryReleased = false; OCacheEntry cacheEntry = loadPageForWrite(operation, fileId, pageIndex, false, true); int initialFreePageIndex; try { OClusterPage localPage = new OClusterPage(cacheEntry); initialFreePageIndex = calculateFreePageIndex(localPage); if (localPage.isDeleted(recordPosition)) { if (removedContentSize == 0) { cacheEntryReleased = true; releasePageFromWrite(operation, cacheEntry); return false; } else { throw new OPaginatedClusterException( "Content of record " + new ORecordId(id, clusterPosition) + " was broken", this); } } else if (removedContentSize == 0) { releasePageFromWrite(operation, cacheEntry); cacheEntry = loadPageForWrite(operation, fileId, pageIndex, false, true); localPage = new OClusterPage(cacheEntry); } final byte[] content = localPage.deleteRecord(recordPosition, true); operation.addDeletedRecordPosition(id, cacheEntry.getPageIndex(), recordPosition); assert content != null; final int initialFreeSpace = localPage.getFreeSpace(); localPage.deleteRecord(recordPosition, true); operation.addDeletedRecordPosition(id, cacheEntry.getPageIndex(), recordPosition); removedContentSize += localPage.getFreeSpace() - initialFreeSpace; nextPagePointer = OLongSerializer.INSTANCE.deserializeNative( content, content.length - OLongSerializer.LONG_SIZE); } finally { if (!cacheEntryReleased) { releasePageFromWrite(operation, cacheEntry); } } updateFreePagesIndex(initialFreePageIndex, pageIndex, operation); pageIndex = getPageIndex(nextPagePointer); recordPosition = getRecordPosition(nextPagePointer); } while 
(nextPagePointer >= 0); updateClusterState(-1, -removedContentSize, operation); clusterPositionMap.remove(clusterPosition, operation); return true; } finally { releaseExclusiveLock(); } }); } @Override public void updateRecord( final long clusterPosition, byte[] content, final int recordVersion, final byte recordType, final OAtomicOperation atomicOperation) { content = compression.compress(content); content = encryption.encrypt(content); final byte[] encryptedContent = content; executeInsideComponentOperation( atomicOperation, operation -> { acquireExclusiveLock(); try { final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, atomicOperation); if (positionEntry == null) { return; } int nextRecordPosition = positionEntry.getRecordPosition(); long nextPageIndex = positionEntry.getPageIndex(); int newRecordPosition = -1; long newPageIndex = -1; long prevPageIndex = -1; int prevRecordPosition = -1; long nextEntryPointer = -1; int from = 0; int to; long sizeDiff = 0; byte[] updateEntry = null; do { final int entrySize; final int updatedEntryPosition; if (updateEntry == null) { if (from == 0) { entrySize = Math.min( getEntryContentLength(encryptedContent.length), OClusterPage.MAX_RECORD_SIZE); to = entrySize - (2 * OByteSerializer.BYTE_SIZE + OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE); } else { entrySize = Math.min( encryptedContent.length - from + OByteSerializer.BYTE_SIZE + OLongSerializer.LONG_SIZE, OClusterPage.MAX_RECORD_SIZE); to = from + entrySize - (OByteSerializer.BYTE_SIZE + OLongSerializer.LONG_SIZE); } updateEntry = new byte[entrySize]; int entryPosition = 0; if (from == 0) { updateEntry[entryPosition] = recordType; entryPosition++; OIntegerSerializer.INSTANCE.serializeNative( encryptedContent.length, updateEntry, entryPosition); entryPosition += OIntegerSerializer.INT_SIZE; } System.arraycopy(encryptedContent, from, updateEntry, entryPosition, to - from); entryPosition += to - from; if 
(nextPageIndex == positionEntry.getPageIndex()) { updateEntry[entryPosition] = 1; } entryPosition++; OLongSerializer.INSTANCE.serializeNative(-1, updateEntry, entryPosition); assert to >= encryptedContent.length || entrySize == OClusterPage.MAX_RECORD_SIZE; } else { entrySize = updateEntry.length; if (from == 0) { to = entrySize - (2 * OByteSerializer.BYTE_SIZE + OIntegerSerializer.INT_SIZE + OLongSerializer.LONG_SIZE); } else { to = from + entrySize - (OByteSerializer.BYTE_SIZE + OLongSerializer.LONG_SIZE); } } int freePageIndex = -1; final boolean isNew; if (nextPageIndex < 0) { final FindFreePageResult findFreePageResult = findFreePage(entrySize, atomicOperation); nextPageIndex = findFreePageResult.pageIndex; freePageIndex = findFreePageResult.freePageIndex; isNew = findFreePageResult.allocateNewPage; } else { isNew = false; } final OCacheEntry cacheEntry; if (isNew) { final OCacheEntry stateCacheEntry = loadPageForWrite(atomicOperation, fileId, STATE_ENTRY_INDEX, false, true); try { final OPaginatedClusterStateV1 clusterState = new OPaginatedClusterStateV1(stateCacheEntry); final int fileSize = clusterState.getFileSize(); final long filledUpTo = getFilledUpTo(atomicOperation, fileId); if (fileSize == filledUpTo - 1) { cacheEntry = addPage(atomicOperation, fileId); } else { assert fileSize < filledUpTo - 1; cacheEntry = loadPageForWrite(atomicOperation, fileId, fileSize + 1, false, false); } clusterState.setFileSize(fileSize + 1); } finally { releasePageFromWrite(atomicOperation, stateCacheEntry); } } else { cacheEntry = loadPageForWrite(atomicOperation, fileId, nextPageIndex, false, true); } try { final OClusterPage localPage = new OClusterPage(cacheEntry); if (isNew) { localPage.init(); } final int pageFreeSpace = localPage.getFreeSpace(); if (freePageIndex < 0) { freePageIndex = calculateFreePageIndex(localPage); } else { assert isNew || freePageIndex == calculateFreePageIndex(localPage); } if (nextRecordPosition >= 0) { if 
(localPage.isDeleted(nextRecordPosition)) { throw new OPaginatedClusterException( "Record with rid " + new ORecordId(id, clusterPosition) + " was deleted", this); } final int currentEntrySize = localPage.getRecordSize(nextRecordPosition); nextEntryPointer = localPage.getRecordLongValue( nextRecordPosition, currentEntrySize - OLongSerializer.LONG_SIZE); if (currentEntrySize == entrySize) { localPage.replaceRecord(nextRecordPosition, updateEntry, recordVersion); updatedEntryPosition = nextRecordPosition; } else { localPage.deleteRecord(nextRecordPosition, true); atomicOperation.addDeletedRecordPosition( id, cacheEntry.getPageIndex(), nextRecordPosition); if (localPage.getMaxRecordSize() >= entrySize) { updatedEntryPosition = localPage.appendRecord( recordVersion, updateEntry, -1, atomicOperation.getBookedRecordPositions( id, cacheEntry.getPageIndex())); } else { updatedEntryPosition = -1; } } if (nextEntryPointer >= 0) { nextRecordPosition = getRecordPosition(nextEntryPointer); nextPageIndex = getPageIndex(nextEntryPointer); } else { nextPageIndex = -1; nextRecordPosition = -1; } } else { assert localPage.getFreeSpace() >= entrySize; updatedEntryPosition = localPage.appendRecord( recordVersion, updateEntry, -1, atomicOperation.getBookedRecordPositions(id, cacheEntry.getPageIndex())); nextPageIndex = -1; nextRecordPosition = -1; } sizeDiff += pageFreeSpace - localPage.getFreeSpace(); } finally { releasePageFromWrite(atomicOperation, cacheEntry); } updateFreePagesIndex(freePageIndex, cacheEntry.getPageIndex(), atomicOperation); if (updatedEntryPosition >= 0) { if (from == 0) { newPageIndex = cacheEntry.getPageIndex(); newRecordPosition = updatedEntryPosition; } from = to; if (prevPageIndex >= 0) { final OCacheEntry prevCacheEntry = loadPageForWrite(atomicOperation, fileId, prevPageIndex, false, true); try { final OClusterPage prevPage = new OClusterPage(prevCacheEntry); prevPage.setRecordLongValue( prevRecordPosition, -OLongSerializer.LONG_SIZE, 
createPagePointer(cacheEntry.getPageIndex(), updatedEntryPosition)); } finally { releasePageFromWrite(atomicOperation, prevCacheEntry); } } prevPageIndex = cacheEntry.getPageIndex(); prevRecordPosition = updatedEntryPosition; updateEntry = null; } } while (to < encryptedContent.length || updateEntry != null); // clear unneeded pages while (nextEntryPointer >= 0) { nextPageIndex = getPageIndex(nextEntryPointer); nextRecordPosition = getRecordPosition(nextEntryPointer); final int freePagesIndex; final int freeSpace; final OCacheEntry cacheEntry = loadPageForWrite(atomicOperation, fileId, nextPageIndex, false, true); try { final OClusterPage localPage = new OClusterPage(cacheEntry); freeSpace = localPage.getFreeSpace(); freePagesIndex = calculateFreePageIndex(localPage); nextEntryPointer = localPage.getRecordLongValue(nextRecordPosition, -OLongSerializer.LONG_SIZE); localPage.deleteRecord(nextRecordPosition, true); atomicOperation.addDeletedRecordPosition( id, cacheEntry.getPageIndex(), nextRecordPosition); sizeDiff += freeSpace - localPage.getFreeSpace(); } finally { releasePageFromWrite(atomicOperation, cacheEntry); } updateFreePagesIndex(freePagesIndex, nextPageIndex, atomicOperation); } assert newPageIndex >= 0; assert newRecordPosition >= 0; if (newPageIndex != positionEntry.getPageIndex() || newRecordPosition != positionEntry.getRecordPosition()) { clusterPositionMap.update( clusterPosition, new OClusterPositionMapBucket.PositionEntry(newPageIndex, newRecordPosition), atomicOperation); } updateClusterState(0, sizeDiff, atomicOperation); } finally { releaseExclusiveLock(); } }); } @Override public long getTombstonesCount() { return 0; } @Override public OPhysicalPosition getPhysicalPosition(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final long clusterPosition = position.clusterPosition; 
final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, atomicOperation); if (positionEntry == null) { return null; } final long pageIndex = positionEntry.getPageIndex(); final int recordPosition = positionEntry.getRecordPosition(); final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false); try { final OClusterPage localPage = new OClusterPage(cacheEntry); if (localPage.isDeleted(recordPosition)) { return null; } if (localPage.getRecordByteValue( recordPosition, -OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE) == 0) { return null; } final OPhysicalPosition physicalPosition = new OPhysicalPosition(); physicalPosition.recordSize = -1; physicalPosition.recordType = localPage.getRecordByteValue(recordPosition, 0); physicalPosition.recordVersion = localPage.getRecordVersion(recordPosition); physicalPosition.clusterPosition = position.clusterPosition; return physicalPosition; } finally { releasePageFromRead(atomicOperation, cacheEntry); } } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public boolean isDeleted(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final long clusterPosition = position.clusterPosition; final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, atomicOperation); if (positionEntry == null) { return false; } final long pageIndex = positionEntry.getPageIndex(); final int recordPosition = positionEntry.getRecordPosition(); final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false); try { final OClusterPage localPage = new OClusterPage(cacheEntry); return localPage.isDeleted(recordPosition); } finally { releasePageFromRead(atomicOperation, cacheEntry); } } 
finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public long getEntries() { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final OCacheEntry pinnedStateEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, true); try { return new OPaginatedClusterStateV1(pinnedStateEntry).getSize(); } finally { releasePageFromRead(atomicOperation, pinnedStateEntry); } } finally { releaseSharedLock(); } } catch (final IOException ioe) { throw OException.wrapException( new OPaginatedClusterException( "Error during retrieval of size of '" + getName() + "' cluster", this), ioe); } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public long getFirstPosition() throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); return clusterPositionMap.getFirstPosition(atomicOperation); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public long getLastPosition() throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); return clusterPositionMap.getLastPosition(atomicOperation); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public long getNextPosition() throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); return clusterPositionMap.getNextPosition(atomicOperation); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public 
String getFileName() { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { return writeCache.fileNameById(fileId); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public int getId() { return id; } /** Returns the fileId used in disk cache. */ public long getFileId() { return fileId; } @Override public void synch() { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { writeCache.flush(fileId); clusterPositionMap.flush(); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public long getRecordsSize() throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final OCacheEntry pinnedStateEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, true); try { return new OPaginatedClusterStateV1(pinnedStateEntry).getRecordsSize(); } finally { releasePageFromRead(atomicOperation, pinnedStateEntry); } } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public OPhysicalPosition[] higherPositions(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final long[] clusterPositions = clusterPositionMap.higherPositions(position.clusterPosition, atomicOperation); return convertToPhysicalPositions(clusterPositions); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public OPhysicalPosition[] ceilingPositions(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = 
atomicOperationsManager.getCurrentOperation(); final long[] clusterPositions = clusterPositionMap.ceilingPositions(position.clusterPosition, atomicOperation); return convertToPhysicalPositions(clusterPositions); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public OPhysicalPosition[] lowerPositions(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final long[] clusterPositions = clusterPositionMap.lowerPositions(position.clusterPosition, atomicOperation); return convertToPhysicalPositions(clusterPositions); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public OPhysicalPosition[] floorPositions(final OPhysicalPosition position) throws IOException { atomicOperationsManager.acquireReadLock(this); try { acquireSharedLock(); try { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final long[] clusterPositions = clusterPositionMap.floorPositions(position.clusterPosition, atomicOperation); return convertToPhysicalPositions(clusterPositions); } finally { releaseSharedLock(); } } finally { atomicOperationsManager.releaseReadLock(this); } } @Override public ORecordConflictStrategy getRecordConflictStrategy() { return recordConflictStrategy; } @Override public void setRecordConflictStrategy(final String stringValue) { acquireExclusiveLock(); try { recordConflictStrategy = Orient.instance().getRecordConflictStrategy().getStrategy(stringValue); } finally { releaseExclusiveLock(); } } private void updateClusterState( final long sizeDiff, final long recordsSizeDiff, final OAtomicOperation atomicOperation) throws IOException { final OCacheEntry pinnedStateEntry = loadPageForWrite(atomicOperation, fileId, STATE_ENTRY_INDEX, false, true); try { final 
OPaginatedClusterStateV1 paginatedClusterState = new OPaginatedClusterStateV1(pinnedStateEntry); paginatedClusterState.setSize((int) (paginatedClusterState.getSize() + sizeDiff)); paginatedClusterState.setRecordsSize( (int) (paginatedClusterState.getRecordsSize() + recordsSizeDiff)); } finally { releasePageFromWrite(atomicOperation, pinnedStateEntry); } } private void init( final int id, final String name, final String compression, final String encryption, final String encryptionKey, final String conflictStrategy) throws IOException { OFileUtils.checkValidName(name); this.compression = OCompressionFactory.INSTANCE.getCompression(compression, null); this.encryption = OEncryptionFactory.INSTANCE.getEncryption(encryption, encryptionKey); if (conflictStrategy != null) { this.recordConflictStrategy = Orient.instance().getRecordConflictStrategy().getStrategy(conflictStrategy); } this.id = id; } @Override public void setEncryption(final String method, final String key) { acquireExclusiveLock(); try { encryption = OEncryptionFactory.INSTANCE.getEncryption(method, key); } catch (final IllegalArgumentException e) { //noinspection deprecation throw OException.wrapException( new OPaginatedClusterException( "Invalid value for " + ATTRIBUTES.ENCRYPTION + " attribute", this), e); } finally { releaseExclusiveLock(); } } @Override public void setClusterName(final String newName) { acquireExclusiveLock(); try { writeCache.renameFile(fileId, newName + getExtension()); clusterPositionMap.rename(newName); setName(newName); } catch (IOException e) { throw OException.wrapException( new OPaginatedClusterException("Error during renaming of cluster", this), e); } finally { releaseExclusiveLock(); } } private static OPhysicalPosition createPhysicalPosition( final byte recordType, final long clusterPosition, final int version) { final OPhysicalPosition physicalPosition = new OPhysicalPosition(); physicalPosition.recordType = recordType; physicalPosition.recordSize = -1; 
physicalPosition.clusterPosition = clusterPosition; physicalPosition.recordVersion = version; return physicalPosition; } private byte[] readFullEntry( final long clusterPosition, long pageIndex, int recordPosition, final OAtomicOperation atomicOperation, int pageCount) throws IOException { final List<byte[]> recordChunks = new ArrayList<>(2); int contentSize = 0; if (pageCount > 1) { final OCacheEntry stateCacheEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, false); try { final OPaginatedClusterStateV1 state = new OPaginatedClusterStateV1(stateCacheEntry); pageCount = (int) Math.min(state.getFileSize() + 1 - pageIndex, pageCount); } finally { releasePageFromRead(atomicOperation, stateCacheEntry); } } long nextPagePointer; boolean firstEntry = true; do { final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false, pageCount); try { final OClusterPage localPage = new OClusterPage(cacheEntry); if (localPage.isDeleted(recordPosition)) { if (recordChunks.isEmpty()) { return null; } else { throw new OPaginatedClusterException( "Content of record " + new ORecordId(id, clusterPosition) + " was broken", this); } } final byte[] content = localPage.getRecordBinaryValue( recordPosition, 0, localPage.getRecordSize(recordPosition)); assert content != null; if (firstEntry && content[content.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE] == 0) { return null; } recordChunks.add(content); nextPagePointer = OLongSerializer.INSTANCE.deserializeNative( content, content.length - OLongSerializer.LONG_SIZE); contentSize += content.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE; firstEntry = false; } finally { releasePageFromRead(atomicOperation, cacheEntry); } pageIndex = getPageIndex(nextPagePointer); recordPosition = getRecordPosition(nextPagePointer); } while (nextPagePointer >= 0); return convertRecordChunksToSingleChunk(recordChunks, contentSize); } private static byte[] 
convertRecordChunksToSingleChunk( final List<byte[]> recordChunks, final int contentSize) { final byte[] fullContent; if (recordChunks.size() == 1) { fullContent = recordChunks.get(0); } else { fullContent = new byte[contentSize + OLongSerializer.LONG_SIZE + OByteSerializer.BYTE_SIZE]; int fullContentPosition = 0; for (final byte[] recordChuck : recordChunks) { System.arraycopy( recordChuck, 0, fullContent, fullContentPosition, recordChuck.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE); fullContentPosition += recordChuck.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE; } } return fullContent; } private static long createPagePointer(final long pageIndex, final int pagePosition) { return pageIndex << PAGE_INDEX_OFFSET | pagePosition; } private static int getRecordPosition(final long nextPagePointer) { return (int) (nextPagePointer & RECORD_POSITION_MASK); } private static long getPageIndex(final long nextPagePointer) { return nextPagePointer >>> PAGE_INDEX_OFFSET; } private AddEntryResult addEntry( final int recordVersion, final byte[] entryContent, final OAtomicOperation atomicOperation) throws IOException { int recordSizesDiff; int position; int finalVersion = 0; long pageIndex; do { final FindFreePageResult findFreePageResult = findFreePage(entryContent.length, atomicOperation); final int freePageIndex = findFreePageResult.freePageIndex; pageIndex = findFreePageResult.pageIndex; final boolean newPage = findFreePageResult.allocateNewPage; final OCacheEntry cacheEntry; if (newPage) { final OCacheEntry stateCacheEntry = loadPageForWrite(atomicOperation, fileId, STATE_ENTRY_INDEX, false, true); try { final OPaginatedClusterStateV1 clusterState = new OPaginatedClusterStateV1(stateCacheEntry); final int fileSize = clusterState.getFileSize(); final long filledUpTo = getFilledUpTo(atomicOperation, fileId); if (fileSize == filledUpTo - 1) { cacheEntry = addPage(atomicOperation, fileId); } else { assert fileSize < filledUpTo - 1; cacheEntry = 
loadPageForWrite(atomicOperation, fileId, fileSize + 1, false, false); } clusterState.setFileSize(fileSize + 1); } finally { releasePageFromWrite(atomicOperation, stateCacheEntry); } } else { cacheEntry = loadPageForWrite(atomicOperation, fileId, pageIndex, false, true); } try { final OClusterPage localPage = new OClusterPage(cacheEntry); if (newPage) { localPage.init(); } assert newPage || freePageIndex == calculateFreePageIndex(localPage); final int initialFreeSpace = localPage.getFreeSpace(); position = localPage.appendRecord( recordVersion, entryContent, -1, atomicOperation.getBookedRecordPositions(id, cacheEntry.getPageIndex())); final int freeSpace = localPage.getFreeSpace(); recordSizesDiff = initialFreeSpace - freeSpace; if (position >= 0) { finalVersion = localPage.getRecordVersion(position); } } finally { releasePageFromWrite(atomicOperation, cacheEntry); } updateFreePagesIndex(freePageIndex, pageIndex, atomicOperation); } while (position < 0); return new AddEntryResult(pageIndex, position, finalVersion, recordSizesDiff); } private FindFreePageResult findFreePage( final int contentSize, final OAtomicOperation atomicOperation) throws IOException { int freePageIndex = contentSize / ONE_KB; //noinspection deprecation freePageIndex -= OGlobalConfiguration.PAGINATED_STORAGE_LOWEST_FREELIST_BOUNDARY.getValueAsInteger(); if (freePageIndex < 0) { freePageIndex = 0; } long pageIndex; final OCacheEntry pinnedStateEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, true); try { final OPaginatedClusterStateV1 freePageLists = new OPaginatedClusterStateV1(pinnedStateEntry); do { pageIndex = freePageLists.getFreeListPage(freePageIndex); freePageIndex++; } while (pageIndex < 0 && freePageIndex < FREE_LIST_SIZE); } finally { releasePageFromRead(atomicOperation, pinnedStateEntry); } final boolean allocateNewPage; if (pageIndex < 0) { final int fileSize; final OCacheEntry stateCacheEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, false); 
try { final OPaginatedClusterStateV1 clusterState = new OPaginatedClusterStateV1(stateCacheEntry); fileSize = clusterState.getFileSize(); } finally { releasePageFromRead(atomicOperation, stateCacheEntry); } allocateNewPage = true; pageIndex = fileSize + 1; } else { allocateNewPage = false; freePageIndex--; } return new FindFreePageResult(pageIndex, freePageIndex, allocateNewPage); } private void updateFreePagesIndex( final int prevFreePageIndex, final long pageIndex, final OAtomicOperation atomicOperation) throws IOException { final OCacheEntry cacheEntry = loadPageForWrite(atomicOperation, fileId, pageIndex, false, true); try { final OClusterPage localPage = new OClusterPage(cacheEntry); final int newFreePageIndex = calculateFreePageIndex(localPage); if (prevFreePageIndex == newFreePageIndex) { return; } final long nextPageIndex = localPage.getNextPage(); final long prevPageIndex = localPage.getPrevPage(); if (prevPageIndex >= 0) { final OCacheEntry prevPageCacheEntry = loadPageForWrite(atomicOperation, fileId, prevPageIndex, false, true); try { final OClusterPage prevPage = new OClusterPage(prevPageCacheEntry); assert calculateFreePageIndex(prevPage) == prevFreePageIndex; prevPage.setNextPage(nextPageIndex); } finally { releasePageFromWrite(atomicOperation, prevPageCacheEntry); } } if (nextPageIndex >= 0) { final OCacheEntry nextPageCacheEntry = loadPageForWrite(atomicOperation, fileId, nextPageIndex, false, true); try { final OClusterPage nextPage = new OClusterPage(nextPageCacheEntry); if (calculateFreePageIndex(nextPage) != prevFreePageIndex) { calculateFreePageIndex(nextPage); } assert calculateFreePageIndex(nextPage) == prevFreePageIndex; nextPage.setPrevPage(prevPageIndex); } finally { releasePageFromWrite(atomicOperation, nextPageCacheEntry); } } localPage.setNextPage(-1); localPage.setPrevPage(-1); if (prevFreePageIndex < 0 && newFreePageIndex < 0) { return; } if (prevFreePageIndex >= 0 && prevFreePageIndex < FREE_LIST_SIZE) { if (prevPageIndex < 0) { 
updateFreePagesList(prevFreePageIndex, nextPageIndex, atomicOperation); } } if (newFreePageIndex >= 0) { long oldFreePage; final OCacheEntry pinnedStateEntry = loadPageForRead(atomicOperation, fileId, STATE_ENTRY_INDEX, true); try { final OPaginatedClusterStateV1 clusterFreeList = new OPaginatedClusterStateV1(pinnedStateEntry); oldFreePage = clusterFreeList.getFreeListPage(newFreePageIndex); } finally { releasePageFromRead(atomicOperation, pinnedStateEntry); } if (oldFreePage >= 0) { final OCacheEntry oldFreePageCacheEntry = loadPageForWrite(atomicOperation, fileId, oldFreePage, false, true); try { final OClusterPage oldFreeLocalPage = new OClusterPage(oldFreePageCacheEntry); assert calculateFreePageIndex(oldFreeLocalPage) == newFreePageIndex; oldFreeLocalPage.setPrevPage(pageIndex); } finally { releasePageFromWrite(atomicOperation, oldFreePageCacheEntry); } localPage.setNextPage(oldFreePage); localPage.setPrevPage(-1); } updateFreePagesList(newFreePageIndex, pageIndex, atomicOperation); } } finally { releasePageFromWrite(atomicOperation, cacheEntry); } } private void updateFreePagesList( final int freeListIndex, final long pageIndex, final OAtomicOperation atomicOperation) throws IOException { final OCacheEntry pinnedStateEntry = loadPageForWrite(atomicOperation, fileId, STATE_ENTRY_INDEX, true, true); try { final OPaginatedClusterStateV1 paginatedClusterState = new OPaginatedClusterStateV1(pinnedStateEntry); paginatedClusterState.setFreeListPage(freeListIndex, (int) pageIndex); } finally { releasePageFromWrite(atomicOperation, pinnedStateEntry); } } private static int calculateFreePageIndex(final OClusterPage localPage) { int newFreePageIndex; if (localPage.isEmpty()) { newFreePageIndex = FREE_LIST_SIZE - 1; } else { newFreePageIndex = (localPage.getMaxRecordSize() - (ONE_KB - 1)) / ONE_KB; newFreePageIndex -= LOWEST_FREELIST_BOUNDARY; } return newFreePageIndex; } private void initCusterState(final OAtomicOperation atomicOperation) throws IOException { final 
OCacheEntry stateEntry; if (getFilledUpTo(atomicOperation, fileId) == 0) { stateEntry = addPage(atomicOperation, fileId); } else { stateEntry = loadPageForWrite(atomicOperation, fileId, STATE_ENTRY_INDEX, false, false); } assert stateEntry.getPageIndex() == 0; try { final OPaginatedClusterStateV1 paginatedClusterState = new OPaginatedClusterStateV1(stateEntry); paginatedClusterState.setSize(0); paginatedClusterState.setRecordsSize(0); paginatedClusterState.setFileSize(0); for (int i = 0; i < FREE_LIST_SIZE; i++) { paginatedClusterState.setFreeListPage(i, -1); } } finally { releasePageFromWrite(atomicOperation, stateEntry); } } private static OPhysicalPosition[] convertToPhysicalPositions(final long[] clusterPositions) { final OPhysicalPosition[] positions = new OPhysicalPosition[clusterPositions.length]; for (int i = 0; i < positions.length; i++) { final OPhysicalPosition physicalPosition = new OPhysicalPosition(); physicalPosition.clusterPosition = clusterPositions[i]; positions[i] = physicalPosition; } return positions; } public OPaginatedClusterDebug readDebug(final long clusterPosition) throws IOException { final OPaginatedClusterDebug debug = new OPaginatedClusterDebug(); debug.clusterPosition = clusterPosition; debug.fileId = fileId; final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); final OClusterPositionMapBucket.PositionEntry positionEntry = clusterPositionMap.get(clusterPosition, 1, atomicOperation); if (positionEntry == null) { debug.empty = true; return debug; } long pageIndex = positionEntry.getPageIndex(); int recordPosition = positionEntry.getRecordPosition(); debug.pages = new ArrayList<>(2); int contentSize = 0; long nextPagePointer; boolean firstEntry = true; do { final OClusterPageDebug debugPage = new OClusterPageDebug(); debugPage.pageIndex = pageIndex; final OCacheEntry cacheEntry = loadPageForRead(atomicOperation, fileId, pageIndex, false); try { final OClusterPage localPage = new OClusterPage(cacheEntry); 
if (localPage.isDeleted(recordPosition)) { if (debug.pages.isEmpty()) { debug.empty = true; return debug; } else { throw new OPaginatedClusterException( "Content of record " + new ORecordId(id, clusterPosition) + " was broken", this); } } debugPage.inPagePosition = recordPosition; debugPage.inPageSize = localPage.getRecordSize(recordPosition); final byte[] content = localPage.getRecordBinaryValue(recordPosition, 0, debugPage.inPageSize); assert content != null; debugPage.content = content; if (firstEntry && content[content.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE] == 0) { debug.empty = true; return debug; } debug.pages.add(debugPage); nextPagePointer = OLongSerializer.INSTANCE.deserializeNative( content, content.length - OLongSerializer.LONG_SIZE); contentSize += content.length - OLongSerializer.LONG_SIZE - OByteSerializer.BYTE_SIZE; firstEntry = false; } finally { releasePageFromRead(atomicOperation, cacheEntry); } pageIndex = getPageIndex(nextPagePointer); recordPosition = getRecordPosition(nextPagePointer); } while (nextPagePointer >= 0); debug.contentSize = contentSize; return debug; } public RECORD_STATUS getRecordStatus(final long clusterPosition) throws IOException { final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation(); acquireSharedLock(); try { final byte status = clusterPositionMap.getStatus(clusterPosition, atomicOperation); switch (status) { case OClusterPositionMapBucket.NOT_EXISTENT: return RECORD_STATUS.NOT_EXISTENT; case OClusterPositionMapBucket.ALLOCATED: return RECORD_STATUS.ALLOCATED; case OClusterPositionMapBucket.FILLED: return RECORD_STATUS.PRESENT; case OClusterPositionMapBucket.REMOVED: return RECORD_STATUS.REMOVED; } // UNREACHABLE return null; } finally { releaseSharedLock(); } } @Override public void acquireAtomicExclusiveLock() { atomicOperationsManager.acquireExclusiveLockTillOperationComplete(this); } @Override public String toString() { return "plocal cluster: " + getName(); } 
/**
 * Returns the next page of cluster entries whose positions are strictly greater than
 * {@code lastPosition}, together with the new high-water mark, or {@code null} when no
 * further positions exist. Each entry pairs a cluster position with the raw record read
 * from disk. Runs under the atomic-operation read lock and the shared component lock.
 *
 * @param lastPosition position after which browsing resumes (exclusive)
 * @return a browse page with the records found, or {@code null} if browsing is exhausted
 * @throws IOException if reading pages from the underlying file fails
 */
@Override
public OClusterBrowsePage nextPage(final long lastPosition) throws IOException {
  atomicOperationsManager.acquireReadLock(this);
  try {
    acquireSharedLock();
    try {
      final OAtomicOperation atomicOperation = atomicOperationsManager.getCurrentOperation();
      // Fetch all position-map entries located after lastPosition; batch size is
      // determined by the position map itself.
      final OClusterPositionMapV1.OClusterPositionEntry[] nextPositions =
          clusterPositionMap.higherPositionsEntries(lastPosition, atomicOperation);
      if (nextPositions.length > 0) {
        // The last returned position becomes the cursor for the next call.
        final long newLastPosition = nextPositions[nextPositions.length - 1].getPosition();
        final List<OClusterBrowseEntry> nexv = new ArrayList<>(nextPositions.length);
        for (final OClusterPositionMapV1.OClusterPositionEntry pos : nextPositions) {
          // pageCount of 1: read a single page per record unless the record chains onward.
          final ORawBuffer buff =
              internalReadRecord(
                  pos.getPosition(), pos.getPage(), pos.getOffset(), 1, atomicOperation);
          nexv.add(new OClusterBrowseEntry(pos.getPosition(), buff));
        }
        return new OClusterBrowsePage(nexv, newLastPosition);
      } else {
        // No positions beyond lastPosition: browsing is complete.
        return null;
      }
    } finally {
      releaseSharedLock();
    }
  } finally {
    atomicOperationsManager.releaseReadLock(this);
  }
}
// Closes the enclosing paginated-cluster class (opened before this chunk).
}
apache-2.0
KnowledgeAndAction/-
app/src/main/java/cn/hicc/information/sensorsignin/utils/Utils.java
3608
package cn.hicc.information.sensorsignin.utils;

import android.app.Activity;
import android.app.ActivityManager;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.provider.Settings;
import android.widget.TextView;

import java.util.List;

import cn.hicc.information.sensorsignin.MyApplication;

/**
 * Utility class: network-availability checks, a "no network" settings dialog,
 * and a running-service lookup.
 */
public class Utils {

    // Utility class with only static members; prevent instantiation.
    private Utils() {
    }

    /**
     * Checks the current network state.
     *
     * @param context context used to look up the connectivity service
     * @return {@code false} if no network is connected, {@code true} if at least one is
     */
    public static boolean isNetworkAvalible(Context context) {
        // Obtain the system connectivity manager.
        ConnectivityManager connectivityManager =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        if (connectivityManager == null) {
            return false;
        }
        // NOTE(review): getAllNetworkInfo() is deprecated on newer Android APIs;
        // kept for behavioral compatibility — consider getActiveNetwork/NetworkCallback.
        NetworkInfo[] networks = connectivityManager.getAllNetworkInfo();
        if (networks != null) {
            for (NetworkInfo info : networks) {
                // Any network in the CONNECTED state counts as "available".
                if (info.getState() == NetworkInfo.State.CONNECTED) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * If no network is available, shows a dialog offering to open the system
     * wireless settings screen.
     *
     * @param activity activity used as dialog host and settings launcher
     */
    public static void checkNetwork(final Activity activity) {
        if (!isNetworkAvalible(activity)) {
            TextView msg = new TextView(activity);
            msg.setText(" 当前没有可以使用的网络,部分功能可能无法使用,请设置网络!");
            new AlertDialog.Builder(activity)
                    //.setIcon(R.mipmap.ic_launcher)
                    .setTitle("网络状态提示")
                    .setView(msg)
                    .setNegativeButton("朕知道了", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            // Dismiss only; nothing else to do.
                        }
                    })
                    .setPositiveButton("开启网络", new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int whichButton) {
                            // Jump to the system wireless settings screen.
                            activity.startActivityForResult(
                                    new Intent(Settings.ACTION_WIRELESS_SETTINGS), 0);
                        }
                    })
                    .create()
                    .show();
        }
    }

    /**
     * Checks whether a service with the given fully qualified class name is running.
     *
     * @param name fully qualified service class name
     * @return {@code true} if a matching service is among the first 300 running services
     */
    public static boolean ServiceIsWorked(String name) {
        ActivityManager myManager =
                (ActivityManager)
                        MyApplication.getContext().getSystemService(Context.ACTIVITY_SERVICE);
        // Guard against a null service handle or a null result list (both possible
        // per the platform API) instead of risking an NPE.
        if (myManager == null) {
            return false;
        }
        // Program to the List interface; the original's cast to ArrayList was unnecessary.
        List<ActivityManager.RunningServiceInfo> runningServices =
                myManager.getRunningServices(300);
        if (runningServices == null) {
            return false;
        }
        for (ActivityManager.RunningServiceInfo info : runningServices) {
            // getClassName() already returns a String; no toString() needed.
            if (info.service.getClassName().equals(name)) {
                return true;
            }
        }
        return false;
    }
}
apache-2.0
eric-stanley/cgeo
main/src/cgeo/calendar/ICalendar.java
895
package cgeo.calendar;

/**
 * Constants shared between c:geo and the external calendar add-on: the add-on's
 * market URI, the intent action it handles, and the names of the URI parameters
 * used to hand over cache data.
 *
 * <p>Interface fields are implicitly {@code public static final}; the redundant
 * modifiers from the original have been dropped.
 */
public interface ICalendar {
    /** Market URI for installing the calendar add-on. */
    String CALENDAR_ADDON_URI = "market://details?id=cgeo.calendar";

    /** Intent action handled by the calendar add-on. */
    String INTENT = "cgeo.calendar.RESERVE";

    /** Scheme of the data URI passed with the intent. */
    String URI_SCHEME = "add";

    /** Host of the data URI passed with the intent. */
    String URI_HOST = "cgeo.org";

    /** Cache short description. */
    String PARAM_SHORT_DESC = "shortDesc";

    /** Cache hidden date in milliseconds. */
    String PARAM_HIDDEN_DATE = "hiddenDate";

    /** Cache URL. */
    String PARAM_URL = "url";

    /** Personal note. */
    String PARAM_NOTE = "note";

    /** Cache name. */
    String PARAM_NAME = "name";

    /** Cache location, or empty string. */
    String PARAM_LOCATION = "location";

    /** Cache coordinates, or empty string. */
    String PARAM_COORDS = "coords";

    /** Time of start, in minutes. */
    String PARAM_START_TIME_MINUTES = "time";
}
apache-2.0
googleads/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202202/CustomFieldServiceInterfaceupdateCustomFieldOptions.java
2990
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE(review): this appears to be JAXB code generated from the Ad Manager
// v202202 WSDL — prefer regenerating over hand-editing (confirm with the build).

package com.google.api.ads.admanager.jaxws.v202202;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 *
 *             Updates the specified {@link CustomFieldOption} objects.
 *
 *             @param customFieldOptions the custom field options to update
 *             @return the updated custom field options
 *
 *
 * <p>Java class for updateCustomFieldOptions element declaration.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;element name="updateCustomFieldOptions">
 *   &lt;complexType>
 *     &lt;complexContent>
 *       &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *         &lt;sequence>
 *           &lt;element name="customFieldOptions" type="{https://www.google.com/apis/ads/publisher/v202202}CustomFieldOption" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;/sequence>
 *       &lt;/restriction>
 *     &lt;/complexContent>
 *   &lt;/complexType>
 * &lt;/element>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "customFieldOptions"
})
@XmlRootElement(name = "updateCustomFieldOptions")
public class CustomFieldServiceInterfaceupdateCustomFieldOptions {

    // Lazily initialized by the accessor below; may be null until first access.
    protected List<CustomFieldOption> customFieldOptions;

    /**
     * Gets the value of the customFieldOptions property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the customFieldOptions property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getCustomFieldOptions().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link CustomFieldOption }
     *
     *
     */
    public List<CustomFieldOption> getCustomFieldOptions() {
        // Lazy initialization — standard JAXB-generated pattern.
        if (customFieldOptions == null) {
            customFieldOptions = new ArrayList<CustomFieldOption>();
        }
        return this.customFieldOptions;
    }

}
apache-2.0
BolkunetsAlexandr/java-training
task 5/autoservice-api/src/com/senla/bolkunets/autoservice/api/managers/IOrdersManager.java
1054
package com.senla.bolkunets.autoservice.api.managers; import java.util.Comparator; import java.util.Date; import java.util.List; import com.senla.bolkunets.autoservice.api.beans.IOrder; import com.senla.bolkunets.autoservice.api.enums.OrderStatus; public interface IOrdersManager { List<IOrder> getOrders(); void addOrder(IOrder order); boolean changeOrderStatus(long id, OrderStatus orderStatus); List<IOrder> getSortedListProgressOrders(Comparator<IOrder> orderComparator); List<IOrder> getSortedListAllOrders(Comparator<IOrder> orderComparator); IOrder getOrder(long idMaster); void setGarageToOrder(long idGarage, long idOrder); void removeGarageFromOrder(long idOrder); boolean addMasterToOrder(long idMaster, long idOrder); void removeMasterFromOrder(long idMaster, long idOrder); List<IOrder> getOrders(OrderStatus status, Date dateLeft, Date dateRight, Comparator<IOrder> comp); public int getCountFreePlace(Date date); Date getNextFreeDate(); void shiftOrders(long orderId, int countDay); void save(); }
apache-2.0
rylexr/android-simple-search-app
app/src/androidTest/java/com/tinbytes/simplesearchapp/ApplicationTest.java
950
/* * Copyright 2015 Tinbytes Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tinbytes.simplesearchapp; import android.app.Application; import android.test.ApplicationTestCase; /** * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a> */ public class ApplicationTest extends ApplicationTestCase<Application> { public ApplicationTest() { super(Application.class); } }
apache-2.0
opensingular/singular-server
requirement/requirement-module/src/main/java/org/opensingular/requirement/module/admin/healthsystem/extension/JobsAdminEntry.java
1087
/* * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opensingular.requirement.module.admin.healthsystem.extension; import org.apache.wicket.markup.html.panel.Panel; import org.opensingular.requirement.module.admin.healthsystem.panel.JobPanel; public class JobsAdminEntry implements AdministrationEntryExtension { @Override public String name() { return "Jobs"; } @Override public Panel makePanel(String id) { return new JobPanel(id); } }
apache-2.0
smanvi-pivotal/geode
geode-core/src/test/java/org/apache/geode/internal/cache/RollbackFunction.java
5585
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.logging.log4j.Logger;

import org.apache.geode.DataSerializable;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.CacheTransactionManager;
import org.apache.geode.cache.TransactionDataNodeHasDepartedException;
import org.apache.geode.cache.TransactionId;
import org.apache.geode.cache.execute.Execution;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.internal.cache.TXId;
import org.apache.geode.internal.logging.LogService;

/**
 * This function can be used by GemFire clients and peers to rollback an existing transaction. A
 * {@link TransactionId} corresponding to the transaction to be rolledback must be provided as an
 * argument while invoking this function.<br />
 *
 * This function should execute only on one server. If the transaction is not hosted on the server
 * where the function is invoked then this function decides to invoke a
 * {@link NestedTransactionFunction} which executes on the member where transaction is hosted.<br />
 *
 * This function returns a single Boolean as result, whose value is <code>Boolean.TRUE</code> if the
 * transaction rolled back successfully otherwise the return value is
 * <code>Boolean.FALSE</code>.<br />
 *
 * To execute this function, it is recommended to use the {@link Execution} obtained by using
 * TransactionFunctionService. <br />
 *
 * To summarize, this function should be used as follows:
 *
 * <pre>
 * Execution exe = TransactionFunctionService.onTransaction(txId);
 * List l = (List) exe.execute(rollbackFunction).getResult();
 * Boolean result = (Boolean) l.get(0);
 * </pre>
 *
 * This function is <b>not</b> registered on the cache servers by default, and it is the user's
 * responsibility to register this function. see {@link FunctionService#registerFunction(Function)}
 *
 * @since GemFire 6.6.1
 */
public class RollbackFunction implements Function, DataSerializable {
  private static final Logger logger = LogService.getLogger();

  private static final long serialVersionUID = 1377183180063184795L;

  public RollbackFunction() {}

  // Function contract: a Boolean result is always sent back to the caller.
  public boolean hasResult() {
    return true;
  }

  /**
   * Rolls back the transaction identified by the {@link TXId} passed as the function argument.
   *
   * <p>If the invoking member hosts the transaction, it is resumed and rolled back locally;
   * otherwise a {@link NestedTransactionFunction} is dispatched to the hosting member. A single
   * Boolean is sent via the result sender: {@code true} when the rollback happened, {@code false}
   * when the transaction could not be resumed locally.
   *
   * @throws ClassCastException if the argument is not a {@link TXId}
   * @throws TransactionDataNodeHasDepartedException if the remote execution fails — presumably
   *         because the hosting member left; the original FunctionException is not chained
   *         (NOTE(review): cause is dropped; consider passing it along if the exception type
   *         supports it)
   */
  public void execute(FunctionContext context) {
    Cache cache = CacheFactory.getAnyInstance();
    TXId txId = null;
    try {
      txId = (TXId) context.getArguments();
    } catch (ClassCastException e) {
      // Log a usage hint before propagating the bad-argument failure to the caller.
      logger.info(
          "RollbackFunction should be invoked with a TransactionId as an argument i.e. setArguments(txId).execute(function)");
      throw e;
    }
    // The member that originated (hosts) the transaction is encoded in the TXId.
    DistributedMember member = txId.getMemberId();
    Boolean result = false;
    final boolean isDebugEnabled = logger.isDebugEnabled();
    if (cache.getDistributedSystem().getDistributedMember().equals(member)) {
      // Local case: this member hosts the transaction.
      if (isDebugEnabled) {
        logger.debug("RollbackFunction: for transaction: {} rolling back locally", txId);
      }
      CacheTransactionManager txMgr = cache.getCacheTransactionManager();
      // tryResume returns false when the transaction is unknown/not resumable here;
      // in that case result stays false and no rollback is attempted.
      if (txMgr.tryResume(txId)) {
        if (isDebugEnabled) {
          logger.debug("RollbackFunction: resumed transaction: {}", txId);
        }
        txMgr.rollback();
        result = true;
      }
    } else {
      // Remote case: delegate to NestedTransactionFunction on the hosting member.
      // NOTE(review): raw ArrayList/List kept as-is; args = [txId, ROLLBACK opcode].
      ArrayList args = new ArrayList();
      args.add(txId);
      args.add(NestedTransactionFunction.ROLLBACK);
      Execution ex = FunctionService.onMember(member).setArguments(args);
      if (isDebugEnabled) {
        logger.debug(
            "RollbackFunction: for transaction: {} executing NestedTransactionFunction on member: {}",
            txId, member);
      }
      try {
        List list = (List) ex.execute(new NestedTransactionFunction()).getResult();
        result = (Boolean) list.get(0);
      } catch (FunctionException fe) {
        // Remote failure is surfaced as a departed-node condition to the caller.
        throw new TransactionDataNodeHasDepartedException("Could not Rollback on member:" + member);
      }
    }
    if (isDebugEnabled) {
      logger.debug("RollbackFunction: for transaction: {} returning result: {}", txId, result);
    }
    context.getResultSender().lastResult(result);
  }

  public String getId() {
    return getClass().getName();
  }

  // Rollback must run on the member owning the data, not on a copy.
  public boolean optimizeForWrite() {
    return true;
  }

  public boolean isHA() {
    // GEM-207
    return true;
  }

  // This function carries no instance state, so (de)serialization is a no-op.
  @Override
  public void toData(DataOutput out) throws IOException {

  }

  @Override
  public void fromData(DataInput in) throws IOException, ClassNotFoundException {

  }
}
apache-2.0