answer
stringlengths
17
10.2M
package <%=packageName%>.aop.logging;

import <%=packageName%>.config.Constants;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.AfterThrowing;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;

import javax.inject.Inject;
import java.util.Arrays;

/**
 * Aspect for logging execution of service and repository Spring components.
 */
@Aspect
public class LoggingAspect {

    private final Logger log = LoggerFactory.getLogger(this.getClass());

    @Inject
    private Environment env;

    /**
     * Pointcut matching all beans in the application's repository and service packages.
     */
    // NOTE(review): "Poincut" is a typo ("Pointcut"), but the method name is
    // referenced by the annotation expressions below and possibly by external
    // aspects, so it is deliberately left unchanged here.
    @Pointcut("within(<%=packageName%>.repository..*) || within(<%=packageName%>.service..*)")
    public void loggingPoincut() {}

    /**
     * Logs exceptions thrown from matched components. In the development profile
     * the full exception (with stack trace) is logged; otherwise only the cause.
     *
     * @param joinPoint join point of the method that threw
     * @param e         the thrown exception
     */
    @AfterThrowing(pointcut = "loggingPoincut()", throwing = "e")
    public void logAfterThrowing(JoinPoint joinPoint, Throwable e) {
        if (env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT)) {
            // trailing Throwable argument makes SLF4J print the stack trace
            log.error("Exception in {}.{}() with cause = {}",
                    joinPoint.getSignature().getDeclaringTypeName(),
                    joinPoint.getSignature().getName(), e.getCause(), e);
        } else {
            log.error("Exception in {}.{}() with cause = {}",
                    joinPoint.getSignature().getDeclaringTypeName(),
                    joinPoint.getSignature().getName(), e.getCause());
        }
    }

    /**
     * Logs entry and exit of matched methods at DEBUG level.
     *
     * @param joinPoint the intercepted invocation
     * @return whatever the target method returns
     * @throws Throwable whatever the target method throws
     */
    @Around("loggingPoincut()")
    public Object logAround(ProceedingJoinPoint joinPoint) throws Throwable {
        // guard so Arrays.toString(...) is not evaluated when DEBUG is disabled
        if (log.isDebugEnabled()) {
            log.debug("Enter: {}.{}() with argument[s] = {}",
                    joinPoint.getSignature().getDeclaringTypeName(),
                    joinPoint.getSignature().getName(),
                    Arrays.toString(joinPoint.getArgs()));
        }
        try {
            Object result = joinPoint.proceed();
            if (log.isDebugEnabled()) {
                log.debug("Exit: {}.{}() with result = {}",
                        joinPoint.getSignature().getDeclaringTypeName(),
                        joinPoint.getSignature().getName(), result);
            }
            return result;
        } catch (IllegalArgumentException e) {
            log.error("Illegal argument: {} in {}.{}()",
                    Arrays.toString(joinPoint.getArgs()),
                    joinPoint.getSignature().getDeclaringTypeName(),
                    joinPoint.getSignature().getName());
            throw e;
        }
    }
}
package com.dataiku.wt1.controllers;

import java.io.IOException;

import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Returns the global (third-party) visitor id, either as JSON or as a JSONP
 * callback invocation.
 */
@SuppressWarnings("serial")
public class GlobalIdServlet extends HttpServlet {
    public static final String FUN_PARAM = "fun";

    /*
     * Called with ?fun=FUN
     * Returns Javascript script calling FUN("my_global_visitor_id")
     */
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        String globalVisitorIdVal = PixelServlet.getThirdPartyCookie(req, resp, true, null);
        if (globalVisitorIdVal == null) {
            globalVisitorIdVal = "";
        }
        // the id ends up inside a quoted JSON/JS string, so it must be escaped
        String safeId = escapeJsString(globalVisitorIdVal);

        String fun = req.getParameter(FUN_PARAM);
        if (fun == null) {
            resp.setContentType("application/json");
            resp.setCharacterEncoding("UTF-8");
            resp.getWriter().write("{\"id\": \"" + safeId + "\" }");
        } else {
            // SECURITY: "fun" is untrusted request input echoed into executable
            // JavaScript (JSONP). Restrict it to identifier characters to
            // prevent script injection.
            if (!isSafeCallbackName(fun)) {
                resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid callback name");
                return;
            }
            resp.setContentType("application/x-javascript");
            resp.setCharacterEncoding("UTF-8");
            resp.getWriter().write(fun + "(\"" + safeId + "\");");
        }
    }

    /** Accepts only dotted JS identifiers (letters, digits, '_', '$', '.'). */
    private static boolean isSafeCallbackName(String s) {
        if (s.isEmpty()) {
            return false;
        }
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            boolean ok = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                    || (c >= '0' && c <= '9') || c == '_' || c == '$' || c == '.';
            if (!ok) {
                return false;
            }
        }
        return true;
    }

    /** Escapes a value for inclusion inside a double-quoted JSON/JS string literal. */
    private static String escapeJsString(String s) {
        StringBuilder sb = new StringBuilder(s.length());
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            switch (c) {
                case '\\': sb.append("\\\\"); break;
                case '"':  sb.append("\\\""); break;
                case '<':  sb.append("\\u003c"); break; // avoid closing a <script> tag
                case '>':  sb.append("\\u003e"); break;
                default:
                    if (c < 0x20) {
                        sb.append(String.format("\\u%04x", (int) c));
                    } else {
                        sb.append(c);
                    }
            }
        }
        return sb.toString();
    }
}
// Administrator of the National Aeronautics and Space Administration
// This software is distributed under the NASA Open Source Agreement
// (NOSA), version 1.3. The NOSA has been approved by the Open Source
// Initiative. See the file NOSA-1.3-JPF at the top of the distribution
// directory tree for the complete NOSA document.
// KIND, EITHER EXPRESSED, IMPLIED, OR STATUTORY, INCLUDING, BUT NOT
// SPECIFICATIONS, ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR
// DOCUMENTATION, IF PROVIDED, WILL CONFORM TO THE SUBJECT SOFTWARE.

package gov.nasa.jpf.jvm;

import gov.nasa.jpf.Config;
import gov.nasa.jpf.jvm.bytecode.Instruction;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;

/**
 * MJI NativePeer class for java.lang.Class library abstraction.
 *
 * Native peer methods model java.lang.Class behavior inside JPF. The common
 * protocol used throughout: if a required class still needs static
 * initialization, the peer calls env.repeatInvocation() and returns a dummy
 * value; JPF re-executes the instruction after the clinit has run.
 */
public class JPF_java_lang_Class {

  static final String FIELD_CLASSNAME = "java.lang.reflect.Field";
  static final String METHOD_CLASSNAME = "java.lang.reflect.Method";
  static final String CONSTRUCTOR_CLASSNAME = "java.lang.reflect.Constructor";

  public static void init (Config conf){
    // we create Method and Constructor objects, so we better make sure these
    // classes are initialized (they already might be)
    JPF_java_lang_reflect_Method.init(conf);
    JPF_java_lang_reflect_Constructor.init(conf);
  }

  public static boolean isArray____Z (MJIEnv env, int robj) {
    ClassInfo ci = env.getReferredClassInfo(robj);
    return ci.isArray();
  }

  public static int getComponentType____Ljava_lang_Class_2 (MJIEnv env, int robj) {
    if (isArray____Z(env, robj)) {
      ThreadInfo ti = env.getThreadInfo();
      Instruction insn = ti.getPC();
      ClassInfo ci = env.getReferredClassInfo(robj).getComponentClassInfo();

      if (insn.requiresClinitExecution(ti, ci)) {
        env.repeatInvocation();
        return MJIEnv.NULL;
      }

      return ci.getClassObjectRef();
    }

    return MJIEnv.NULL;
  }

  public static boolean isInstance__Ljava_lang_Object_2__Z (MJIEnv env, int robj, int r1) {
    ElementInfo sei = env.getClassElementInfo(robj);
    ClassInfo ci = sei.getClassInfo();
    ClassInfo ciOther = env.getClassInfo(r1);
    return (ciOther.isInstanceOf(ci.getName()));
  }

  public static boolean isInterface____Z (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    return ci.isInterface();
  }

  public static boolean isAssignableFrom__Ljava_lang_Class_2__Z (MJIEnv env, int rcls, int r1) {
    ElementInfo sei1 = env.getClassElementInfo(rcls);
    ClassInfo ci1 = sei1.getClassInfo();
    ElementInfo sei2 = env.getClassElementInfo(r1);
    ClassInfo ci2 = sei2.getClassInfo();
    return ci2.isInstanceOf(ci1.getName());
  }

  public static int getAnnotations_____3Ljava_lang_annotation_Annotation_2 (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    AnnotationInfo[] ai = ci.getAnnotations();

    try {
      return env.newAnnotationProxies(ai);
    } catch (ClinitRequired x){
      env.handleClinitRequest(x.getRequiredClassInfo());
      return MJIEnv.NULL;
    }
  }

  public static int getAnnotation__Ljava_lang_Class_2__Ljava_lang_annotation_Annotation_2 (MJIEnv env,
                                                                                           int robj, int annoClsRef){
    ClassInfo ci = env.getReferredClassInfo(robj);
    ClassInfo aci = env.getReferredClassInfo(annoClsRef);

    AnnotationInfo ai = ci.getAnnotation(aci.getName());
    if (ai != null){
      ClassInfo aciProxy = ClassInfo.getAnnotationProxy(aci);
      try {
        return env.newAnnotationProxy(aciProxy, ai);
      } catch (ClinitRequired x){
        env.handleClinitRequest(x.getRequiredClassInfo());
        return MJIEnv.NULL;
      }
    } else {
      return MJIEnv.NULL;
    }
  }

  public static int getPrimitiveClass__Ljava_lang_String_2__Ljava_lang_Class_2 (MJIEnv env,
                                                                                int rcls, int stringRef) {
    String clsName = env.getStringObject(stringRef);

    // we don't really have to check for a valid class name here, since
    // this is a package default method that just gets called from
    // the clinit of box classes

    // note this does NOT return the box class (e.g. java.lang.Integer), which
    // is a normal, functional class, but a primitive class (e.g. 'int') that
    // is rather a strange beast (not even Object derived)
    StaticArea sa = env.getStaticArea();
    StaticElementInfo ei = sa.get(clsName);

    int cref = ei.getClassObjectRef();
    env.setBooleanField(cref, "isPrimitive", true);

    return cref;
  }

  public static boolean desiredAssertionStatus____Z (MJIEnv env, int robj) {
    ClassInfo ci = env.getReferredClassInfo(robj);
    return ci.areAssertionsEnabled();
  }

  public static int getClassObject (MJIEnv env, ClassInfo ci){
    ThreadInfo ti = env.getThreadInfo();
    Instruction insn = ti.getPC();

    if (insn.requiresClinitExecution(ti, ci)) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    StaticElementInfo ei = ci.getStaticElementInfo();
    int ref = ei.getClassObjectRef();

    return ref;
  }

  public static int forName__Ljava_lang_String_2__Ljava_lang_Class_2 (MJIEnv env,
                                                                      int rcls, int clsNameRef) {
    String clsName = env.getStringObject(clsNameRef);
    ClassInfo ci = ClassInfo.tryGetResolvedClassInfo(clsName);

    if (ci == null){
      env.throwException("java.lang.ClassNotFoundException", clsName);
      return MJIEnv.NULL;
    }

    return getClassObject(env, ci);
  }

  /**
   * this is an example of a native method issuing direct calls - otherwise known
   * as a round trip.
   * We don't have to deal with class init here anymore, since this is called
   * via the class object of the class to instantiate
   */
  public static int newInstance____Ljava_lang_Object_2 (MJIEnv env, int robj) {
    ThreadInfo ti = env.getThreadInfo();
    StackFrame frame = ti.getReturnedDirectCall();

    if (frame != null){
      // re-executing after the direct <init> call - the object ref was cached
      // on the direct call frame (see below)
      return frame.pop();

    } else {
      ClassInfo ci = env.getReferredClassInfo(robj);   // what are we

      if (ci.requiresClinitExecution(ti)) {
        env.repeatInvocation();
        return MJIEnv.NULL;
      }

      if (ci.isAbstract()){ // not allowed to instantiate
        env.throwException("java.lang.InstantiationException");
        return MJIEnv.NULL;
      }

      int objRef = env.newObject(ci);  // create the thing

      MethodInfo mi = ci.getMethod("<init>()V", true);
      if (mi != null) { // direct call required for initialization
        // <2do> - still need to handle protected
        if (mi.isPrivate()){
          env.throwException("java.lang.IllegalAccessException",
                             "cannot access non-public member of class " + ci.getName());
          return MJIEnv.NULL;
        }

        MethodInfo stub = mi.createDirectCallStub("[init]");
        frame = new DirectCallStackFrame(stub, 2, 0);
        frame.push(objRef, true);
        // Hmm, we borrow the DirectCallStackFrame to cache the object ref
        // (don't try that with a normal StackFrame)
        frame.dup();
        ti.pushFrame(frame);

        return MJIEnv.NULL;

      } else {
        return objRef; // no initialization required
      }
    }
  }

  public static int getSuperclass____Ljava_lang_Class_2 (MJIEnv env, int robj) {
    ClassInfo ci = env.getReferredClassInfo(robj);
    ClassInfo sci = ci.getSuperClass();
    if (sci != null) {
      return sci.getClassObjectRef();
    } else {
      return MJIEnv.NULL;
    }
  }

  public static int getClassLoader____Ljava_lang_ClassLoader_2 (MJIEnv env, int objref){
    ClassInfo ci = env.getReferredClassInfo(objref);
    ClassLoaderInfo cli = ci.getClassLoaderInfo();

    // if the class is loaded by a system classloader, this should return null
    if (cli.isSystemClassLoader()) {
      return MJIEnv.NULL;
    }

    return cli.objRef;
  }

  /**
   * Looks up a method by name and parameter type array, building the JNI-style
   * signature from the Class[] argument, and wraps it in a Method/Constructor
   * model object.
   */
  static int getMethod (MJIEnv env, int clsRef, ClassInfo ciMethod, String mname,
                        int argTypesRef, boolean isRecursiveLookup, boolean publicOnly) {
    ClassInfo ci = env.getReferredClassInfo(clsRef);

    StringBuilder sb = new StringBuilder(mname);
    sb.append('(');
    int nParams = argTypesRef != MJIEnv.NULL ? env.getArrayLength(argTypesRef) : 0;
    for (int i=0; i<nParams; i++) {
      int cRef = env.getReferenceArrayElement(argTypesRef, i);
      ClassInfo cit = env.getReferredClassInfo(cRef);
      String tname = cit.getName();
      String tcode = tname;
      tcode = Types.getTypeSignature(tcode, false);
      sb.append(tcode);
    }
    sb.append(')');
    String fullMthName = sb.toString();

    MethodInfo mi = ci.getReflectionMethod(fullMthName, isRecursiveLookup);
    if (mi == null || (publicOnly && !mi.isPublic())) {
      env.throwException("java.lang.NoSuchMethodException", ci.getName() + '.' + fullMthName);
      return MJIEnv.NULL;
    } else {
      return createMethodObject(env, ciMethod, mi);
    }
  }

  static int createMethodObject (MJIEnv env, ClassInfo objectCi, MethodInfo mi) {
    // NOTE - we rely on Constructor and Method peers being initialized
    if (mi.isCtor()){
      return JPF_java_lang_reflect_Constructor.createConstructorObject(env, objectCi, mi);
    } else {
      return JPF_java_lang_reflect_Method.createMethodObject(env, objectCi, mi);
    }
  }

  public static int getDeclaredMethod__Ljava_lang_String_2_3Ljava_lang_Class_2__Ljava_lang_reflect_Method_2 (MJIEnv env, int clsRef,
                                                                                                             int nameRef, int argTypesRef) {
    ClassInfo mci = getInitializedClassInfo(env, METHOD_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    String mname = env.getStringObject(nameRef);
    return getMethod(env, clsRef, mci, mname, argTypesRef, false, false);
  }

  public static int getDeclaredConstructor___3Ljava_lang_Class_2__Ljava_lang_reflect_Constructor_2 (MJIEnv env,
                                                                                                    int clsRef, int argTypesRef){
    ClassInfo mci = getInitializedClassInfo(env, CONSTRUCTOR_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    int ctorRef = getMethod(env, clsRef, mci, "<init>", argTypesRef, false, false);
    return ctorRef;
  }

  public static int getMethod__Ljava_lang_String_2_3Ljava_lang_Class_2__Ljava_lang_reflect_Method_2 (MJIEnv env, int clsRef,
                                                                                                     int nameRef, int argTypesRef) {
    ClassInfo mci = getInitializedClassInfo(env, METHOD_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    String mname = env.getStringObject(nameRef);
    return getMethod(env, clsRef, mci, mname, argTypesRef, true, true);
  }

  /** Collects all public non-ctor/non-clinit methods of ci, its supers and interfaces. */
  private static void addDeclaredMethodsRec (HashMap<String,MethodInfo> methods, ClassInfo ci){
    ClassInfo sci = ci.getSuperClass();
    if (sci != null){
      addDeclaredMethodsRec(methods, sci);
    }

    for (String ifcName : ci.getInterfaces()){
      ClassInfo ici = ClassInfo.getResolvedClassInfo(ifcName); // has to be already defined, so no exception
      addDeclaredMethodsRec(methods, ici);
    }

    for (MethodInfo mi : ci.getDeclaredMethodInfos()) {
      // filter out non-public, <clinit> and <init>
      if (mi.isPublic() && (mi.getName().charAt(0) != '<')) {
        String mname = mi.getUniqueName();
        // class methods override interface defaults with the same unique name
        if (!(ci.isInterface() && methods.containsKey(mname))){
          methods.put(mname, mi);
        }
      }
    }
  }

  public static int getMethods_____3Ljava_lang_reflect_Method_2 (MJIEnv env, int objref) {
    ClassInfo mci = getInitializedClassInfo(env, METHOD_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(objref);

    // collect all the public, non-ctor instance methods
    if (!ci.isPrimitive()) {
      HashMap<String,MethodInfo> methods = new HashMap<String,MethodInfo>();
      addDeclaredMethodsRec(methods, ci);

      int n = methods.size();
      int aref = env.newObjectArray("Ljava/lang/reflect/Method;", n);
      int i = 0;

      for (MethodInfo mi : methods.values()){
        int mref = createMethodObject(env, mci, mi);
        env.setReferenceArrayElement(aref, i++, mref);
      }

      return aref;

    } else {
      return env.newObjectArray("Ljava/lang/reflect/Method;", 0);
    }
  }

  public static int getDeclaredMethods_____3Ljava_lang_reflect_Method_2 (MJIEnv env, int objref) {
    ClassInfo mci = getInitializedClassInfo(env, METHOD_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(objref);
    MethodInfo[] methodInfos = ci.getDeclaredMethodInfos();

    // we have to filter out the ctors and the static init
    int nMth = methodInfos.length;
    for (int i=0; i<methodInfos.length; i++){
      if (methodInfos[i].getName().charAt(0) == '<'){
        methodInfos[i] = null;
        nMth--;  // BUGFIX: source was corrupted here ("nMth" without decrement) - the
                 // array must shrink by one for each filtered <init>/<clinit>
      }
    }

    int aref = env.newObjectArray("Ljava/lang/reflect/Method;", nMth);
    for (int i=0, j=0; i<methodInfos.length; i++) {
      if (methodInfos[i] != null){
        int mref = createMethodObject(env, mci, methodInfos[i]);
        env.setReferenceArrayElement(aref, j++, mref);
      }
    }

    return aref;
  }

  static int getConstructors (MJIEnv env, int objref, boolean publicOnly){
    ClassInfo mci = getInitializedClassInfo(env, CONSTRUCTOR_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(objref);
    ArrayList<MethodInfo> ctors = new ArrayList<MethodInfo>();

    // we have to filter out the ctors and the static init
    for (MethodInfo mi : ci.getDeclaredMethodInfos()){
      if (mi.getName().equals("<init>")){
        if (!publicOnly || mi.isPublic()) {
          ctors.add(mi);
        }
      }
    }

    int nCtors = ctors.size();
    int aref = env.newObjectArray("Ljava/lang/reflect/Constructor;", nCtors);

    for (int i=0; i<nCtors; i++){
      env.setReferenceArrayElement(aref, i, createMethodObject(env, mci, ctors.get(i)));
    }

    return aref;
  }

  public static int getConstructors_____3Ljava_lang_reflect_Constructor_2 (MJIEnv env, int objref){
    return getConstructors(env, objref, true);
  }

  public static int getDeclaredConstructors_____3Ljava_lang_reflect_Constructor_2 (MJIEnv env, int objref){
    return getConstructors(env, objref, false);
  }

  public static int getConstructor___3Ljava_lang_Class_2__Ljava_lang_reflect_Constructor_2 (MJIEnv env,
                                                                                            int clsRef, int argTypesRef){
    ClassInfo mci = getInitializedClassInfo(env, CONSTRUCTOR_CLASSNAME);
    if (mci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    // <2do> should only return a public ctor
    return getMethod(env, clsRef, mci, "<init>", argTypesRef, false, true);
  }

  /**
   * Returns the resolved ClassInfo for clsName, or null if it still requires
   * clinit execution (caller is expected to repeatInvocation()).
   */
  static ClassInfo getInitializedClassInfo (MJIEnv env, String clsName){
    ThreadInfo ti = env.getThreadInfo();
    Instruction insn = ti.getPC();
    ClassInfo ci = ClassInfo.getResolvedClassInfo(clsName);

    if (insn.requiresClinitExecution(ti, ci)) {
      return null;
    } else {
      return ci;
    }
  }

  /** Returns all interfaces of ci, or null if any still requires clinit execution. */
  static Set<ClassInfo> getInitializedInterfaces (MJIEnv env, ClassInfo ci){
    ThreadInfo ti = env.getThreadInfo();
    Instruction insn = ti.getPC();

    Set<ClassInfo> ifcs = ci.getAllInterfaceClassInfos();
    for (ClassInfo ciIfc : ifcs){
      if (insn.requiresClinitExecution(ti, ciIfc)) {
        return null;
      }
    }

    return ifcs;
  }

  static int createFieldObject (MJIEnv env, FieldInfo fi, ClassInfo fci){
    int regIdx = JPF_java_lang_reflect_Field.registerFieldInfo(fi);
    int eidx = env.newObject(fci);
    ElementInfo ei = env.getElementInfo(eidx);
    ei.setIntField("regIdx", regIdx);

    return eidx;
  }

  public static int getDeclaredFields_____3Ljava_lang_reflect_Field_2 (MJIEnv env, int objRef) {
    ClassInfo fci = getInitializedClassInfo(env, FIELD_CLASSNAME);
    if (fci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(objRef);
    int nInstance = ci.getNumberOfDeclaredInstanceFields();
    int nStatic = ci.getNumberOfStaticFields();
    int aref = env.newObjectArray("Ljava/lang/reflect/Field;", nInstance + nStatic);
    int i, j = 0;

    for (i=0; i<nStatic; i++) {
      FieldInfo fi = ci.getStaticField(i);
      env.setReferenceArrayElement(aref, j++, createFieldObject(env, fi, fci));
    }

    for (i=0; i<nInstance; i++) {
      FieldInfo fi = ci.getDeclaredInstanceField(i);
      env.setReferenceArrayElement(aref, j++, createFieldObject(env, fi, fci));
    }

    return aref;
  }

  public static int getFields_____3Ljava_lang_reflect_Field_2 (MJIEnv env, int clsRef){
    ClassInfo fci = getInitializedClassInfo(env, FIELD_CLASSNAME);
    if (fci == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(clsRef);
    // interfaces might not be initialized yet, so we have to check first
    Set<ClassInfo> ifcs = getInitializedInterfaces(env, ci);
    if (ifcs == null) {
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ArrayList<FieldInfo> fiList = new ArrayList<FieldInfo>();
    for (; ci != null; ci = ci.getSuperClass()){
      // the host VM returns them in order of declaration, but the spec says
      // there is no guaranteed order so we keep it simple
      for (FieldInfo fi : ci.getDeclaredInstanceFields()){
        if (fi.isPublic()){
          fiList.add(fi);
        }
      }
      for (FieldInfo fi : ci.getDeclaredStaticFields()){
        if (fi.isPublic()){
          fiList.add(fi);
        }
      }
    }

    for (ClassInfo ciIfc : ifcs){
      for (FieldInfo fi : ciIfc.getDeclaredStaticFields()){
        fiList.add(fi); // there are no non-public fields in interfaces
      }
    }

    int aref = env.newObjectArray("Ljava/lang/reflect/Field;", fiList.size());
    int j = 0;
    for (FieldInfo fi : fiList){
      env.setReferenceArrayElement(aref, j++, createFieldObject(env, fi, fci));
    }

    return aref;
  }

  static int getField (MJIEnv env, int clsRef, int nameRef, boolean isRecursiveLookup) {
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    String fname = env.getStringObject(nameRef);
    FieldInfo fi = null;

    if (isRecursiveLookup) {
      fi = ci.getInstanceField(fname);
      if (fi == null) {
        fi = ci.getStaticField(fname);
      }
    } else {
      fi = ci.getDeclaredInstanceField(fname);
      if (fi == null) {
        fi = ci.getDeclaredStaticField(fname);
      }
    }

    if (fi == null) {
      env.throwException("java.lang.NoSuchFieldException", fname);
      return MJIEnv.NULL;

    } else {
      // don't do a Field clinit before we know there is such a field
      ClassInfo fci = getInitializedClassInfo(env, FIELD_CLASSNAME);
      if (fci == null) {
        env.repeatInvocation();
        return MJIEnv.NULL;
      }

      return createFieldObject(env, fi, fci);
    }
  }

  public static int getDeclaredField__Ljava_lang_String_2__Ljava_lang_reflect_Field_2 (MJIEnv env,
                                                                                       int clsRef, int nameRef) {
    return getField(env, clsRef, nameRef, false);
  }

  public static int getField__Ljava_lang_String_2__Ljava_lang_reflect_Field_2 (MJIEnv env,
                                                                               int clsRef, int nameRef) {
    return getField(env, clsRef, nameRef, true);
  }

  public static int getModifiers____I (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    return ci.getModifiers();
  }

  public static int getEnumConstants (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);

    if (env.requiresClinitExecution(ci)){
      env.repeatInvocation();
      return 0;
    }

    if (ci.getSuperClass().getName().equals("java.lang.Enum")) {
      ArrayList<FieldInfo> list = new ArrayList<FieldInfo>();
      String cName = ci.getName();

      // enum constants are the final static fields whose type is the enum itself
      for (FieldInfo fi : ci.getDeclaredStaticFields()) {
        if (fi.isFinal() && cName.equals(fi.getType())){
          list.add(fi);
        }
      }

      int aRef = env.newObjectArray(cName, list.size());
      StaticElementInfo sei = ci.getStaticElementInfo();
      int i = 0;
      for (FieldInfo fi : list){
        env.setReferenceArrayElement(aRef, i++, sei.getReferenceField(fi));
      }
      return aRef;
    }

    return MJIEnv.NULL;
  }

  static public int getInterfaces_____3Ljava_lang_Class_2 (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    int aref = MJIEnv.NULL;
    ThreadInfo ti = env.getThreadInfo();

    // contrary to the API doc, this only returns the interfaces directly
    // implemented by this class, not it's bases
    // <2do> this is not exactly correct, since the interfaces should be ordered
    Set<String> ifcNames = ci.getInterfaces();
    aref = env.newObjectArray("Ljava/lang/Class;", ifcNames.size());

    int i = 0;
    for (String ifc : ifcNames){
      ClassInfo ici = ClassInfo.getResolvedClassInfo(ifc);
      if (!ici.isRegistered()) {
        ici.registerClass(ti);
      }
      env.setReferenceArrayElement(aref, i++, ici.getClassObjectRef());
    }

    return aref;
  }

  /**
   * <2do> needs to load from the classfile location, NOT the MJIEnv (native) class
   *
   * @author Sebastian Gfeller (sebastian.gfeller@gmail.com)
   * @author Tihomir Gvero (tihomir.gvero@gmail.com)
   */
  public static int getByteArrayFromResourceStream(MJIEnv env, int clsRef, int nameRef) {
    String name = env.getStringObject(nameRef);

    // <2do> this is not loading from the classfile location! fix it
    InputStream is = env.getClass().getResourceAsStream(name);
    if (is == null){
      return MJIEnv.NULL;
    }

    try {
      // We assume available() reports the full length; read in a loop because
      // a single read() is not guaranteed to fill the buffer.
      byte[] content = new byte[is.available()];
      int pos = 0;
      while (pos < content.length){
        int n = is.read(content, pos, content.length - pos);
        if (n < 0){
          break; // premature EOF - remaining bytes stay zero
        }
        pos += n;
      }
      // Now if everything worked, the content should be in the byte buffer.
      // We put this buffer into the JPF JVM.
      return env.newByteArray(content);

    } catch (IOException e) {
      throw new RuntimeException(e);

    } finally {
      try {
        is.close(); // BUGFIX: the stream was previously leaked
      } catch (IOException ignored) {
        // best effort close
      }
    }
  }

  public static int getEnclosingClass (MJIEnv env, int clsRef) {
    ClassInfo ciEncl = env.getReferredClassInfo(clsRef).getEnclosingClassInfo();

    if (ciEncl == null){
      return MJIEnv.NULL;
    }

    if (!ciEncl.isRegistered()){
      ThreadInfo ti = env.getThreadInfo();
      ciEncl.registerClass(ti);

      if (!ciEncl.isInitialized()){
        if (ciEncl.requiresClinitExecution(ti)){
          env.repeatInvocation();
          return 0;
        }
      }
    }

    return ciEncl.getClassObjectRef();
  }

  public static int getDeclaredClasses (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    String[] innerClassNames = ci.getInnerClasses();
    int aref = MJIEnv.NULL;
    ThreadInfo ti = env.getThreadInfo();

    aref = env.newObjectArray("Ljava/lang/Class;", innerClassNames.length);
    for (int i=0; i<innerClassNames.length; i++){
      ClassInfo ici = ClassInfo.getResolvedClassInfo(innerClassNames[i]);
      if (!ici.isRegistered()) {
        ici.registerClass(ti);
      }
      env.setReferenceArrayElement(aref, i, ici.getClassObjectRef());
    }

    return aref;
  }

  /** Models Class.getCanonicalName(): null for local/anonymous classes and their arrays. */
  private static String getCanonicalName (ClassInfo ci){
    if (ci.isArray()){
      String canonicalName = getCanonicalName(ci.getComponentClassInfo());
      if (canonicalName != null){
        return canonicalName + "[]";
      } else{
        return null;
      }
    }

    if (isLocalOrAnonymousClass(ci)) {
      return null;
    }

    if (ci.getEnclosingClassInfo() == null){
      return ci.getName();
    } else{
      String enclosingName = getCanonicalName(ci.getEnclosingClassInfo());
      if (enclosingName == null){
        return null;
      }
      return enclosingName + "." + ci.getSimpleName();
    }
  }

  public static int getCanonicalName____Ljava_lang_String_2 (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    return env.newString(getCanonicalName(ci));
  }

  public static int getDeclaredAnnotations_____3Ljava_lang_annotation_Annotation_2 (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);

    // only annotations declared directly on this class, i.e. not inherited ones
    ArrayList<AnnotationInfo> declared = new ArrayList<AnnotationInfo>();
    for (AnnotationInfo a : ci.getAnnotations()){
      if (!a.inherited){
        declared.add(a);
      }
    }
    AnnotationInfo[] ai = declared.toArray(new AnnotationInfo[declared.size()]);

    try {
      return env.newAnnotationProxies(ai);
    } catch (ClinitRequired x){
      env.handleClinitRequest(x.getRequiredClassInfo());
      return MJIEnv.NULL;
    }
  }

  public static int getEnclosingConstructor____Ljava_lang_reflect_Constructor_2 (MJIEnv env, int robj){
    ClassInfo mci = getInitializedClassInfo(env, CONSTRUCTOR_CLASSNAME);
    if (mci == null){
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(robj);
    MethodInfo enclosingMethod = ci.getEnclosingMethodInfo();

    if ((enclosingMethod != null) && enclosingMethod.isCtor()){
      return createMethodObject(env, mci, enclosingMethod);
    }
    return MJIEnv.NULL;
  }

  public static int getEnclosingMethod____Ljava_lang_reflect_Method_2 (MJIEnv env, int robj){
    ClassInfo mci = getInitializedClassInfo(env, METHOD_CLASSNAME);
    if (mci == null){
      env.repeatInvocation();
      return MJIEnv.NULL;
    }

    ClassInfo ci = env.getReferredClassInfo(robj);
    MethodInfo enclosingMethod = ci.getEnclosingMethodInfo();

    if ((enclosingMethod != null) && !enclosingMethod.isCtor()){
      return createMethodObject(env, mci, enclosingMethod);
    }
    return MJIEnv.NULL;
  }

  public static boolean isAnonymousClass____Z (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    String cname = null;
    if (ci.getName().contains("$")){
      // anonymous classes have a purely numeric suffix after the last '$'
      cname = ci.getName().substring(ci.getName().lastIndexOf('$') + 1);
    }
    return (cname == null) ? false : cname.matches("\\d+?");
  }

  public static boolean isEnum____Z (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    return ci.isEnum();
  }

  // Similar to getEnclosingClass() except it returns null for the case of
  // anonymous class.
  public static int getDeclaringClass____Ljava_lang_Class_2 (MJIEnv env, int clsRef){
    ClassInfo ci = env.getReferredClassInfo(clsRef);
    if (isLocalOrAnonymousClass(ci)){
      return MJIEnv.NULL;
    } else{
      return getEnclosingClass(env, clsRef);
    }
  }

  public static boolean isLocalClass____Z (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    return isLocalOrAnonymousClass(ci) && !isAnonymousClass____Z(env, robj);
  }

  private static boolean isLocalOrAnonymousClass (ClassInfo ci){
    return (ci.getEnclosingMethodInfo() != null);
  }

  public static boolean isMemberClass____Z (MJIEnv env, int robj){
    ClassInfo ci = env.getReferredClassInfo(robj);
    return (ci.getEnclosingClassInfo() != null) && !isLocalOrAnonymousClass(ci);
  }
}
package <%=packageName%>.aop.logging;

import <%=packageName%>.config.Constants;

import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.AfterThrowing;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;

import javax.inject.Inject;
import java.util.Arrays;

/**
 * Aspect for logging execution of service and repository Spring components.
 */
@Aspect
public class LoggingAspect {

    private final Logger log = LoggerFactory.getLogger(this.getClass());

    @Inject
    private Environment env;

    /**
     * Pointcut matching all beans in the repository, service and REST layers.
     */
    @Pointcut("within(<%=packageName%>.repository..*) || within(<%=packageName%>.service..*) || within(<%=packageName%>.web.rest..*)")
    public void loggingPointcut() { }

    /**
     * Logs exceptions thrown from matched components; development profile
     * additionally gets the full stack trace.
     *
     * @param joinPoint join point of the throwing method
     * @param e         the thrown exception
     */
    @AfterThrowing(pointcut = "loggingPointcut()", throwing = "e")
    public void logAfterThrowing(JoinPoint joinPoint, Throwable e) {
        String declaringType = joinPoint.getSignature().getDeclaringTypeName();
        String methodName = joinPoint.getSignature().getName();
        boolean devProfile = env.acceptsProfiles(Constants.SPRING_PROFILE_DEVELOPMENT);

        if (devProfile) {
            log.error("Exception in {}.{}() with cause = {} and exception {}",
                declaringType, methodName, e.getCause(), e);
        } else {
            log.error("Exception in {}.{}() with cause = {}",
                declaringType, methodName, e.getCause());
        }
    }

    /**
     * Logs method entry and exit at DEBUG level around matched invocations.
     *
     * @param joinPoint the intercepted invocation
     * @return the target method's result
     * @throws Throwable whatever the target method throws
     */
    @Around("loggingPointcut()")
    public Object logAround(ProceedingJoinPoint joinPoint) throws Throwable {
        String declaringType = joinPoint.getSignature().getDeclaringTypeName();
        String methodName = joinPoint.getSignature().getName();

        // guards avoid evaluating Arrays.toString(...) when DEBUG is off
        if (log.isDebugEnabled()) {
            log.debug("Enter: {}.{}() with argument[s] = {}",
                declaringType, methodName, Arrays.toString(joinPoint.getArgs()));
        }
        try {
            Object result = joinPoint.proceed();
            if (log.isDebugEnabled()) {
                log.debug("Exit: {}.{}() with result = {}",
                    declaringType, methodName, result);
            }
            return result;
        } catch (IllegalArgumentException e) {
            log.error("Illegal argument: {} in {}.{}()",
                Arrays.toString(joinPoint.getArgs()), declaringType, methodName);
            throw e;
        }
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.eas.util;

import java.io.*;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Common operations with files.
 *
 * @author vv
 */
public class FileUtils {

    public static final char EXTENSION_SEPARATOR = '.';
    private static final char UNIX_SEPARATOR = '/';
    private static final char WINDOWS_SEPARATOR = '\\';

    /**
     * Returns the extension of a file name (text after the last '.'), or null
     * if there is none.
     *
     * @param fileName file name or path; may be null
     * @return extension without the dot, or null
     */
    public static String getFileExtension(String fileName) {
        // Use indexOfExtension so a dot in a parent directory name
        // ("dir.v1/file") is not mistaken for an extension separator,
        // consistent with removeExtension(). Also tolerates null input.
        int index = indexOfExtension(fileName);
        return (index == -1) ? null : fileName.substring(index + 1);
    }

    /** @see #getFileExtension(String) */
    public static String getFileExtension(File file) {
        return getFileExtension(file.getName());
    }

    /**
     * Removes the extension (including the dot) from a file name.
     *
     * @param filename file name or path; may be null
     * @return name without extension, or null when input is null
     */
    public static String removeExtension(String filename) {
        if (filename == null) {
            return null;
        }
        int index = indexOfExtension(filename);
        if (index == -1) {
            return filename;
        } else {
            return filename.substring(0, index);
        }
    }

    /**
     * Index of the extension separator, ignoring dots that occur before the
     * last path separator.
     *
     * @return index of the last '.', or -1 if there is no extension
     */
    public static int indexOfExtension(String filename) {
        if (filename == null) {
            return -1;
        }
        int extensionPos = filename.lastIndexOf(EXTENSION_SEPARATOR);
        int lastSeparator = indexOfLastSeparator(filename);
        return lastSeparator > extensionPos ? -1 : extensionPos;
    }

    /**
     * Index of the last path separator, handling both Unix and Windows styles.
     *
     * @return index of the last '/' or '\\', or -1 if there is none
     */
    public static int indexOfLastSeparator(String filename) {
        if (filename == null) {
            return -1;
        }
        int lastUnixPos = filename.lastIndexOf(UNIX_SEPARATOR);
        int lastWindowsPos = filename.lastIndexOf(WINDOWS_SEPARATOR);
        return Math.max(lastUnixPos, lastWindowsPos);
    }

    /**
     * Reads the whole file into a byte array.
     *
     * @throws IOException when the file exceeds Integer.MAX_VALUE bytes or
     *                     fewer bytes than expected could be read
     */
    public static byte[] readBytes(File file) throws IOException {
        long len = file.length();
        if (len > Integer.MAX_VALUE) {
            throw new IOException("Too big file " + file.getPath()); // NOI18N
        }
        try (InputStream is = new FileInputStream(file)) {
            byte[] arr = new byte[(int) len];
            int pos = 0;
            // loop because a single read() need not fill the buffer
            while (pos < arr.length) {
                int read = is.read(arr, pos, arr.length - pos);
                if (read == -1) {
                    break;
                }
                pos += read;
            }
            if (pos != arr.length) {
                throw new IOException("Just " + pos + " bytes read from " + file.getPath()); // NOI18N
            }
            return arr;
        }
    }

    /** Reads the whole file and decodes it with the given encoding. */
    public static String readString(File file, String encoding) throws IOException {
        return new String(readBytes(file), encoding);
    }

    /** Writes the byte array to the file, replacing previous content. */
    public static void writeBytes(File file, byte[] arr) throws IOException {
        try (FileOutputStream out = new FileOutputStream(file)) {
            out.write(arr);
        }
    }

    /** Encodes the string and writes it to the file, replacing previous content. */
    public static void writeString(File file, String str, String encoding) throws IOException {
        try (Writer out = new OutputStreamWriter(new FileOutputStream(file), encoding)) {
            out.write(str);
        }
    }

    /** Deletes a file or directory tree; throws on the first undeletable entry. */
    public static void delete(File f) throws IOException {
        delete(f, false);
    }

    /**
     * Deletes a file or directory tree.
     *
     * @param aSkipUndeletedFiles when true, undeletable entries are logged and
     *                            skipped instead of aborting with an exception
     */
    public static void delete(File f, boolean aSkipUndeletedFiles) throws IOException {
        if (f.isDirectory()) {
            // listFiles() returns null on I/O error - guard against NPE
            File[] children = f.listFiles();
            if (children != null) {
                for (File c : children) {
                    delete(c, aSkipUndeletedFiles);
                }
            }
        }
        if (!f.delete()) {
            if (aSkipUndeletedFiles) {
                Logger.getLogger(FileUtils.class.getName()).log(Level.WARNING,
                        "Unable to delete file: {0} skipping.", f.getAbsolutePath());
            } else {
                throw new IOException("Failed to delete file: " + f); // NOI18N
            }
        }
    }

    /**
     * Deletes all entries of a directory, keeping the directory itself.
     *
     * @throws IllegalArgumentException when f is not a directory
     * @throws IOException              when the directory cannot be listed or
     *                                  an entry cannot be deleted
     */
    public static void clearDirectory(File f, boolean aSkipUndeletedFiles) throws IOException {
        if (!f.isDirectory()) {
            throw new IllegalArgumentException("Only directory can be cleared."); // NOI18N
        }
        File[] children = f.listFiles();
        if (children == null) {
            // listFiles() returns null on I/O error
            throw new IOException("Failed to list directory: " + f); // NOI18N
        }
        for (File c : children) {
            delete(c, aSkipUndeletedFiles);
        }
    }
}
package com.github.horrorho.ragingmoose;

import static java.lang.Long.toHexString;
import java.nio.ByteBuffer;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;
import javax.annotation.concurrent.NotThreadSafe;

/**
 * Low level bit in stream.
 * <p>
 * Reads bit fields out of a {@link ByteBuffer} that is consumed
 * <em>backwards</em>: {@link #init(int)} and {@link #fill()} step the buffer
 * position down and reload a 64-bit word into an accumulator, from which
 * {@link #read(int)} then serves the top bits.
 *
 * @author Ayesha
 */
@NotThreadSafe
@ParametersAreNonnullByDefault
class BitInStream {

    // accumNBits 63 bit limit avoids unsupported 64 bit shifts/ branch.
    private final ByteBuffer in; // backing buffer, consumed from high to low positions
    private long accum;          // bit accumulator; only the low accumNBits bits are valid
    private int accumNBits;      // count of valid bits currently held in accum (0..63)

    BitInStream(ByteBuffer in, long accum, int accumNBits) {
        this.in = Objects.requireNonNull(in);
        this.accum = accum;
        this.accumNBits = accumNBits;
    }

    BitInStream(ByteBuffer in) {
        this(in, 0, 0);
    }

    /**
     * Primes the accumulator at the current buffer position.
     * <p>
     * NOTE(review): {@code n} appears to be a bit-count adjustment supplied by
     * the caller (never positive): {@code n == 0} loads 7 bytes (56 valid
     * bits), {@code n < 0} loads 8 bytes with {@code n + 64} valid bits —
     * confirm against the LZFSE block-header semantics in the caller.
     *
     * @throws LZFSEDecoderException if {@code n > 0} or the buffer position
     *         would go out of range ({@link IllegalArgumentException} from
     *         {@code position()} is translated)
     */
    @Nonnull
    BitInStream init(int n) throws LZFSEDecoderException {
        try {
            if (n > 0) {
                throw new LZFSEDecoderException();
            } else if (n == 0) {
                // Step back 7 bytes; getLong reads 8 starting one byte earlier,
                // then the low byte (outside our window) is shifted away.
                in.position(in.position() - 7);
                accum = in.getLong(in.position() - 1);
                accum >>>= 8;
                accumNBits = 56;
            } else {
                // Step back a full 8 bytes; only n + 64 of the loaded bits
                // are considered valid.
                in.position(in.position() - 8);
                accum = in.getLong(in.position());
                accumNBits = n + 64;
            }
            // check();  // internal invariant check, disabled in production
            return this;
        } catch (IllegalArgumentException ex) {
            throw new LZFSEDecoderException(ex);
        }
    }

    /**
     * Tops the accumulator back up to at least 56 valid bits by stepping the
     * buffer backwards and reloading.
     * <p>
     * The {@code mBits} shift pair clears the high bits above the 63-bit
     * ceiling (a full 64-bit fill would require shift-by-64, which Java's
     * shift operators cannot express — see the field comment above).
     *
     * @throws LZFSEDecoderException if the buffer position would go out of
     *         range
     */
    @Nonnull
    BitInStream fill() throws LZFSEDecoderException {
        try {
            if (accumNBits < 56) {
                int nBits = 63 - accumNBits;   // bits of headroom available
                int nBytes = nBits >>> 3;      // whole bytes we can pull in
                int mBits = (nBits & 0x07) + 1; // high bits to mask off after the reload
                in.position(in.position() - nBytes);
                accum = in.getLong(in.position());
                accum <<= mBits;
                accum >>>= mBits;
                accumNBits += nBytes << 3;
            }
            // check();  // internal invariant check, disabled in production
            return this;
        } catch (IllegalArgumentException ex) {
            throw new LZFSEDecoderException(ex);
        }
    }

    /**
     * Consumes and returns the top {@code n} valid bits of the accumulator.
     *
     * @throws IllegalStateException if fewer than {@code n} bits are buffered
     * @throws IllegalArgumentException if {@code n} is negative
     */
    long read(int n) {
        if (n > accumNBits) {
            throw new IllegalStateException();
        }
        if (n < 0) {
            throw new IllegalArgumentException();
        }
        accumNBits -= n;
        long bits = accum >>> accumNBits;
        // Clear the consumed bits out of the accumulator.
        accum ^= bits << accumNBits;
        return bits;
    }

    /**
     * Invariant check used during development: after init/fill the
     * accumulator must hold 56..63 valid bits and no stray bits above
     * {@code accumNBits}. Call sites are commented out above.
     */
    void check() {
        if (accumNBits < 56) {
            throw new AssertionError("accumulator underflow: " + accumNBits);
        }
        if (accumNBits > 63) {
            throw new AssertionError("accumulator overflow: " + accumNBits);
        }
        if (accum >>> accumNBits != 0) {
            throw new AssertionError("accumulator corruption: 0x" + toHexString(accum) + " " + accumNBits);
        }
    }

    @Override
    public String toString() {
        return "BitStream{" + "in=" + in + ", accum=0x" + toHexString(accum) + ", accumNBits=" + accumNBits + '}';
    }
}
package com.github.phantomthief.util;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.Spliterator.IMMUTABLE;
import static java.util.Spliterator.NONNULL;
import static java.util.Spliterator.ORDERED;
import static java.util.Spliterators.spliteratorUnknownSize;

import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import javax.annotation.CheckReturnValue;

import com.google.common.collect.AbstractIterator;

/**
 * Iterates over entities fetched page-by-page through a cursor-based DAO,
 * flattening the pages produced by a {@link PageScroller} into a single
 * entity sequence.
 *
 * @author w.vela
 */
public class CursorIterator<Id, Entity> implements Iterable<Entity> {

    /** Page size used when the builder does not specify one. */
    private static final int DEFAULT_BUFFER_SIZE = 30;

    private final PageScroller<Id, Entity> pageScroller;

    private CursorIterator(PageScroller<Id, Entity> pageScroller) {
        this.pageScroller = pageScroller;
    }

    @CheckReturnValue
    public static <I, E> GenericBuilder<I, E> newGenericBuilder() {
        return new GenericBuilder<>(newBuilder());
    }

    /**
     * better use {@link #newGenericBuilder()} for type safe
     */
    @Deprecated
    @CheckReturnValue
    public static Builder<Object, Object> newBuilder() {
        return new Builder<>();
    }

    @Override
    public Iterator<Entity> iterator() {
        return new AbstractIterator<Entity>() {

            private final Iterator<List<Entity>> pageIterator = pageScroller.iterator();
            private Iterator<Entity> entityIteratorInPage;

            @Override
            protected Entity computeNext() {
                // FIX: loop (was a single 'if') so that an empty page cannot
                // make entityIteratorInPage.next() throw NoSuchElementException;
                // keep advancing pages until one yields an element or the
                // scroller is exhausted.
                while (entityIteratorInPage == null || !entityIteratorInPage.hasNext()) {
                    if (pageIterator.hasNext()) {
                        entityIteratorInPage = pageIterator.next().iterator();
                    } else {
                        return endOfData();
                    }
                }
                return entityIteratorInPage.next();
            }
        };
    }

    /**
     * Returns a sequential {@link Stream} over the same entity sequence as
     * {@link #iterator()}.
     */
    public Stream<Entity> stream() {
        return StreamSupport
                .stream(spliteratorUnknownSize(iterator(), (NONNULL | IMMUTABLE | ORDERED)), false);
    }

    /**
     * Type-safe builder facade; delegates every call to the raw
     * {@link Builder} while keeping {@code Id}/{@code Entity} fixed.
     */
    public static class GenericBuilder<Id, Entity> {

        private final Builder<Object, Object> builder;

        private GenericBuilder(Builder<Object, Object> builder) {
            this.builder = builder;
        }

        public CursorIterator<Id, Entity> build(GetByCursorDAO<? super Id, ? extends Entity> dao) {
            return builder.build(dao);
        }

        /** Sets the function extracting the next cursor from an entity. */
        @CheckReturnValue
        public GenericBuilder<Id, Entity> cursorExtractor(Function<? super Entity, ? extends Id> function) {
            builder.cursorExtractor(function);
            return this;
        }

        /** Sets the initial cursor value. */
        @CheckReturnValue
        public GenericBuilder<Id, Entity> start(Id init) {
            builder.start(init);
            return this;
        }

        /** Sets the page size (must be positive). */
        @CheckReturnValue
        public GenericBuilder<Id, Entity> bufferSize(int bufferSize) {
            builder.bufferSize(bufferSize);
            return this;
        }

        /** Caps the number of pages fetched; non-positive means unlimited. */
        @CheckReturnValue
        public GenericBuilder<Id, Entity> maxNumberOfPages(int maxNumberOfPages) {
            builder.maxNumberOfPages(maxNumberOfPages);
            return this;
        }
    }

    @SuppressWarnings("unchecked")
    public static class Builder<Id, Entity> {

        private GetByCursorDAO<Id, Entity> dao;
        private Integer bufferSize;
        private Function<Entity, Id> function;
        private Id init;
        private int maxNumberOfPages = 0; // 0 means "no limit"

        public <I, E> CursorIterator<I, E> build(GetByCursorDAO<? super I, ? extends E> dao) {
            Builder<I, E> thisBuilder = (Builder<I, E>) this;
            thisBuilder.dao = (GetByCursorDAO<I, E>) dao;
            return thisBuilder.build();
        }

        @CheckReturnValue
        public Builder<Id, Entity> bufferSize(int bufferSize) {
            checkArgument(bufferSize > 0);
            this.bufferSize = bufferSize;
            return this;
        }

        @CheckReturnValue
        public <I, E> Builder<I, E> cursorExtractor(Function<? super E, ? extends I> function) {
            Builder<I, E> thisBuilder = (Builder<I, E>) this;
            thisBuilder.function = (Function<E, I>) function;
            return thisBuilder;
        }

        @CheckReturnValue
        public <I, E> Builder<I, E> start(I init) {
            Builder<I, E> thisBuilder = (Builder<I, E>) this;
            thisBuilder.init = init;
            return thisBuilder;
        }

        @CheckReturnValue
        public <I, E> Builder<I, E> maxNumberOfPages(int maxNumberOfPages) {
            Builder<I, E> thisBuilder = (Builder<I, E>) this;
            thisBuilder.maxNumberOfPages = maxNumberOfPages;
            return thisBuilder;
        }

        private CursorIterator<Id, Entity> build() {
            ensure();
            PageScroller<Id, Entity> scroller = new PageScroller<>(dao, init, bufferSize, function);
            if (maxNumberOfPages > 0) {
                scroller.setMaxNumberOfPages(maxNumberOfPages);
            }
            return new CursorIterator<>(scroller);
        }

        /** Validates mandatory settings and applies the default page size. */
        private void ensure() {
            checkNotNull(dao);
            checkNotNull(function);
            if (bufferSize == null) {
                bufferSize = DEFAULT_BUFFER_SIZE;
            }
        }
    }
}
package joliex.io;

import com.sun.xml.xsom.XSSchemaSet;
import com.sun.xml.xsom.XSType;
import com.sun.xml.xsom.parser.XSOMParser;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.activation.FileTypeMap;
import javax.activation.MimetypesFileTypeMap;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import jolie.jap.JapURLConnection;
import jolie.js.JsUtils;
import jolie.runtime.AndJarDeps;
import jolie.runtime.ByteArray;
import jolie.runtime.FaultException;
import jolie.runtime.JavaService;
import jolie.runtime.Value;
import jolie.runtime.ValueVector;
import jolie.runtime.embedding.RequestResponse;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

/**
 * Jolie embedded service exposing file-system operations: reading/writing
 * files in several formats (text, binary, base64, XML, JSON, properties),
 * copying/deleting/listing directories, and MIME type lookups.
 *
 * @author Fabrizio Montesi
 */
@AndJarDeps( { "jolie-xml.jar", "xsom.jar", "jolie-js.jar", "json_simple.jar" } )
public class FileService extends JavaService {

    // Matches "#file <name>" property values; group(1) is the run of '#'
    // (used for escaping), group(2) the referenced filename.
    private final static Pattern fileKeywordPattern = Pattern.compile( "(#+)file\\s+(.*)" );

    private final DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
    private final TransformerFactory transformerFactory = TransformerFactory.newInstance();
    private FileTypeMap fileTypeMap = FileTypeMap.getDefaultFileTypeMap();

    public FileService() {
        super();
        documentBuilderFactory.setIgnoringElementContentWhitespace( true );
    }

    /**
     * Encodes the request's byte-array value as a base64 string.
     * NOTE(review): sun.misc.BASE64Encoder is a non-portable JDK-internal
     * class (removed in Java 9+); java.util.Base64 is the supported
     * replacement — used in several methods below as well.
     */
    @RequestResponse
    public String convertFromBinaryToBase64Value( Value value ) {
        byte[] buffer = value.byteArrayValue().getBytes();
        sun.misc.BASE64Encoder encoder = new sun.misc.BASE64Encoder();
        return encoder.encode( buffer );
    }

    /**
     * Decodes the request's base64 string value into raw bytes.
     *
     * @throws FaultException "IOException" on malformed input
     */
    @RequestResponse
    public ByteArray convertFromBase64ToBinaryValue( Value value ) throws FaultException {
        ByteArray returnValue = null;
        try {
            String stringValue = value.strValue();
            sun.misc.BASE64Decoder decoder = new sun.misc.BASE64Decoder();
            byte[] supportArray = decoder.decodeBuffer( stringValue );
            returnValue = new ByteArray( supportArray );
            return returnValue;
        } catch( IOException ex ) {
            throw new FaultException( "IOException", ex );
        }
    }

    /**
     * Replaces the MIME-type map with one loaded from the given
     * mime.types-format file.
     */
    @RequestResponse
    public void setMimeTypeFile( String filename ) throws FaultException {
        try {
            fileTypeMap = new MimetypesFileTypeMap( filename );
        } catch( IOException e ) {
            throw new FaultException( "IOException", e );
        }
    }

    /**
     * Reads {@code size} bytes from the stream and stores them base64-encoded
     * into {@code value}.
     * NOTE(review): a single InputStream.read(buffer) call may return fewer
     * than {@code size} bytes; this (and the two readers below) should loop
     * until the buffer is full — confirm whether truncated reads have been
     * observed in practice.
     */
    private static void readBase64IntoValue( InputStream istream, long size, Value value )
        throws IOException {
        byte[] buffer = new byte[ (int) size ];
        istream.read( buffer );
        sun.misc.BASE64Encoder encoder = new sun.misc.BASE64Encoder();
        value.setValue( encoder.encode( buffer ) );
    }

    /**
     * Reads {@code size} raw bytes from the stream into {@code value}.
     * NOTE(review): same partial-read caveat as readBase64IntoValue.
     */
    private static void readBinaryIntoValue( InputStream istream, long size, Value value )
        throws IOException {
        byte[] buffer = new byte[ (int) size ];
        istream.read( buffer );
        value.setValue( new ByteArray( buffer ) );
    }

    /**
     * Parses the stream as JSON into {@code value}; uses the platform default
     * charset when {@code charset} is null.
     */
    private void readJsonIntoValue( InputStream istream, Value value, Charset charset, boolean strictEncoding )
        throws IOException {
        InputStreamReader isr;
        if ( charset == null ) {
            isr = new InputStreamReader( istream );
        } else {
            isr = new InputStreamReader( istream, charset );
        }
        Reader r = new BufferedReader( isr );
        JsUtils.parseJsonIntoValue( r, value, strictEncoding );
    }

    /**
     * Parses the stream as XML and maps the document into the child of
     * {@code value} named after the root element.
     * NOTE(review): the local reassignment of {@code value} only redirects
     * this method's reference; the caller still observes the children added
     * through the original Value graph.
     */
    private void readXMLIntoValue( InputStream istream, Value value )
        throws IOException {
        try {
            DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder();
            InputSource src = new InputSource( new InputStreamReader( istream ) );
            Document doc = builder.parse( src );
            value = value.getFirstChild( doc.getDocumentElement().getNodeName() );
            jolie.xml.XmlUtils.documentToValue( doc, value );
        } catch( ParserConfigurationException e ) {
            throw new IOException( e );
        } catch( SAXException e ) {
            throw new IOException( e );
        }
    }

    /**
     * Like readXMLIntoValue, but uses the "storage" XML-to-Value mapping
     * (round-trippable with writeStorageXML).
     */
    private void readXMLIntoValueForStoring( InputStream istream, Value value )
        throws IOException {
        try {
            DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder();
            InputSource src = new InputSource( new InputStreamReader( istream ) );
            Document doc = builder.parse( src );
            value = value.getFirstChild( doc.getDocumentElement().getNodeName() );
            jolie.xml.XmlUtils.storageDocumentToValue( doc, value );
        } catch( ParserConfigurationException e ) {
            throw new IOException( e );
        } catch( SAXException e ) {
            throw new IOException( e );
        }
    }

    /**
     * Reads {@code size} bytes and decodes them to a String (platform
     * default charset when {@code charset} is null).
     * NOTE(review): same partial-read caveat as readBase64IntoValue.
     */
    private static void readTextIntoValue( InputStream istream, long size, Value value, Charset charset )
        throws IOException {
        byte[] buffer = new byte[ (int) size ];
        istream.read( buffer );
        if ( charset == null ) {
            value.setValue( new String( buffer ) );
        } else {
            value.setValue( new String( buffer, charset ) );
        }
    }

    /**
     * Loads a java.util.Properties stream into {@code value}, one child per
     * property. Values matching "#file <name>" are replaced by the content of
     * the referenced file; "##file ..." escapes the directive by dropping one
     * leading '#'.
     */
    private void readPropertiesFile( InputStream istream, Value value )
        throws IOException {
        Properties properties = new Properties();
        properties.load( new InputStreamReader( istream ) );
        // propertyNames() is declared to return Enumeration<?>; the cast is
        // safe because Properties keys are Strings by contract.
        Enumeration< String> names = (Enumeration< String>) properties.propertyNames();
        String name;
        String propertyValue;
        Matcher matcher;
        while( names.hasMoreElements() ) {
            name = names.nextElement();
            propertyValue = properties.getProperty( name );
            matcher = fileKeywordPattern.matcher( propertyValue );
            if ( matcher.matches() ) {
                if ( matcher.group( 1 ).length() > 1 ) {
                    // More than one leading '#': escaped literal, strip one '#'.
                    propertyValue = propertyValue.substring( 1 );
                } else {
                    // It's a #file directive
                    // TODO: this is a bit of a hack. We should have a private
                    // method for performing all the lookups of files into
                    // JAPs, local directories etc. instead of calling readFile
                    // again.
                    Value request = Value.create();
                    request.getFirstChild( "filename" ).setValue( matcher.group( 2 ) );
                    request.getFirstChild( "format" ).setValue( "text" );
                    try {
                        propertyValue = readFile( request ).strValue();
                    } catch( FaultException e ) {
                        throw new IOException( e );
                    }
                }
            }
            value.getFirstChild( name ).setValue( propertyValue );
        }
    }

    /**
     * Recursively copies {@code src} (file or directory tree) to {@code dest}.
     * NOTE(review): the streams are not closed if read/write throws —
     * a try/finally (or try-with-resources) would prevent the leak.
     */
    private void __copyDir( File src, File dest )
        throws FileNotFoundException, IOException {
        if ( src.isDirectory() ) {
            if ( !dest.exists() ) {
                dest.mkdir();
            }
            String[] files = src.list();
            for( String file : files ) {
                File fileSrc = new File( src, file );
                File fileDest = new File( dest, file );
                __copyDir( fileSrc, fileDest );
            }
        } else {
            // copy files
            FileInputStream inStream = new FileInputStream( src );
            FileOutputStream outStream = new FileOutputStream( dest );
            byte[] buffer = new byte[ 4096 ];
            int length;
            while( (length = inStream.read( buffer )) > 0 ) {
                outStream.write( buffer, 0, length );
            }
            inStream.close();
            outStream.close();
        }
    }

    /**
     * Operation: copies directory {@code from} to {@code to} recursively.
     * Always returns a Value holding {@code true} on success.
     */
    public Value copyDir( Value request ) throws FaultException {
        Value retValue = Value.create();
        retValue.setValue( true );
        String fromDirName = request.getFirstChild( "from" ).strValue();
        String toDirName = request.getFirstChild( "to" ).strValue();
        File fromDir = new File( fromDirName );
        File toDir = new File( toDirName );
        try {
            __copyDir( fromDir, toDir );
        } catch( FileNotFoundException e ) {
            throw new FaultException( "FileNotFound" );
        } catch( IOException e ) {
            throw new FaultException( "IOException" );
        }
        return retValue;
    }

    /**
     * Operation: reads a file (from the local file system, or from a JAP
     * archive through the interpreter's class loader as a fallback) and
     * decodes it according to {@code request.format}: base64, binary, xml,
     * xml_store, properties, json, or plain text (default).
     *
     * @throws FaultException "FileNotFound" or "IOException"
     */
    public Value readFile( Value request )
        throws FaultException {
        Value filenameValue = request.getFirstChild( "filename" );
        Value retValue = Value.create();
        String format = request.getFirstChild( "format" ).strValue();
        File file = new File( filenameValue.strValue() );
        InputStream istream = null;
        long size;
        try {
            if ( file.exists() ) {
                istream = new FileInputStream( file );
                size = file.length();
            } else {
                // Not on disk: try to resolve the name inside a JAP archive
                // via the interpreter's class loader.
                URL fileURL = interpreter().getClassLoader().findResource( filenameValue.strValue() );
                if ( fileURL != null && fileURL.getProtocol().equals( "jap" ) ) {
                    URLConnection conn = fileURL.openConnection();
                    if ( conn instanceof JapURLConnection ) {
                        JapURLConnection jarConn = (JapURLConnection) conn;
                        size = jarConn.getEntrySize();
                        if ( size < 0 ) {
                            throw new IOException( "File dimension is negative for file " + fileURL.toString() );
                        }
                        istream = jarConn.getInputStream();
                    } else {
                        throw new FileNotFoundException( filenameValue.strValue() );
                    }
                } else {
                    throw new FileNotFoundException( filenameValue.strValue() );
                }
            }
            istream = new BufferedInputStream( istream );
            try {
                if ( "base64".equals( format ) ) {
                    readBase64IntoValue( istream, size, retValue );
                } else if ( "binary".equals( format ) ) {
                    readBinaryIntoValue( istream, size, retValue );
                } else if ( "xml".equals( format ) ) {
                    readXMLIntoValue( istream, retValue );
                } else if ( "xml_store".equals( format ) ) {
                    readXMLIntoValueForStoring( istream, retValue );
                } else if ( "properties".equals( format ) ) {
                    readPropertiesFile( istream, retValue );
                } else if ( "json".equals( format ) ) {
                    Charset charset = null;
                    Value formatValue = request.getFirstChild( "format" );
                    if ( formatValue.hasChildren( "charset" ) ) {
                        charset = Charset.forName( formatValue.getFirstChild( "charset" ).strValue() );
                    }
                    boolean strictEncoding = false;
                    if ( request.getFirstChild( "format" ).hasChildren( "json_encoding" ) ) {
                        if ( request.getFirstChild( "format" ).getFirstChild( "json_encoding" ).strValue().equals( "strict" ) ) {
                            strictEncoding = true;
                        }
                    }
                    readJsonIntoValue( istream, retValue, charset, strictEncoding );
                } else {
                    // Default: plain text, with optional format.charset.
                    Charset charset = null;
                    Value formatValue = request.getFirstChild( "format" );
                    if ( formatValue.hasChildren( "charset" ) ) {
                        charset = Charset.forName( formatValue.getFirstChild( "charset" ).strValue() );
                    }
                    readTextIntoValue( istream, size, retValue, charset );
                }
            } finally {
                istream.close();
            }
        } catch( FileNotFoundException e ) {
            throw new FaultException( "FileNotFound", e );
        } catch( IOException e ) {
            throw new FaultException( "IOException", e );
        }
        return retValue;
    }

    /** Operation: true if the given path exists. */
    public Boolean exists( String filename ) {
        return (new File( filename ).exists()) ? true : false;
    }

    /** Operation: creates the directory and any missing parents. */
    public Boolean mkdir( String directory ) {
        return (new File( directory ).mkdirs()) ? true : false;
    }

    /**
     * Operation: returns the MIME type of an existing file according to the
     * current fileTypeMap.
     *
     * @throws FaultException "FileNotFound" if the file does not exist
     */
    public String getMimeType( String filename )
        throws FaultException {
        File file = new File( filename );
        if ( file.exists() == false ) {
            throw new FaultException( "FileNotFound", filename );
        }
        return fileTypeMap.getContentType( file );
    }

    /**
     * Operation: canonical path of the running program's directory, or "."
     * when it cannot be determined.
     * NOTE(review): the IOException is only printed to stderr, then the "."
     * fallback applies — confirm this silent degradation is intended.
     */
    public String getServiceDirectory() {
        String dir = null;
        try {
            dir = interpreter().programDirectory().getCanonicalPath();
        } catch( IOException e ) {
            e.printStackTrace();
        }
        if ( dir == null || dir.isEmpty() ) {
            dir = ".";
        }
        return dir;
    }

    /** Operation: the platform file separator character. */
    public String getFileSeparator() {
        return jolie.lang.Constants.fileSeparator;
    }

    /**
     * Serializes {@code value} to an XML file, optionally validating the
     * root element's type against an XML Schema, and optionally setting
     * DOCTYPE system id, output encoding, and indentation.
     * NOTE(review): FileWriter always uses the platform default charset, so
     * a requested {@code encoding} only changes the XML declaration, not the
     * actual bytes written — confirm whether that mismatch matters here.
     */
    private void writeXML( File file, Value value, boolean append, String schemaFilename, String doctypeSystem, String encoding, boolean indent )
        throws IOException {
        if ( value.children().isEmpty() ) {
            return; // TODO: perhaps we should erase the content of the file before returning.
        }
        // The single child name of the value becomes the XML root element.
        String rootName = value.children().keySet().iterator().next();
        try {
            XSType type = null;
            if ( schemaFilename != null ) {
                try {
                    XSOMParser parser = new XSOMParser();
                    parser.parse( schemaFilename );
                    XSSchemaSet schemaSet = parser.getResult();
                    if ( schemaSet != null ) {
                        type = schemaSet.getElementDecl( "", rootName ).getType();
                    }
                } catch( SAXException e ) {
                    throw new IOException( e );
                }
            }
            Document doc = documentBuilderFactory.newDocumentBuilder().newDocument();
            if ( type == null ) {
                jolie.xml.XmlUtils.valueToDocument( value.getFirstChild( rootName ), rootName, doc );
            } else {
                jolie.xml.XmlUtils.valueToDocument( value.getFirstChild( rootName ), rootName, doc, type );
            }
            Transformer transformer = transformerFactory.newTransformer();
            if ( indent ) {
                transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
            } else {
                transformer.setOutputProperty( OutputKeys.INDENT, "no" );
            }
            if ( doctypeSystem != null ) {
                transformer.setOutputProperty( "doctype-system", doctypeSystem );
            }
            if ( encoding != null ) {
                transformer.setOutputProperty( OutputKeys.ENCODING, encoding );
            }
            Writer writer = new FileWriter( file, append );
            StreamResult result = new StreamResult( writer );
            transformer.transform( new DOMSource( doc ), result );
            writer.close();
        } catch( ParserConfigurationException e ) {
            throw new IOException( e );
        } catch( TransformerConfigurationException e ) {
            throw new IOException( e );
        } catch( TransformerException e ) {
            throw new IOException( e );
        }
    }

    /**
     * Serializes {@code value} with the "storage" Value-to-XML mapping
     * (round-trippable with readXMLIntoValueForStoring); always overwrites
     * the file.
     */
    private void writeStorageXML( File file, Value value )
        throws IOException {
        if ( value.children().isEmpty() ) {
            return; // TODO: perhaps we should erase the content of the file before returning.
        }
        String rootName = value.children().keySet().iterator().next();
        try {
            Document doc = documentBuilderFactory.newDocumentBuilder().newDocument();
            jolie.xml.XmlUtils.valueToStorageDocument( value.getFirstChild( rootName ), rootName, doc );
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty( OutputKeys.INDENT, "no" );
            Writer writer = new FileWriter( file, false );
            StreamResult result = new StreamResult( writer );
            transformer.transform( new DOMSource( doc ), result );
            writer.close();
        } catch( ParserConfigurationException e ) {
            throw new IOException( e );
        } catch( TransformerConfigurationException e ) {
            throw new IOException( e );
        } catch( TransformerException e ) {
            throw new IOException( e );
        }
    }

    /** Writes the value's raw bytes to the file (optionally appending). */
    private static void writeBinary( File file, Value value, boolean append )
        throws IOException {
        FileOutputStream os = new FileOutputStream( file, append );
        os.write( value.byteArrayValue().getBytes() );
        os.flush();
        os.close();
    }

    /** Writes the value's string form to the file (optionally appending). */
    private static void writeText( File file, Value value, boolean append )
        throws IOException {
        FileWriter writer = new FileWriter( file, append );
        writer.write( value.strValue() );
        writer.flush();
        writer.close();
    }

    /**
     * Operation: writes {@code request.content} to {@code request.filename}
     * in the requested format (text, binary, xml, xml_store; empty format
     * auto-selects binary/text based on the content's type).
     * NOTE(review): the local variable {@code doctypePublic} actually holds
     * the {@code doctype_system} value — misnamed but functionally consistent
     * with the {@code doctypeSystem} parameter of writeXML.
     */
    @RequestResponse
    public void writeFile( Value request )
        throws FaultException {
        boolean append = false;
        Value content = request.getFirstChild( "content" );
        String format = request.getFirstChild( "format" ).strValue();
        File file = new File( request.getFirstChild( "filename" ).strValue() );
        if ( request.getFirstChild( "append" ).intValue() > 0 ) {
            append = true;
        }
        try {
            if ( "text".equals( format ) ) {
                writeText( file, content, append );
            } else if ( "binary".equals( format ) ) {
                writeBinary( file, content, append );
            } else if ( "xml".equals( format ) ) {
                String schemaFilename = null;
                if ( request.getFirstChild( "format" ).hasChildren( "schema" ) ) {
                    schemaFilename = request.getFirstChild( "format" ).getFirstChild( "schema" ).strValue();
                }
                boolean indent = false;
                if ( request.getFirstChild( "format" ).hasChildren( "indent" ) ) {
                    indent = request.getFirstChild( "format" ).getFirstChild( "indent" ).boolValue();
                }
                String doctypePublic = null;
                if ( request.getFirstChild( "format" ).hasChildren( "doctype_system" ) ) {
                    doctypePublic = request.getFirstChild( "format" ).getFirstChild( "doctype_system" ).strValue();
                }
                String encoding = null;
                if ( request.getFirstChild( "format" ).hasChildren( "encoding" ) ) {
                    encoding = request.getFirstChild( "format" ).getFirstChild( "encoding" ).strValue();
                }
                writeXML( file, content, append, schemaFilename, doctypePublic, encoding, indent );
            } else if ( "xml_store".equals( format ) ) {
                writeStorageXML( file, content );
            } else if ( format.isEmpty() ) {
                if ( content.isByteArray() ) {
                    writeBinary( file, content, append );
                } else {
                    writeText( file, content, append );
                }
            }
        } catch( IOException e ) {
            throw new FaultException( "IOException", e );
        }
    }

    /**
     * Operation: deletes a file, or — when {@code isRegex} is set — every
     * sibling whose name matches the request string as a regex.
     * NOTE(review): in the regex branch the matched names are relative, so
     * {@code new File( file )} resolves against the process CWD, not against
     * {@code dir}; also failed regex deletions do not flip {@code ret} —
     * confirm both behaviors are intended.
     */
    public Boolean delete( Value request ) {
        String filename = request.strValue();
        boolean isRegex = request.getFirstChild( "isRegex" ).intValue() > 0;
        boolean ret = true;
        if ( isRegex ) {
            File dir = new File( filename ).getAbsoluteFile().getParentFile();
            String[] files = dir.list( new ListFilter( filename, false ) );
            if ( files != null ) {
                for( String file : files ) {
                    new File( file ).delete();
                }
            }
        } else {
            if ( new File( filename ).delete() == false ) {
                ret = false;
            }
        }
        return ret;
    }

    /** Operation: recursively deletes a directory tree. */
    @RequestResponse
    public Boolean deleteDir( Value request ) {
        return __deleteDir( new File( request.strValue() ) );
    }

    /**
     * Operation: renames/moves {@code filename} to {@code to}.
     *
     * @throws FaultException "IOException" when the rename fails
     */
    @RequestResponse
    public void rename( Value request )
        throws FaultException {
        String filename = request.getFirstChild( "filename" ).strValue();
        String toFilename = request.getFirstChild( "to" ).strValue();
        if ( new File( filename ).renameTo( new File( toFilename ) ) == false ) {
            throw new FaultException( "IOException" );
        }
    }

    /**
     * Operation: size (in bytes) of the byte-array value passed in the
     * request — this measures the in-memory payload, not a file on disk.
     */
    @RequestResponse
    public Value getSize( Value request ) {
        Value retValue = Value.create();
        retValue.setValue( request.byteArrayValue().size() );
        return retValue;
    }

    /**
     * Operation: lists the entries of {@code request.directory}, optionally
     * filtered by {@code regex} (default ".*"), restricted to directories
     * ({@code dirsOnly}), and sorted by name ({@code order.byname}).
     */
    public Value list( Value request ) {
        String [] files = new String[]{};
        File dir = new File( request.getFirstChild( "directory" ).strValue() );
        String regex;
        if ( request.hasChildren( "regex" ) ) {
            regex = request.getFirstChild( "regex" ).strValue();
        } else {
            regex = ".*";
        }
        boolean dirsOnly;
        if ( request.hasChildren( "dirsOnly" ) ) {
            dirsOnly = request.getFirstChild( "dirsOnly" ).boolValue();
        } else {
            dirsOnly = false;
        }
        files = dir.list( new ListFilter( regex, dirsOnly ) );
        if ( request.hasChildren( "order" ) ) {
            Value order = request.getFirstChild( "order" );
            if ( files != null && order.hasChildren( "byname" ) && order.getFirstChild( "byname" ).boolValue() ) {
                Arrays.sort( files );
            }
        }
        Value response = Value.create();
        if ( files != null ) {
            ValueVector results = response.getChildren( "result" );
            for( String file : files ) {
                results.add( Value.create( file ) );
            }
        }
        return response;
    }

    /** Operation: true if the given path is a directory. */
    public Value isDirectory( Value request ) {
        File dir = new File( request.strValue() );
        Value response = Value.create();
        response.setValue( dir.isDirectory() );
        return response;
    }

    /**
     * Recursively deletes children before attempting to delete {@code file}
     * itself; returns the result of the final delete().
     */
    private boolean __deleteDir( File file ) {
        if ( file.isDirectory() ) {
            String[] children = file.list();
            for( int i = 0; i < children.length; i++ ) {
                __deleteDir( new File( file, children[ i ] ) );
            }
        };
        return file.delete();
    }

    /**
     * FilenameFilter matching names against a regex, optionally accepting
     * directories only.
     */
    private static class ListFilter implements FilenameFilter {

        private final Pattern pattern;
        private final boolean dirsOnly;

        public ListFilter( String regex, boolean dirsOnly ) {
            this.pattern = Pattern.compile( regex );
            this.dirsOnly = dirsOnly;
        }

        public boolean accept( File directory, String filename ) {
            File file = new File( directory.getAbsolutePath() + File.separator + filename );
            return pattern.matcher( filename ).matches() && (!dirsOnly || file.isDirectory());
        }
    }
}
package org.basex.query.func;

import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.zip.CRC32;
import org.basex.io.IO;
import org.basex.query.QueryContext;
import org.basex.query.QueryException;
import org.basex.query.expr.Expr;
import org.basex.query.item.B64;
import org.basex.query.item.Dbl;
import org.basex.query.item.Hex;
import org.basex.query.item.Item;
import org.basex.query.item.Itr;
import org.basex.query.item.Str;
import org.basex.query.item.Type;
import org.basex.query.item.Value;
import org.basex.query.iter.Iter;
import org.basex.query.iter.ItemIter;
import org.basex.util.Array;
import org.basex.util.ByteList;
import org.basex.util.InputInfo;
import org.basex.util.Performance;
import org.basex.util.Util;

/**
 * Implementation of the {@code util:} utility functions (dynamic evaluation,
 * profiling, base conversion, and hashing).
 */
final class FNUtil extends Fun {
  /**
   * Constructor.
   * @param ii input info
   * @param f function definition
   * @param e arguments
   */
  protected FNUtil(final InputInfo ii, final FunDef f, final Expr... e) {
    super(ii, f, e);
  }

  @Override
  public Iter iter(final QueryContext ctx) throws QueryException {
    switch(def) {
      case EVAL:     return eval(ctx);
      case RUN:      return run(ctx);
      case TO_BYTES: return bytes(ctx);
      default:       return super.iter(ctx);
    }
  }

  @Override
  public Item item(final QueryContext ctx, final InputInfo ii)
      throws QueryException {
    switch(def) {
      case MB:       return mb(ctx);
      case MS:       return ms(ctx);
      case FRM_BASE: return fromBase(ctx, ii);
      case TO_BASE:  return toBase(ctx, ii);
      case MD5:      return hash(ctx, "MD5");
      case SHA1:     return hash(ctx, "SHA");
      case CRC32:    return crc32(ctx);
      default:       return super.item(ctx, ii);
    }
  }

  /**
   * Performs the eval function.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter eval(final QueryContext ctx) throws QueryException {
    return eval(ctx, checkEStr(expr[0], ctx));
  }

  /**
   * Performs the run function: reads a query from a file and evaluates it.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter run(final QueryContext ctx) throws QueryException {
    final IO io = checkIO(expr[0], ctx);
    try {
      return eval(ctx, io.content());
    } catch(final IOException ex) {
      NODOC.thrw(input, ex.toString());
      return null;
    }
  }

  /**
   * Evaluates the specified string as a query in a fresh context.
   * @param ctx query context
   * @param qu query string
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter eval(final QueryContext ctx, final byte[] qu)
      throws QueryException {
    final QueryContext qt = new QueryContext(ctx.context);
    qt.parse(string(qu));
    qt.compile();
    return ItemIter.get(qt.iter());
  }

  /**
   * Extracts the bytes from the given xs:base64Binary data.
   * @param ctx query context
   * @return iterator
   * @throws QueryException query exception
   */
  private Iter bytes(final QueryContext ctx) throws QueryException {
    final Item it = checkItem(expr[0], ctx);
    final byte[] bin = ((B64) checkType(it, Type.B6B)).toJava();
    return new Iter() {
      /** Position. */
      int pos;
      @Override
      public Item next() {
        return pos < bin.length ? new Itr(bin[pos++], Type.BYT) : null;
      }
    };
  }

  /**
   * Measures the memory consumption for the specified expression in MB.
   * @param ctx query context
   * @return memory consumption
   * @throws QueryException query exception
   */
  private Dbl mb(final QueryContext ctx) throws QueryException {
    // check caching flag
    final boolean c = expr.length == 2 &&
      checkType(expr[1].item(ctx, input), Type.BLN).bool(input);

    // measure initial memory consumption
    Performance.gc(3);
    final long l = Performance.mem();

    // create (and, optionally, cache) result value
    Iter ir = expr[0].iter(ctx);
    final Value v = (c ? ItemIter.get(ir) : ir).finish();

    // measure resulting memory consumption
    Performance.gc(2);
    final double d = Performance.mem() - l;

    // loop through all results to avoid premature result disposal
    ir = v.iter();
    while(ir.next() != null);

    // return memory consumption in megabytes
    return Dbl.get(Math.max(0, d) / 1024 / 1024d);
  }

  /**
   * Measures the execution time for the specified expression in milliseconds.
   * @param ctx query context
   * @return time in milliseconds
   * @throws QueryException query exception
   */
  private Dbl ms(final QueryContext ctx) throws QueryException {
    // check caching flag
    final boolean c = expr.length == 2 &&
      checkType(expr[1].item(ctx, input), Type.BLN).bool(input);

    // create timer
    final Performance p = new Performance();

    // iterate (and, optionally, cache) results
    final Iter ir = expr[0].iter(ctx);
    if(c) {
      ItemIter.get(ir);
    } else {
      while(ir.next() != null);
    }

    // return measured time in milliseconds
    return Dbl.get(p.getTime() / 10000 / 100d);
  }

  /** Digits used in base conversion. */
  private static final byte[] DIGITS = {
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j',
    'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
    'u', 'v', 'w', 'x', 'y', 'z'
  };

  /**
   * Converts the given number to a string, using base
   * 2<sup>shift</sup>.
   * @param num number item
   * @param shift number of bits to use for one digit
   * @return string representation of the given number
   */
  private Str toBaseFast(final long num, final int shift) {
    final byte[] bytes = new byte[(64 + shift - 1) / shift];
    final int mask = (1 << shift) - 1;
    long n = num;
    int pos = bytes.length;
    // emit digits least-significant first, filling the array backwards
    do {
      bytes[--pos] = DIGITS[(int) (n & mask)];
      n >>>= shift;
    } while(n != 0);
    return Str.get(substring(bytes, pos));
  }

  /** BigInteger representing 2 * ({@link Long#MAX_VALUE} + 1). */
  private static final BigInteger MAX_ULONG = BigInteger.ONE.shiftLeft(64);

  /**
   * Converts the given number to a string, using the given base.
   * @param ctx query context
   * @param ii input info
   * @return string representation of the given number
   * @throws QueryException query exception
   */
  private Str toBase(final QueryContext ctx, final InputInfo ii)
      throws QueryException {
    final long num = checkItr(expr[0], ctx), base = checkItr(expr[1], ctx);
    if(base < 2 || base > 36) INVBASE.thrw(ii, base);

    // use fast variant for powers of two
    for(int i = 1, p = 2; i < 6; i++, p <<= 1)
      if(base == p) return toBaseFast(num, i);

    final ByteList tb = new ByteList();
    long n = num;
    if(n < 0) {
      // unsigned value doesn't fit in any native type...
      final BigInteger[] dr = BigInteger.valueOf(n).add(
          MAX_ULONG).divideAndRemainder(BigInteger.valueOf(base));
      n = dr[0].longValue();
      tb.add(DIGITS[dr[1].intValue()]);
    } else {
      tb.add(DIGITS[(int) (n % base)]);
      n /= base;
    }
    while (n != 0) {
      tb.add(DIGITS[(int) (n % base)]);
      n /= base;
    }

    // digits were produced least-significant first
    final byte[] res = tb.toArray();
    Array.reverse(res);
    return Str.get(res);
  }

  /**
   * Converts the given string to a number, interpreting it as an xs:integer
   * encoded in the given base.
   * @param ctx query context
   * @param ii input info
   * @return read integer
   * @throws QueryException exception
   */
  private Itr fromBase(final QueryContext ctx, final InputInfo ii)
      throws QueryException {
    final byte[] str = checkStr(expr[0], ctx);
    final long base = checkItr(expr[1], ctx);
    if(base < 2 || base > 36) INVBASE.thrw(ii, base);

    long res = 0;
    for(final byte b : str) {
      // map '0'-'9' directly, letters case-insensitively ('& 0xDF' upcases)
      final int num = b <= '9' ? b - 0x30 : (b & 0xDF) - 0x37;
      if(!(b >= '0' && b <= '9' || b >= 'a' && b <= 'z' ||
          b >= 'A' && b <= 'Z') || num >= base)
        INVDIG.thrw(ii, base, (char) (b & 0xff));

      res = res * base + num;
    }
    return Itr.get(res);
  }

  /**
   * Creates the hash of the given xs:string, using the algorithm
   * {@code algo}.
   * @param ctx query context
   * @param algo hashing algorithm
   * @return xs:hexBinary instance containing the hash
   * @throws QueryException exception
   */
  private Hex hash(final QueryContext ctx, final String algo)
      throws QueryException {
    final byte[] str = checkStr(expr[0], ctx);
    try {
      return new Hex(MessageDigest.getInstance(algo).digest(str));
    } catch(final NoSuchAlgorithmException ex) {
      Util.notexpected(ex);
      return null;
    }
  }

  /**
   * Creates the CRC32 hash of the given xs:string.
   * @param ctx query context
   * @return xs:hexBinary instance containing the hash
   * @throws QueryException exception
   */
  private Hex crc32(final QueryContext ctx) throws QueryException {
    final CRC32 crc = new CRC32();
    crc.update(checkStr(expr[0], ctx));
    // big-endian serialization of the 32-bit checksum
    final byte[] res = new byte[4];
    for(int i = res.length, c = (int) crc.getValue(); i-- > 0; c >>>= 8)
      res[i] = (byte) (c & 0xFF);
    return new Hex(res);
  }

  @Override
  public boolean uses(final Use u) {
    // FIX: the second operand previously duplicated FunDef.MB; the profiling
    // pair is MB and MS — both evaluate their argument expression and thus
    // depend on the context, like EVAL and RUN.
    return u == Use.CTX && (def == FunDef.MB || def == FunDef.MS ||
        def == FunDef.EVAL || def == FunDef.RUN) || super.uses(u);
  }
}
package com.google.appengine.gcloudapp; import com.google.appengine.repackaged.com.google.api.client.util.Throwables; import com.google.appengine.repackaged.com.google.common.io.ByteStreams; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import java.io.File; import java.io.IOException; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.List; /** * Runs the App Engine development server. * * @author Ludo * @goal run * @execute phase="package" * @threadSafe false */ public class GCloudAppRun extends AbstractGcloudMojo { /** * The host and port on which to start the API server (in the format * host:port) * * @parameter expression="${gcloud.api_host}" */ private String api_host; /** * Additional directories containing App Engine modules to be run. * * @parameter */ private List<String> modules; /** * The host and port on which to start the local web server (in the format * host:port) * * @parameter expression="${gcloud.host}" */ private String host; /** * The host and port on which to start the admin server (in the format * host:port) * * @parameter expression="${gcloud.admin_host}" */ private String admin_host; /** * The default location for storing application data. Can be overridden for * specific kinds of data using --datastore-path, --blobstore-path, and/or * --logs-path * * @parameter expression="${gcloud.storage_path}" */ private String storage_path; /** * The minimum verbosity of logs from your app that will be displayed in the * terminal. (debug, info, warning, critical, error) Defaults to current * verbosity setting. 
* * @parameter expression="${gcloud.log_level}" */ private String log_level; /** * Path to a file used to store request logs (defaults to a file in * --storage-path if not set) * * @parameter expression="${gcloud.logs_path}" */ private String logs_path; /** * name of the authorization domain to use (default: gmail.com) * * @parameter expression="${gcloud.auth_domain}" */ private String auth_domain; /** * the maximum number of runtime instances that can be started for a * particular module - the value can be an integer, in what case all modules * are limited to that number of instances or a comma-separated list of * module:max_instances e.g. "default:5,backend:3" (default: None) * * @parameter expression="${gcloud.max_module_instances}" */ private String max_module_instances; /** * email address associated with a service account that has a downloadable * key. May be None for no local application identity. (default: None) * * @parameter expression="${gcloud.appidentity_email_address}" */ private String appidentity_email_address; /** * path to private key file associated with service account (.pem format). * Must be set if appidentity_email_address is set. (default: None) * * * @parameter expression="${gcloud.appidentity_private_key_path}" */ private String appidentity_private_key_path; /** * path to gcloud_directory used to store blob contents (defaults to a * subdirectory of --storage_path if not set) (default: None) * * @parameter expression="${gcloud.blobstore_path}" */ private String blobstore_path; /** * path to a file used to store datastore contents (defaults to a file in * --storage_path if not set) (default: None) * * @parameter expression="${gcloud.datastore_path}" */ private String datastore_path; /** * clear the datastore on startup (default: False) * * * @parameter expression="${gcloud.clear_datastore}" */ private boolean clear_datastore; /** * make files specified in the app.yaml "skip_files" or "static" handles * readable by the application. 
(default: False) * * @parameter expression="${gcloud.allow_skipped_files}" */ private boolean allow_skipped_files; /** * Enable logs collection and display in local Admin Console for Managed VM * modules. * * @parameter expression="${gcloud.enable_mvm_logs}" */ private boolean enable_mvm_logs; /** * Use the "sendmail" tool to transmit e-mail sent using the Mail API (ignored * if --smtp-host is set) * * @parameter expression="${gcloud.enable_sendmail}" */ private boolean enable_sendmail; /** * Use mtime polling for detecting source code changes - useful if modifying * code from a remote machine using a distributed file system * * @parameter expression="${gcloud.use_mtime_file_watcher}" */ private boolean use_mtime_file_watcher; /** * JVM_FLAG Additional arguments to pass to the java command when launching an * instance of the app. May be specified more than once. Example: * &lt;jvm_flag&gt; &lt;param&gt;-Xmx1024m&lt;/param&gt; * &lt;param&gt;-Xms256m&lt;/param&gt; &lt;/jvm_flag&gt; Note: This is not for * Java Managed VMs applications. Please use a Dockerfile for that. 
* * @parameter */ private List<String> jvm_flag; /** * default Google Cloud Storage bucket name (default: None) * * @parameter expression="${gcloud.default_gcs_bucket_name}" */ private String default_gcs_bucket_name; /** * enable_cloud_datastore * * @parameter expression="${gcloud.enable_cloud_datastore}" */ private boolean enable_cloud_datastore; /** * datastore_consistency_policy The policy to apply when deciding whether a * datastore write should appear in global queries (default="time") * * @parameter expression="${gcloud.datastore_consistency_policy}" */ private String datastore_consistency_policy; /** * The full path to the PHP executable to use to run your PHP module * * @parameter expression="${gcloud.php_executable_path}" */ private String php_executable_path; /** * The script to run at the startup of new Python runtime instances (useful * for tools such as debuggers) * * @parameter expression="${gcloud.python_startup_script}" */ private String python_startup_script; /** * Generate an error on datastore queries that require a composite index not * found in index.yaml * * @parameter expression="${gcloud.require_indexes}" */ private boolean require_indexes; /** * Logs the contents of e-mails sent using the Mail API * * @parameter expression="${gcloud.show_mail_body}" */ private boolean show_mail_body; /** * Allow TLS to be used when the SMTP server announces TLS support (ignored if * --smtp-host is not set) * * @parameter expression="${gcloud.smtp_allow_tls}" */ private boolean smtp_allow_tls; /** * The host and port of an SMTP server to use to transmit e-mail sent using * the Mail API, in the format host:port * * @parameter expression="${gcloud.smtp_host}" */ private String smtp_host; /** * Password to use when connecting to the SMTP server specified with * --smtp-host * * @parameter expression="${gcloud.smtp_password}" */ private String smtp_password; /** * Username to use when connecting to the SMTP server specified with * --smtp-host * * @parameter 
expression="${gcloud.smtp_user}" */ private String smtp_user; /** * The location of the appengine application to run. * * @parameter expression="${gcloud.application_directory}" */ protected String application_directory; @Override public void execute() throws MojoExecutionException, MojoFailureException { getLog().info(""); if (application_directory == null) { application_directory = maven_project.getBuild().getDirectory() + "/" + maven_project.getBuild().getFinalName(); } File appDirFile = new File(application_directory); if (!appDirFile.exists()) { File f = new File(maven_project.getBasedir(), application_directory); if (f.exists()) { application_directory = f.getAbsolutePath(); } else { throw new MojoExecutionException("The application directory does not exist : " + application_directory); } } if (!appDirFile.isDirectory()) { throw new MojoExecutionException("The application directory is not a directory : " + application_directory); } //Just before starting, just to make sure, shut down any running devserver on this port. 
stopDevAppServer(); ArrayList<String> devAppServerCommand = getCommand(application_directory); startCommand(appDirFile, devAppServerCommand, WaitDirective.WAIT_SERVER_STOPPED); } @Override protected ArrayList<String> getCommand(String appDir) throws MojoExecutionException { getLog().info("Running gcloud app run..."); ArrayList<String> devAppServerCommand = new ArrayList<>(); setupInitialCommands(devAppServerCommand); devAppServerCommand.add("run"); File f = new File(appDir, "WEB-INF/appengine-web.xml"); if (!f.exists()) { // EAR project possibly, add all modules one by one: File ear = new File(appDir); boolean oneMod = false; for (File w : ear.listFiles()) { if (new File(w, "WEB-INF/appengine-web.xml").exists()) { devAppServerCommand.add(w.getAbsolutePath()); oneMod = true; } } if (!oneMod) { devAppServerCommand.add(appDir); } } else { // Point to our application devAppServerCommand.add(appDir); } if ((modules != null) && !modules.isEmpty()) { for (String modDir : modules) { getLog().info("Running gcloud app run with extra module in " + modDir); devAppServerCommand.add(modDir); } } setupExtraCommands(devAppServerCommand); // Add in additional options for starting the DevAppServer if (admin_host != null) { devAppServerCommand.add("--admin-host=" + admin_host); } if (api_host != null) { devAppServerCommand.add("--api-host=" + api_host); } if (storage_path != null) { devAppServerCommand.add("--storage-path=" + storage_path); } if (host != null) { devAppServerCommand.add("--host=" + host); } if (admin_host != null) { devAppServerCommand.add("--admin-host=" + admin_host); } if (storage_path != null) { devAppServerCommand.add("--storage-path=" + storage_path); } if (log_level != null) { devAppServerCommand.add("--log-level=" + log_level); } if (logs_path != null) { devAppServerCommand.add("--logs-path=" + logs_path); } if (auth_domain != null) { devAppServerCommand.add("--auth-domain=" + auth_domain); } if (max_module_instances != null) { 
devAppServerCommand.add("--max-module-instances=" + max_module_instances); } if (appidentity_email_address != null) { devAppServerCommand.add("--appidentity-email-address=" + appidentity_email_address); } if (appidentity_private_key_path != null) { devAppServerCommand.add("--appidentity-private-key-path=" + appidentity_private_key_path); } if (blobstore_path != null) { devAppServerCommand.add("--blobstore-path=" + blobstore_path); } if (datastore_path != null) { devAppServerCommand.add("--datastore-path=" + datastore_path); } if (clear_datastore) { devAppServerCommand.add("--clear-datastore"); } if (allow_skipped_files) { devAppServerCommand.add("--allow-skipped-files"); } if (enable_mvm_logs) { devAppServerCommand.add("--enable-mvm-logs"); } if (enable_sendmail) { devAppServerCommand.add("--enable-sendmail"); } if (use_mtime_file_watcher) { devAppServerCommand.add("--use-mtime-file-watcher"); } if ((jvm_flag != null) && !jvm_flag.isEmpty()) { for (String opt : jvm_flag) { devAppServerCommand.add("--jvm-flag=" + opt); } } if (default_gcs_bucket_name != null) { devAppServerCommand.add("--default-gcs-bucket-name=" + default_gcs_bucket_name); } if (enable_cloud_datastore) { devAppServerCommand.add("--enable-cloud-datastore"); } if (datastore_consistency_policy != null) { devAppServerCommand.add("--datastore-consistency-policy=" + datastore_consistency_policy); } if (php_executable_path != null) { devAppServerCommand.add("--php-executable-path=" + php_executable_path); } if (python_startup_script != null) { devAppServerCommand.add("--python-startup-script=" + python_startup_script); } if (require_indexes) { devAppServerCommand.add("--require-indexes"); } if (show_mail_body) { devAppServerCommand.add("--show-mail-body"); } if (smtp_allow_tls) { devAppServerCommand.add("--smtp-allow-tls"); } if (smtp_host != null) { devAppServerCommand.add("--smtp-host=" + smtp_host); } if (smtp_password != null) { devAppServerCommand.add("--smtp-password=" + smtp_password); } if 
(smtp_user != null) { devAppServerCommand.add("--smtp-user=" + smtp_user); } return devAppServerCommand; } protected void stopDevAppServer() throws MojoExecutionException { HttpURLConnection connection; try { String ad = "localhost"; if (host != null) { String[] parts = host.split(":"); ad = parts[0]; } URL url = new URL("http", ad, 8000, "/quit"); connection = (HttpURLConnection) url.openConnection(); connection.setDoOutput(true); connection.setDoInput(true); connection.setRequestMethod("GET"); // connection.getOutputStream().write(110); ByteStreams.toByteArray(connection.getInputStream()); // connection.getOutputStream().flush(); // connection.getOutputStream().close(); // connection.getInputStream().close(); connection.disconnect(); getLog().info("Shutting down Cloud SDK Server on port " + 8000 + " and waiting 4 seconds..."); Thread.sleep(4000); } catch (MalformedURLException e) { throw new MojoExecutionException("URL malformed attempting to stop the devserver : " + e.getMessage()); } catch (IOException e) { getLog().debug("Was not able to contact the devappserver to shut it down. Most likely this is due to it simply not running anymore. ", e); } catch (InterruptedException e) { Throwables.propagate(e); } } }
package org.jbox2d.common;

/**
 * Global tuning constants based on MKS units and various integer maximums (vertices per shape,
 * pairs, etc.).
 */
public class Settings {

  /** A "close to zero" float epsilon value for use */
  public static final float EPSILON = 1.1920928955078125E-7f;

  /** Pi. */
  public static final float PI = (float) Math.PI;

  // JBox2D specific settings
  // Toggles for the approximate fast-math variants in MathUtils.
  public static boolean FAST_ABS = true;
  public static boolean FAST_FLOOR = true;
  public static boolean FAST_CEIL = true;
  public static boolean FAST_ROUND = true;
  public static boolean FAST_ATAN2 = true;
  public static boolean FAST_POW = true;
  public static int CONTACT_STACK_INIT_SIZE = 10;
  public static boolean SINCOS_LUT_ENABLED = true;
  /**
   * The smaller the precision, the larger the table. If a small table is used (eg, precision is
   * .006 or greater), make sure you set the table to lerp its results. Accuracy chart is in the
   * MathUtils source. Or, run the tests yourself in {@link SinCosTest}.<br>
   * <br>
   * Good lerp precision values:
   * <ul>
   * <li>.0092</li>
   * <li>.008201</li>
   * <li>.005904</li>
   * <li>.005204</li>
   * <li>.004305</li>
   * <li>.002807</li>
   * <li>.001508</li>
   * <li>9.32500E-4</li>
   * <li>7.48000E-4</li>
   * <li>8.47000E-4</li>
   * <li>.0005095</li>
   * <li>.0001098</li>
   * <li>9.50499E-5</li>
   * <li>6.08500E-5</li>
   * <li>3.07000E-5</li>
   * <li>1.53999E-5</li>
   * </ul>
   */
  public static final float SINCOS_LUT_PRECISION = .00011f;
  /** Number of table entries needed to cover a full revolution at the chosen precision. */
  public static final int SINCOS_LUT_LENGTH = (int) Math.ceil(Math.PI * 2 / SINCOS_LUT_PRECISION);
  /**
   * Use if the table's precision is large (eg .006 or greater). Although it is more expensive, it
   * greatly increases accuracy. Look in the MathUtils source for some test results on the accuracy
   * and speed of lerp vs non lerp. Or, run the tests yourself in {@link SinCosTest}.
   */
  public static boolean SINCOS_LUT_LERP = false;


  // Collision

  /**
   * The maximum number of contact points between two convex shapes.
   */
  public static int maxManifoldPoints = 2;

  /**
   * The maximum number of vertices on a convex polygon.
   */
  public static int maxPolygonVertices = 8;

  /**
   * This is used to fatten AABBs in the dynamic tree. This allows proxies to move by a small amount
   * without triggering a tree adjustment. This is in meters.
   */
  public static float aabbExtension = 0.1f;

  /**
   * This is used to fatten AABBs in the dynamic tree. This is used to predict the future position
   * based on the current displacement. This is a dimensionless multiplier.
   */
  public static float aabbMultiplier = 2.0f;

  /**
   * A small length used as a collision and constraint tolerance. Usually it is chosen to be
   * numerically significant, but visually insignificant.
   */
  public static float linearSlop = 0.005f;

  /**
   * A small angle used as a collision and constraint tolerance. Usually it is chosen to be
   * numerically significant, but visually insignificant.
   */
  public static float angularSlop = (2.0f / 180.0f * PI);

  /**
   * The radius of the polygon/edge shape skin. This should not be modified. Making this smaller
   * means polygons will have an insufficient buffer for continuous collision. Making it larger may
   * create artifacts for vertex collision.
   */
  public static float polygonRadius = (2.0f * linearSlop);

  /** Maximum number of sub-steps per contact in continuous physics simulation. */
  public static int maxSubSteps = 8;

  // Dynamics

  /**
   * Maximum number of contacts to be handled to solve a TOI island.
   */
  public static int maxTOIContacts = 32;

  /**
   * A velocity threshold for elastic collisions. Any collision with a relative linear velocity
   * below this threshold will be treated as inelastic.
   */
  public static float velocityThreshold = 1.0f;

  /**
   * The maximum linear position correction used when solving constraints. This helps to prevent
   * overshoot.
   */
  public static float maxLinearCorrection = 0.2f;

  /**
   * The maximum angular position correction used when solving constraints. This helps to prevent
   * overshoot.
   */
  public static float maxAngularCorrection = (8.0f / 180.0f * PI);

  /**
   * The maximum linear velocity of a body. This limit is very large and is used to prevent
   * numerical problems. You shouldn't need to adjust this.
   */
  public static float maxTranslation = 2.0f;
  public static float maxTranslationSquared = (maxTranslation * maxTranslation);

  /**
   * The maximum angular velocity of a body. This limit is very large and is used to prevent
   * numerical problems. You shouldn't need to adjust this.
   */
  public static float maxRotation = (0.5f * PI);
  public static float maxRotationSquared = (maxRotation * maxRotation);

  /**
   * This scale factor controls how fast overlap is resolved. Ideally this would be 1 so that
   * overlap is removed in one time step. However using values close to 1 often lead to overshoot.
   */
  public static float baumgarte = 0.2f;
  // NOTE(review): "Baugarte" is a typo for "Baumgarte", but the field is a
  // public API name and cannot be renamed without breaking callers.
  public static float toiBaugarte = 0.75f;


  // Sleep

  /**
   * The time that a body must be still before it will go to sleep.
   */
  public static float timeToSleep = 0.5f;

  /**
   * A body cannot sleep if its linear velocity is above this tolerance.
   */
  public static float linearSleepTolerance = 0.01f;

  /**
   * A body cannot sleep if its angular velocity is above this tolerance.
   */
  public static float angularSleepTolerance = (2.0f / 180.0f * PI);

  /**
   * Friction mixing law. Feel free to customize this. TODO djm: add customization
   *
   * @param friction1 friction of the first shape
   * @param friction2 friction of the second shape
   * @return combined friction (geometric mean)
   */
  public static float mixFriction(float friction1, float friction2) {
    return MathUtils.sqrt(friction1 * friction2);
  }

  /**
   * Restitution mixing law. Feel free to customize this. TODO djm: add customization
   *
   * @param restitution1 restitution of the first shape
   * @param restitution2 restitution of the second shape
   * @return combined restitution (the larger of the two)
   */
  public static float mixRestitution(float restitution1, float restitution2) {
    return restitution1 > restitution2 ? restitution1 : restitution2;
  }
}
package org.fungsi.concurrent; import org.fungsi.Either; import org.fungsi.Throwables; import java.time.Duration; import java.util.Deque; import java.util.Optional; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.StampedLock; import java.util.function.Consumer; import java.util.function.Function; final class PromiseImpl<T> implements Promise<T> { private Either<T, Throwable> result; private final StampedLock resultLock = new StampedLock(); private final CountDownLatch resultLatch = new CountDownLatch(1); private Deque<Consumer<Either<T, Throwable>>> responders = new ConcurrentLinkedDeque<>(); private final StampedLock respondersLock = new StampedLock(); @Override public Optional<Either<T, Throwable>> poll() { long stamp = resultLock.tryOptimisticRead(); Either<T, Throwable> result = this.result; if (resultLock.validate(stamp)) { return Optional.ofNullable(result); } stamp = resultLock.readLock(); try { return Optional.ofNullable(result); } finally { resultLock.unlockRead(stamp); } } @Override public T get() { try { resultLatch.await(); return Either.unsafe(result); } catch (InterruptedException e) { throw Throwables.propagate(e); } } @Override public T get(Duration timeout) { try { if (!resultLatch.await(timeout.toNanos(), TimeUnit.NANOSECONDS)) { throw new TimeoutException(timeout.toString()); } return Either.unsafe(result); } catch (InterruptedException e) { throw Throwables.propagate(e); } } @Override public void set(Either<T, Throwable> result) { long stamp; stamp = resultLock.writeLock(); this.result = result; resultLock.unlockWrite(stamp); stamp = respondersLock.writeLock(); while (!responders.isEmpty()) { responders.pollFirst().accept(result); } respondersLock.unlockWrite(stamp); } @Override public void respond(Consumer<Either<T, Throwable>> fn) { Deque<Consumer<Either<T, Throwable>>> responders; responders = this.responders; if (responders 
== null) { fn.accept(result); return; } long stamp = respondersLock.tryOptimisticRead(); responders = this.responders; if (respondersLock.validate(stamp)) { if (responders == null) { fn.accept(result); } else { responders.addLast(fn); } return; } stamp = respondersLock.readLock(); respondersLock.unlockRead(stamp); fn.accept(result); } @Override public <TT> Future<TT> transform(Function<Either<T, Throwable>, Future<TT>> fn) { return new TransformedFuture<>(this, fn); } }
package org.drpowell.vcf; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; import java.util.logging.Level; import java.util.logging.Logger; import org.drpowell.util.CustomPercentEncoder; /** * Representation of a single row of a VCF file * * INFO flag fields will be set to the special value 'FLAG_INFO' if set * * @author bpow */ public class VCFVariant { private Map<String, String[]> info; private String qual; private String [] row; private int start; // fixme should this be final? private int end; private boolean urlEncode = true; private volatile double [][] logLikelihoods; private static final String [] FLAG_INFO = new String[0]; private static final CustomPercentEncoder INFO_ENCODER = CustomPercentEncoder.allowAsciiPrintable(true).recodeAdditionalCharacters(" ;=".toCharArray()); private String [] formatKeys; private String [][] splitCalls; public VCFVariant(String line) { this(line.split("\t", -1)); } public VCFVariant(String [] row) { this.row = row; // FIXME - should defensive copy? 
start = Integer.parseInt(row[VCFParser.VCFFixedColumns.POS.ordinal()]); end = start + getRef().length() - 1; info = splitInfoField(row[VCFParser.VCFFixedColumns.INFO.ordinal()]); formatKeys = getFormat().split(":"); } public static Map<String, String[]> splitInfoField(String info) { Map<String, String[]> map = new LinkedHashMap<String, String[]>(); if (".".equals(info)) { return map; } String [] entries = info.split(";"); for (String entry : entries) { String [] keyvalue = entry.split("=",2); if (map.containsKey(keyvalue[0])) { String message = "VCF spec does not allow for duplicated keys [ " + keyvalue[0] + " ] in the INFO field of a VCF:\n " + info; Logger.getLogger(VCFVariant.class.getName()).log(Level.WARNING, message); //throw new RuntimeException(message); } if (keyvalue.length == 1) { map.put(keyvalue[0], FLAG_INFO); } else { map.put(keyvalue[0], keyvalue[1].split(",")); } } return map; } public static String joinInfo(Map<String, String []> info) { if (info.size() == 0) { return "."; } StringBuilder sb = new StringBuilder(); for (Entry<String, String[]> e: info.entrySet()) { if (e.getValue() == FLAG_INFO) { sb.append(e.getKey()).append(";"); } else { sb.append(e.getKey()).append("=").append(join(",",decodeInfo(false, e.getValue()))).append(";"); } } return sb.substring(0, sb.length()-1); // no need for the last semicolon } /** * Add an item to the VCF variant. * * @param key - the ID of the data, this should be defined in the VCF header * @param values - one or more values (if null, this entry will be treated as a Flag) * @return this VCFVariant, to facilitate chaining */ public VCFVariant putInfo(String key, String... 
values) { if (null == values || values.length == 0 || null == values[0] || "".equals(values[0])) { values = FLAG_INFO; } else { values = encodeInfo(urlEncode, values); } info.put(key, values); return this; } public VCFVariant putInfoFlag(String key) { info.put(key, FLAG_INFO); return this; } public Double getQual() { return Double.valueOf(qual); } private void updateInfo() { row[VCFParser.VCFFixedColumns.INFO.ordinal()] = joinInfo(info); } public String toString() { updateInfo(); StringBuilder sb = new StringBuilder(row[0]); for (int i = 1; i < row.length; i++) { sb.append("\t").append(row[i]); } return sb.toString(); } /** * Returns the value of one of the fixed columns of a vcf file. * @see VCFParser.VCFFixedColumns */ public String getFixedColumn(int i) { if (i >= VCFParser.VCFFixedColumns.SIZE) { throw new NoSuchElementException("Tried to access an invalid column in a VCF file"); } return row[i]; } public String getSequence() { return row[VCFParser.VCFFixedColumns.CHROM.ordinal()]; } public int getStart() { return start; } public int getEnd() { return end; } public String getID() { return row[VCFParser.VCFFixedColumns.ID.ordinal()]; } public String getRef() { return row[VCFParser.VCFFixedColumns.REF.ordinal()]; } public String getAlt() { return row[VCFParser.VCFFixedColumns.ALT.ordinal()]; } public String getFilter() { return row[VCFParser.VCFFixedColumns.FILTER.ordinal()]; } public VCFVariant addFilter(String f) { String old = getFilter(); if (old.equals("") || old.equals(".") || old.equals("PASS")) { return setFilter(f); } return setFilter(old + "," + f); } private VCFVariant setFilter(String f) { row[VCFParser.VCFFixedColumns.FILTER.ordinal()] = f; return this; } public String getFormat() { return row[VCFParser.VCFFixedColumns.FORMAT.ordinal()]; } private final int findFormatItemIndex(String key) { for (int i = 0; i < formatKeys.length; i++) { if (key.equals(formatKeys[i])) return i; } return -1; } public static int[] PLfromGL(double [] GLs) { final int[] 
pls = new int[GLs.length]; int min = 255; for ( int i = 0; i < GLs.length; i++ ) { pls[i] = Math.min((int) Math.round(-10 * GLs[i]), 255); min = Math.min(pls[i], min); } for ( int i = 0; i < GLs.length; i++ ) { pls[i] -= min; } return pls; } private double [][] extractLikelihoods() { // i indexes sample, j indexes individual likelihood boolean foundGL = false; int index = findFormatItemIndex("GL"); if (index >= 0) { foundGL = true; } else { index = findFormatItemIndex("PL"); if (index < 0) { // didn't find GL or PL... but if we were to return 'null', someone might try again return new double[0][0]; } } String [] calls = getCalls(); double [][] res = new double[calls.length][]; for (int i = 0; i < res.length; i++) { String [] callFields = calls[i].split(":"); if (index >= callFields.length) { // no call for this sample res[i] = null; } else { res[i] = VCFUtils.parseDoubleList(callFields[index]); if (!foundGL) { for (int j = 0; j < res[i].length; j++) { res[i][j] /= -10.0; } } } } return res; } public double [][] getGenotypeLikelihoods() { double [][] result = logLikelihoods; if (null == result) { synchronized(this) { result = logLikelihoods; if (null == result) { result = logLikelihoods = extractLikelihoods(); } } } if (result.length == 0) return null; return result; } public List<String> getRow() { return Collections.unmodifiableList(Arrays.asList(row)); } public VCFVariant mergeID(String newID) { int idcol = VCFParser.VCFFixedColumns.ID.ordinal(); String oldID = row[idcol]; if (!".".equals(oldID)) { if (oldID.equals(newID)) { return this; } // should probably log this -- changing a previously-written rsID } row[idcol] = newID; return this; } public String [] getCalls() { int num = row.length - VCFParser.VCFFixedColumns.SIZE; if (num <= 0) { return new String[0]; } else { return Arrays.copyOfRange(row, VCFParser.VCFFixedColumns.SIZE, row.length); } } public String getGenotypeValue(int sampleIndex, String key) { if (splitCalls == null) { String [] callStrings = 
getCalls(); String [][] calls = new String[callStrings.length][]; for (int i = 0; i < calls.length; i++) { calls[i] = callStrings[i].split(":"); } splitCalls = calls; } return splitCalls[sampleIndex][findFormatItemIndex(key)]; } public String getGenotype(int sampleIndex) { if (!getFormat().startsWith("GT")) return null; // FIXME log? exception? String call = row[sampleIndex + VCFParser.VCFFixedColumns.SIZE]; int colon = call.indexOf(':'); return colon < 0 ? call : call.substring(0, colon); } private String phaseCall(String oldCall, int phase) { int delim = oldCall.indexOf('/'); if (delim < 0) delim = oldCall.indexOf('|'); if (delim < 0) delim = oldCall.indexOf('\\'); if (delim < 0) { Logger.getLogger(getClass().getName()).fine("Unable to phase [" + oldCall + "] because I could not find a delimiter"); return oldCall; } try { int a = Integer.parseInt(oldCall.substring(0, delim)); int b = Integer.parseInt(oldCall.substring(delim+1)); if (b < a) { a ^= b; b ^= a; a ^= b; // obscure swap, make sure a is less than b } String outDelim = phase == 0 ? 
"/" : "|"; if (phase < 0) { return Integer.toString(b) + outDelim + Integer.toString(a); } else { return Integer.toString(a) + outDelim + Integer.toString(b); } } catch (NumberFormatException nfe) { Logger.getLogger(VCFVariant.class.getName()).log(Level.FINE, "Tried to phase a non-numeric call: " + oldCall); } return oldCall; } public VCFVariant setPhases(int [] sampleIndices, int [] phases) { // TODO - decide if I really want this to be mutable or to return a new VCFVariant if (sampleIndices.length != phases.length) { throw new RuntimeException("attempted to set phases for samplenum != phasenum"); } if (!getFormat().startsWith("GT:")) { throw new RuntimeException("GT must be the first element of VCF file per the spec (if present), unable to set phase as requested"); } int offset = VCFParser.VCFFixedColumns.SIZE; for (int i = 0; i < phases.length; i++) { String sampleRecord = row[offset + sampleIndices[i]]; int colonPos = sampleRecord.indexOf(':'); if (colonPos < 0) colonPos = sampleRecord.length(); String call = phaseCall(sampleRecord.substring(0, colonPos), phases[i]); row[offset+sampleIndices[i]] = call + sampleRecord.substring(colonPos); } return this; } public String [] getInfoValues(boolean urlDecode, String key) { return decodeInfo(urlDecode, info.get(key)); } public String getInfoValue(String key) { return getInfoValue(key, true); } /** * Return the value within the INFO dictionary for a given key, optionally performing urlDecoding * * @param key * @return null if key not present, "" for flag fields, the encoded value otherwise */ public String getInfoValue(String key, boolean urlDecode) { String [] vals = info.get(key); if (vals == FLAG_INFO) return ""; if (vals == null) return null; vals = decodeInfo(urlDecode, vals); return join(",", vals); } public boolean hasInfo(String key) { return info.containsKey(key); } public static final String [] decodeInfo(boolean urlDecode, String... 
values) { if (urlDecode) { String [] decoded = new String[values.length]; for (int i = 0; i < values.length; i++) { decoded[i] = INFO_ENCODER.decode(values[i]); } values = decoded; } return values; } public static final String [] encodeInfo(boolean urlEncode, String... values) { if (urlEncode) { String [] encoded = new String[values.length]; for (int i = 0; i < values.length; i++) { encoded[i] = INFO_ENCODER.encode(values[i]); } values = encoded; } return values; } private static String join(String sep, String... strings) { if (strings.length == 0) return ""; if (strings.length == 1) return strings[0]; StringBuilder sb = new StringBuilder(); for (String s: strings) { sb.append(",").append(s); } return sb.substring(1); } }
package com.plivo.examples.multipartycall; import com.plivo.api.Plivo; import com.plivo.api.exceptions.PlivoRestException; import com.plivo.api.exceptions.PlivoValidationException; import com.plivo.api.models.base.ListResponse; import com.plivo.api.models.multipartycall.MultiPartyCall; import com.plivo.api.models.multipartycall.MultiPartyCallUtils; import com.plivo.api.util.PropertyFilter; import java.io.IOException; import java.text.SimpleDateFormat; import java.time.OffsetDateTime; import java.time.format.DateTimeFormatter; import java.util.Date; public class ListMPC { private static boolean checkTime(String timeString) { return OffsetDateTime.parse(timeString, DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ssXXXXX")).isBefore(OffsetDateTime.now().minusHours(1)); } public static void main(String[] args) throws IOException, PlivoRestException, PlivoValidationException, NoSuchMethodException { Plivo.init("<YOUR-AUTH-ID>", "<YOUR-AUTH-TOKEN>"); // Fetch all MultiPartyCalls for an account ListResponse<MultiPartyCall> allMPC = MultiPartyCall.lister().list(); System.out.println(allMPC.getMeta().getTotalCount()); // Fetch only ended MultiPartyCalls for an account. Other possible status are (initialized, active) ListResponse<MultiPartyCall> endedMPC = MultiPartyCall.lister().status(MultiPartyCallUtils.ended).list(); if (endedMPC.getObjects().stream().allMatch(mpc -> mpc.getStatus().equals(MultiPartyCallUtils.ended))) { System.out.println("fetched only ended MPC"); } else { System.out.println("failed to fetch only ended MPC"); } ListResponse<MultiPartyCall> previousHourMPC = MultiPartyCall.lister(). endTime(new PropertyFilter<String>().lessThan(new SimpleDateFormat("yyyy-MM-dd hh:mm"). 
format(new Date(System.currentTimeMillis() - 3600 * 1000)))).list(); if (previousHourMPC.getObjects().stream().allMatch(mpc -> ListMPC.checkTime(mpc.getEndTime()))) { System.out.println("fetched only MPC older than 1 hour"); } else { System.out.println("failed to fetch MPC older than 1 hour"); } } }
package com.jenjinstudios.jgsf;

import com.jenjinstudios.jgsf.message.ServerExecutableMessage;
import com.jenjinstudios.message.ExecutableMessage;
import com.jenjinstudios.message.Message;
import com.jenjinstudios.net.Communicator;

import java.io.IOException;
import java.net.Socket;
import java.util.LinkedList;
import java.util.logging.Level;

/**
 * The {@code ClientHandler} class is used to communicate with an individual client.
 *
 * @author Caleb Brinkman
 */
public class ClientHandler extends Communicator
{
	/** The list of messages to be broadcast after the world update. Access is synchronized on the list itself. */
	private final LinkedList<Message> broadcastMessages;
	/** The server. */
	private final Server<? extends ClientHandler> server;
	/** Flags whether the socket is connected. */
	private boolean linkOpen;
	/** The id of the client handler. -1 until assigned via {@link #setID(int)}. */
	private int handlerId = -1;
	/** Flags whether the user is logged in. */
	private boolean loggedIn;
	/** The username of this client. */
	private String username;
	/** The time at which this client was successfully logged in. */
	private long loggedInTime;

	/**
	 * Construct a new Client Handler using the given socket.  When constructing a new ClientHandler, it is necessary to
	 * send the client a FirstConnectResponse message with the server's UPS
	 *
	 * @param s  The server for which this handler works.
	 * @param sk The socket used to communicate with the client.
	 *
	 * @throws IOException If the socket is unable to connect.
	 */
	public ClientHandler(Server<? extends ClientHandler> s, Socket sk) throws IOException
	{
		// Name is set before the socket so the thread name is meaningful even if setSocket throws.
		setName("ClientHandler: " + sk.getInetAddress());
		server = s;
		super.setSocket(sk);
		broadcastMessages = new LinkedList<>();
		linkOpen = true;

		// First message tells the client the server's updates-per-second rate.
		Message firstConnectResponse = new Message("FirstConnectResponse");
		firstConnectResponse.setArgument("ups", server.UPS);
		queueMessage(firstConnectResponse);
	}

	/**
	 * Add a message to the broadcast queue, to be sent at the next broadcast.
	 *
	 * @param o The object (message) to be sent to the client.
	 */
	public void queueMessage(Message o)
	{
		synchronized (broadcastMessages)
		{
			broadcastMessages.add(o);
		}
	}

	/**
	 * Set the id for this handler.
	 *
	 * @param id The new id number for the handler.
	 */
	public void setID(int id)
	{
		handlerId = id;
		super.setName("Client Handler " + handlerId);
	}

	/** Update anything that needs to be taken care of before broadcast. */
	@SuppressWarnings("EmptyMethod")
	public void update()
	{
	}

	/** Reset anything that needs to be taken care of after broadcast. */
	@SuppressWarnings("EmptyMethod")
	public void refresh()
	{
	}

	/** Send all messages in the message queue to the client. Drains the queue under its lock. */
	public void broadcast()
	{
		synchronized (broadcastMessages)
		{
			while (!broadcastMessages.isEmpty())
			{
				sendMessage(broadcastMessages.remove());
			}
		}
	}

	/**
	 * Send the specified message to the client.  Any failure to write is treated as a
	 * dead connection and triggers {@link #shutdown()}.
	 *
	 * @param o The message to send to the client.
	 */
	public void sendMessage(Message o)
	{
		try
		{
			getOutputStream().writeMessage(o);
		} catch (Exception ex)
		{
			shutdown();
		}
	}

	/** Shut down the client handler: emergency-logout the user if needed, close the link, deregister from the server. */
	public void shutdown()
	{
		// Try and log out if not already.  This is an "emergency" logout because the connection closed without a
		// proper logout, so we handle the query directly instead of in an executable message.
		// This is a big no-no, but this can be caused by an unexpected server or client shutdown, which means that
		// there may not be time to finish any executable messages created.  I'm not happy about it but there it is.
		// TODO Make this better.
		if (isLoggedIn())
			loggedIn = !server.getSqlHandler().logOutUser(username);
		closeLink();
		getServer().removeClient(this);
	}

	/**
	 * Flags whether the user is logged in.
	 *
	 * @return true if the user is logged in.
	 */
	public boolean isLoggedIn()
	{
		return loggedIn;
	}

	/**
	 * The server.
	 *
	 * @return The server for which this client handler works.
	 */
	public Server<? extends ClientHandler> getServer()
	{
		return server;
	}

	/** Close the link with the client, if possible.  Idempotent: subsequent calls are no-ops. */
	protected void closeLink()
	{
		if (linkOpen)
		{
			super.closeLink();
		}
		linkOpen = false;
	}

	/**
	 * Record the success or failure of a login attempt.  On success the login time is taken
	 * from the server's current cycle start.
	 *
	 * @param success Whether the attempt was successful.
	 */
	public void setLoginStatus(boolean success)
	{
		loggedIn = success;
		loggedInTime = server.getCycleStartTime();
	}

	/**
	 * Queue a message indicating the success or failure of a logout attempt.
	 *
	 * @param success Whether the attempt was successful.
	 */
	public void sendLogoutStatus(boolean success)
	{
		loggedIn = !success;
		Message logoutResponse = new Message("LogoutResponse");
		logoutResponse.setArgument("success", success);
		queueMessage(logoutResponse);
	}

	/** Enter a loop that receives and processes messages until the link is closed. */
	@Override
	public void run()
	{
		Server.LOGGER.log(Level.FINE, "Client Handler Started.  Link open:{0}", linkOpen);
		Message message;
		while (linkOpen)
		{
			try
			{
				message = getInputStream().readMessage();
				if (message == null)
				{
					// A null message signals end-of-stream from the client.
					Server.LOGGER.log(Level.FINE, "Received null message, shutting down ClientHandler");
					shutdown();
					break;
				}
				Server.LOGGER.log(Level.FINE, "Message received: {0}", message);
				processMessage(message);
			} catch (Exception ex)
			{
				Server.LOGGER.log(Level.SEVERE, "Exception with client handler.", ex);
				shutdown();
			}
		}
	}

	/**
	 * Process the given message.  Known messages run asynchronously and are queued for the
	 * server's synced tasks; unknown messages get an InvalidMessage response.
	 *
	 * @param message The message to be processed.
	 */
	protected void processMessage(Message message)
	{
		ExecutableMessage exec;
		exec = ServerExecutableMessage.getServerExecutableMessageFor(this, message);
		if (exec != null)
		{
			exec.runASync();
			getServer().addSyncedTask(exec);
		} else
		{
			Message invalid = new Message("InvalidMessage");
			invalid.setArgument("messageName", message.name);
			invalid.setArgument("messageID", message.getID());
			queueMessage(invalid);
		}
	}

	/**
	 * Get an executable message for a given message.
	 *
	 * @param message The message to be used.
	 *
	 * @return The ExecutableMessage.
	 */
	@Override
	protected ExecutableMessage getExecutableMessage(Message message)
	{
		return ServerExecutableMessage.getServerExecutableMessageFor(this, message);
	}

	/**
	 * The username of this client.
	 *
	 * @return The username of this client.
	 */
	public String getUsername()
	{
		return username;
	}

	/**
	 * Set the username of this client handler, and register it with the server.
	 *
	 * @param username The client handler's username.
	 */
	public void setUsername(String username)
	{
		this.username = username;
		server.clientUsernameSet(username, this);
	}

	/**
	 * Get the time at which this client was successfully logged in.
	 *
	 * @return The time at which this client was successfully logged in.
	 */
	public long getLoggedInTime()
	{
		return loggedInTime;
	}

	/**
	 * Get the ClientHandler ID for this client handler.
	 *
	 * @return The ID of this client handler.
	 */
	public int getHandlerId()
	{
		return handlerId;
	}

	/**
	 * Set the AES key used to encrypt and decrypt messages.
	 *
	 * @param key The AES key bytes used to encrypt and decrypt messages.
	 */
	public void setAesKey(byte[] key)
	{
		getInputStream().setAESKey(key);
		getOutputStream().setAesKey(key);
	}

	/**
	 * Immediately force send a message.  This method should only be used if a message is <i>extremely</i> time
	 * dependent, otherwise messages should be queued using the {@code queueMessage} method, because this method may
	 * cause synchronization issues.
	 *
	 * @param message The message to send.
	 *
	 * @throws IOException If there is an IOException.
	 */
	public void forceMessage(Message message) throws IOException
	{
		getOutputStream().writeMessage(message);
	}
}
package org.mskcc.cbio.portal.dao; import org.mskcc.cbio.portal.model.CanonicalGene; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * Data Access Object to Gene Table. * For faster access, consider using DaoGeneOptimized. * * @author Ethan Cerami. */ final class DaoGene { /** * Private Constructor to enforce Singleton Pattern. */ private DaoGene() { } private static int fakeEntrezId = 0; private static synchronized int getNextFakeEntrezId() throws DaoException { while (getGene(--fakeEntrezId)!=null); return fakeEntrezId; } public static synchronized int addGeneWithoutEntrezGeneId(CanonicalGene gene) throws DaoException { CanonicalGene existingGene = getGene(gene.getHugoGeneSymbolAllCaps()); gene.setEntrezGeneId(existingGene==null?getNextFakeEntrezId():existingGene.getEntrezGeneId()); return addGene(gene); } /** * Adds a new Gene Record to the Database. * * @param gene Canonical Gene Object. * @return number of records successfully added. * @throws DaoException Database Error. 
*/ public static int addGene(CanonicalGene gene) throws DaoException { if (MySQLbulkLoader.isBulkLoad()) { // write to the temp file maintained by the MySQLbulkLoader MySQLbulkLoader.getMySQLbulkLoader("gene").insertRecord(Long.toString(gene.getEntrezGeneId()), gene.getHugoGeneSymbolAllCaps(),gene.getType(),gene.getCytoband(),gene.getLength()==0?null:Integer.toString(gene.getLength())); addGeneAliases(gene); // return 1 because normal insert will return 1 if no error occurs return 1; } Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { int rows = 0; CanonicalGene existingGene = getGene(gene.getEntrezGeneId()); if (existingGene == null) { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("INSERT INTO gene (`ENTREZ_GENE_ID`,`HUGO_GENE_SYMBOL`,`TYPE`,`CYTOBAND`,`LENGTH`) " + "VALUES (?,?,?,?,?)"); pstmt.setLong(1, gene.getEntrezGeneId()); pstmt.setString(2, gene.getHugoGeneSymbolAllCaps()); pstmt.setString(3, gene.getType()); pstmt.setString(4, gene.getCytoband()); pstmt.setInt(5, gene.getLength()); rows += pstmt.executeUpdate(); } rows += addGeneAliases(gene); return rows; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Add gene_alias records. * @param gene Canonical Gene Object. * @return number of records successfully added. * @throws DaoException Database Error. 
*/ public static int addGeneAliases(CanonicalGene gene) throws DaoException { if (MySQLbulkLoader.isBulkLoad()) { // write to the temp file maintained by the MySQLbulkLoader Set<String> aliases = gene.getAliases(); for (String alias : aliases) { MySQLbulkLoader.getMySQLbulkLoader("gene_alias").insertRecord( Long.toString(gene.getEntrezGeneId()), alias); } // return 1 because normal insert will return 1 if no error occurs return 1; } Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); Set<String> aliases = gene.getAliases(); Set<String> existingAliases = getAliases(gene.getEntrezGeneId()); int rows = 0; for (String alias : aliases) { if (!existingAliases.contains(alias)) { pstmt = con.prepareStatement("INSERT INTO gene_alias " + "(`ENTREZ_GENE_ID`,`GENE_ALIAS`) VALUES (?,?)"); pstmt.setLong(1, gene.getEntrezGeneId()); pstmt.setString(2, alias); rows += pstmt.executeUpdate(); } } return rows; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Gets the Gene with the Specified Entrez Gene ID. * For faster access, consider using DaoGeneOptimized. * * @param entrezGeneId Entrez Gene ID. * @return Canonical Gene Object. * @throws DaoException Database Error. */ private static CanonicalGene getGene(long entrezGeneId) throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT * FROM gene WHERE ENTREZ_GENE_ID = ?"); pstmt.setLong(1, entrezGeneId); rs = pstmt.executeQuery(); if (rs.next()) { return extractGene(rs); } else { return null; } } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Gets aliases for all genes. * @return map from entrez gene id to a set of aliases. * @throws DaoException Database Error. 
*/ private static Set<String> getAliases(long entrezGeneId) throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT * FROM gene_alias WHERE ENTREZ_GENE_ID = ?"); pstmt.setLong(1, entrezGeneId); rs = pstmt.executeQuery(); Set<String> aliases = new HashSet<String>(); while (rs.next()) { aliases.add(rs.getString("GENE_ALIAS")); } return aliases; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } private static Map<Long,Set<String>> getAllAliases() throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT * FROM gene_alias"); rs = pstmt.executeQuery(); Map<Long,Set<String>> map = new HashMap<Long,Set<String>>(); while (rs.next()) { Long entrez = rs.getLong("ENTREZ_GENE_ID"); Set<String> aliases = map.get(entrez); if (aliases==null) { aliases = new HashSet<String>(); map.put(entrez, aliases); } aliases.add(rs.getString("GENE_ALIAS")); } return map; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Gets all Genes in the Database. * * @return ArrayList of Canonical Genes. * @throws DaoException Database Error. 
*/ public static ArrayList<CanonicalGene> getAllGenes() throws DaoException { Map<Long,Set<String>> mapAliases = getAllAliases(); ArrayList<CanonicalGene> geneList = new ArrayList<CanonicalGene>(); Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT * FROM gene"); rs = pstmt.executeQuery(); while (rs.next()) { long entrezGeneId = rs.getInt("ENTREZ_GENE_ID"); Set<String> aliases = mapAliases.get(entrezGeneId); CanonicalGene gene = new CanonicalGene(entrezGeneId, rs.getString("HUGO_GENE_SYMBOL"), aliases); gene.setCytoband(rs.getString("CYTOBAND")); gene.setLength(rs.getInt("LENGTH")); gene.setType(rs.getString("TYPE")); geneList.add(gene); } return geneList; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Gets the Gene with the Specified HUGO Gene Symbol. * For faster access, consider using DaoGeneOptimized. * * @param hugoGeneSymbol HUGO Gene Symbol. * @return Canonical Gene Object. * @throws DaoException Database Error. 
*/ private static CanonicalGene getGene(String hugoGeneSymbol) throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT * FROM gene WHERE HUGO_GENE_SYMBOL = ?"); pstmt.setString(1, hugoGeneSymbol); rs = pstmt.executeQuery(); if (rs.next()) { return extractGene(rs); } else { return null; } } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } private static CanonicalGene extractGene(ResultSet rs) throws SQLException, DaoException { long entrezGeneId = rs.getInt("ENTREZ_GENE_ID"); Set<String> aliases = getAliases(entrezGeneId); CanonicalGene gene = new CanonicalGene(entrezGeneId, rs.getString("HUGO_GENE_SYMBOL"), aliases); gene.setCytoband(rs.getString("CYTOBAND")); gene.setLength(rs.getInt("LENGTH")); gene.setType(rs.getString("TYPE")); return gene; } /** * Gets the Number of Gene Records in the Database. * * @return number of gene records. * @throws DaoException Database Error. 
*/ public static int getCount() throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement ("SELECT COUNT(*) FROM gene"); rs = pstmt.executeQuery(); if (rs.next()) { return rs.getInt(1); } return 0; } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * * @param entrezGeneId */ public static void deleteGene(long entrezGeneId) throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement("DELETE FROM gene WHERE ENTREZ_GENE_ID=?"); pstmt.setLong(1, entrezGeneId); pstmt.executeUpdate(); } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } deleteGeneAlias(entrezGeneId); } /** * * @param entrezGeneId */ public static void deleteGeneAlias(long entrezGeneId) throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement("DELETE FROM gene_alias WHERE ENTREZ_GENE_ID=?"); pstmt.setLong(1, entrezGeneId); pstmt.executeUpdate(); } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } /** * Deletes all Gene Records in the Database. * * @throws DaoException Database Error. 
*/ public static void deleteAllRecords() throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement("TRUNCATE TABLE gene"); pstmt.executeUpdate(); } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } deleteAllAliasRecords(); } private static void deleteAllAliasRecords() throws DaoException { Connection con = null; PreparedStatement pstmt = null; ResultSet rs = null; try { con = JdbcUtil.getDbConnection(DaoGene.class); pstmt = con.prepareStatement("TRUNCATE TABLE gene_alias"); pstmt.executeUpdate(); } catch (SQLException e) { throw new DaoException(e); } finally { JdbcUtil.closeAll(DaoGene.class, con, pstmt, rs); } } }
package org.javafp.util; /** * Interfaces for composable functions which throw. */ public abstract class FunctionsEx { /** * Function of arity 0. * @param <R> return type */ @FunctionalInterface public interface F0<R> { static <R> F0<R> of(F0<R> f) { return f; } static <R> F0<R> konst(R r) { return () -> r; } R apply() throws Exception; } /** * Function of arity 1. * @param <A> 1st argument type * @param <R> return type */ @FunctionalInterface public interface F<A, R> { static <A, R> F<A, R> of(F<A, R> f) { return f; } static <A> F<A, A> id() { return x -> x; } static <A, R> F<A, R> konst(R r) { return a -> r; } static <A, B, R> F<B, F<A, R>> flip(F<A, F<B, R>> f) { return b -> a -> f.apply(a).apply(b); } R apply(A a) throws Exception; default <T> F<T, R> compose(F<T, A> f) { return t -> this.apply(f.apply(t)); } } /** * Function of arity 2. * @param <A> 1st argument type * @param <B> 2nd argument type * @param <R> return type */ @FunctionalInterface public interface F2<A, B, R> { static <A, B, R> F2<A, B, R> of(F2<A, B, R> f) { return f; } static <A, B, R> F<A, F<B, R>> curry(F2<A, B, R> f) { return f.curry(); } static <A, B, R> F2<A, B, R> uncurry(F<A, F<B, R>> f) { return (a, b) -> f.apply(a).apply(b); } static <A, B> F2<A, B, A> first() { return (a, b) -> a; } static <A, B> F2<A, B, B> second() { return (a, b) -> b; } R apply(A a, B b) throws Exception; default F<B, R> partial(A a) { return b -> apply(a, b); } default F<A, F<B, R>> curry() { return a -> b -> apply(a, b); } default F2<B, A, R> flip() { return (b, a) -> apply(a, b); } } /** * Function of arity 3. 
* @param <A> 1st argument type * @param <B> 2nd argument type * @param <C> 3rd argument type * @param <R> return type */ @FunctionalInterface public interface F3<A, B, C, R> { static <A, B, C, R> F3<A, B, C, R>of(F3<A, B, C, R> f) { return f; } static <A, B, C, R> F<A, F<B, F<C, R>>> curry(F3<A, B, C, R> f) { return f.curry(); } static <A, B, C, R> F3<A, B, C, R> uncurry(F<A, F<B, F<C, R>>> f) { return (a, b, c) -> f.apply(a).apply(b).apply(c); } R apply(A a, B b, C c) throws Exception; default F2<B, C, R> partial(A a) { return (b, c) -> apply(a, b, c); } default F<C, R> partial(A a, B b) { return c -> apply(a, b, c); } default F<A, F<B, F<C, R>>> curry() { return a -> b -> c -> apply(a, b, c); } } /** * Function of arity 4. * @param <A> 1st argument type * @param <B> 2nd argument type * @param <C> 3rd argument type * @param <D> 4th argument type * @param <R> return type */ @FunctionalInterface public interface F4<A, B, C, D, R> { static <A, B, C, D, R> F4<A, B, C, D, R> of(F4<A, B, C, D, R> f) { return f; } static <A, B, C, D, R> F<A, F<B, F<C, F<D, R>>>> curry(F4<A, B, C, D, R> f) { return f.curry(); } static <A, B, C, D, R> F4<A, B, C, D, R> uncurry(F<A, F<B, F<C, F<D, R>>>> f) { return (a, b, c, d) -> f.apply(a).apply(b).apply(c).apply(d); } R apply(A a, B b, C c, D d) throws Exception; default F3<B, C, D, R> partial(A a) { return (b, c, d) -> apply(a, b, c, d); } default F2<C, D, R> partial(A a, B b) { return (c, d) -> apply(a, b, c, d); } default F<D, R> partial(A a, B b, C c) { return d -> apply(a, b, c, d); } default F<A, F<B, F<C, F<D, R>>>> curry() { return a -> b -> c -> d -> apply(a, b, c, d); } } /** * Function of arity 5. 
* @param <A> 1st argument type * @param <B> 2nd argument type * @param <C> 3rd argument type * @param <D> 4th argument type * @param <E> 5th argument type * @param <R> return type */ @FunctionalInterface public interface F5<A, B, C, D, E, R> { static <A, B, C, D, E, R> F5<A, B, C, D, E, R> of(F5<A, B, C, D, E, R> f) { return f; } static <A, B, C, D, E, R> F<A, F<B, F<C, F<D, F<E, R>>>>> curry(F5<A, B, C, D, E, R> f) { return f.curry(); } static <A, B, C, D, E, R> F5<A, B, C, D, E, R> uncurry(F<A, F<B, F<C, F<D, F<E, R>>>>> f) { return (a, b, c, d, e) -> f.apply(a).apply(b).apply(c).apply(d).apply(e); } R apply(A a, B b, C c, D d, E e) throws Exception; default F4<B, C, D, E, R> partial(A a) { return (b, c, d, e) -> apply(a, b, c, d, e); } default F3<C, D, E, R> partial(A a, B b) { return (c, d, e) -> apply(a, b, c, d, e); } default F2<D, E, R> partial(A a, B b, C c) { return (d, e) -> apply(a, b, c, d, e); } default F<E, R> partial(A a, B b, C c, D d) { return e -> apply(a, b, c, d, e); } default F<A, F<B, F<C, F<D, F<E, R>>>>> curry() { return a -> b -> c -> d -> e -> apply(a, b, c, d, e); } } /** * Function of arity 6. 
* @param <A> 1st argument type * @param <B> 2nd argument type * @param <C> 3rd argument type * @param <D> 4th argument type * @param <E> 5th argument type * @param <G> 6th argument type * @param <R> return type */ @FunctionalInterface public interface F6<A, B, C, D, E, G, R> { static <A, B, C, D, E, G, R> F6<A, B, C, D, E, G, R> of(F6<A, B, C, D, E, G, R> f) { return f; } static <A, B, C, D, E, G, R> F<A, F<B, F<C, F<D, F<E, F<G, R>>>>>> curry(F6<A, B, C, D, E, G, R> f) { return f.curry(); } static <A, B, C, D, E, G, R> F6<A, B, C, D, E, G, R> uncurry(F<A, F<B, F<C, F<D, F<E, F<G, R>>>>>> f) { return (a, b, c, d, e, g) -> f.apply(a).apply(b).apply(c).apply(d).apply(e).apply(g); } R apply(A a, B b, C c, D d, E e, G g) throws Exception; default F5<B, C, D, E, G, R> partial(A a) { return (b, c, d, e, g) -> apply(a, b, c, d, e, g); } default F4<C, D, E, G, R> partial(A a, B b) { return (c, d, e, g) -> apply(a, b, c, d, e, g); } default F3<D, E, G, R> partial(A a, B b, C c) { return (d, e, g) -> apply(a, b, c, d, e, g); } default F2<E, G, R> partial(A a, B b, C c, D d) { return (e, g) -> apply(a, b, c, d, e, g); } default F<G, R> partial(A a, B b, C c, D d, E e) { return g -> apply(a, b, c, d, e, g); } default F<A, F<B, F<C, F<D, F<E, F<G, R>>>>>> curry() { return a -> b -> c -> d -> e -> g -> apply(a, b, c, d, e, g); } } /** * Unary operator interface. * @param <T> operand type */ @FunctionalInterface public interface Op<T> extends F<T, T> { T apply(T t) throws Exception; } /** * Binary operator interface. * @param <T> operand type */ @FunctionalInterface public interface Op2<T> extends F2<T, T, T> { T apply(T l, T r) throws Exception; default Op2<T> flip() { return (b, a) -> apply(a, b); } } /** * Predicate interface */ @FunctionalInterface public interface Predicate<T> extends F<T, Boolean> { Boolean apply(T t) throws Exception; } }
package com.redhat.ceylon.maven; import java.io.Closeable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipException; import java.util.zip.ZipFile; import org.apache.maven.model.Dependency; import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.eclipse.aether.util.artifact.JavaScopes; import com.redhat.ceylon.common.Backend; import com.redhat.ceylon.common.FileUtil; import com.redhat.ceylon.common.ModuleUtil; import com.redhat.ceylon.common.Versions; import com.redhat.ceylon.common.config.CeylonConfig; import com.redhat.ceylon.compiler.java.runtime.tools.CompilationListener; import com.redhat.ceylon.compiler.java.runtime.tools.Compiler; import com.redhat.ceylon.compiler.java.runtime.tools.CompilerOptions; import com.redhat.ceylon.compiler.java.runtime.tools.JavaCompilerOptions; import com.redhat.ceylon.compiler.java.runtime.tools.impl.JavaCompilerImpl; /** * @author <a href="mailto:julien@julienviet.com">Julien Viet</a> */ @Mojo(name = "compile", defaultPhase = LifecyclePhase.COMPILE) public class CeylonCompileMojo extends AbstractCeylonCompileMojo { @Parameter private boolean disablePomChecks; @Parameter private Boolean flatClasspath; @Parameter private Boolean autoExportMavenDependencies; @Parameter private Boolean fullyExportMavenDependencies; @Parameter private String jdkProvider; @Parameter private 
List<String> aptModules; @Parameter private String javacOptions; @Parameter private File explodeTo; @Parameter private boolean explode; @Override protected void compile(List<File> sourcePath, List<File> resourcePath, List<File> files, List<String> modules) throws MojoExecutionException, MojoFailureException { exportDependencies(); Compiler compiler = new JavaCompilerImpl() { @Override protected List<String> translateOptions(CompilerOptions options) { List<String> translatedOptions = super.translateOptions(options); if (javacOptions != null) { Collections.addAll(translatedOptions, javacOptions.split("\\s+")); } // Temporary until 1.3.2 is released Collections.addAll(translatedOptions, "-source", getDefaultTarget().toString()); return translatedOptions; } }; CeylonConfig cfg = CeylonConfig.createFromLocalDir(cwd); JavaCompilerOptions options = JavaCompilerOptions.fromConfig(cfg); options.setModules(modules); options.setJavacTarget(getDefaultTarget()); options.setSourcePath(sourcePath); options.setResourcePath(resourcePath); if (cwd != null) { options.setWorkingDirectory(cwd.getAbsolutePath()); } options.setOutputRepository(out); if(flatClasspath != null) options.setFlatClasspath(flatClasspath); if(autoExportMavenDependencies != null) options.setAutoExportMavenDependencies(autoExportMavenDependencies); if(fullyExportMavenDependencies != null) options.setFullyExportMavenDependencies(fullyExportMavenDependencies); if(jdkProvider != null) options.setJdkProvider(jdkProvider); if (aptModules != null) { options.setAptModules(aptModules); } if (verbose != null) { options.setVerbose(true); if (!"true".equals(verbose)) { options.setVerboseCategory(verbose); } } if (userRepos != null) { for (String userRepo : userRepos) { options.addUserRepository(userRepo); } } else { options.addUserRepository(buildDir + "/modules"); } addExportedUserRepository(options); if(ceylonHome != null) options.setSystemRepository(ceylonHome + "/repo"); if(timeout != null) options.setTimeout(timeout); 
options.setFiles(files);

// Collect a MojoExecutionException raised inside the (single-abstract-method-less)
// listener callback so it can be rethrown on this thread after compile() returns.
final MojoExecutionException[] x = new MojoExecutionException[1];
boolean ok = compiler.compile(options, new CompilationListener() {

    // Reports a compiler error, with position info when a source file is known.
    public void error(File file, long line, long column, String message) {
        String msg;
        if (file != null) {
            msg = "Compilation error at (" + line + "," + column + ") in " + file.getAbsolutePath() + ":" + message;
        } else {
            msg = "Compilation error:" + message;
        }
        getLog().error(msg);
    }

    // Reports a compiler warning, with position info when a source file is known.
    public void warning(File file, long line, long column, String message) {
        String msg;
        if (file != null) {
            msg = "Compilation warning at (" + line + "," + column + ") in " + file.getAbsolutePath() + ":" + message;
        } else {
            msg = "Compilation warning:" + message;
        }
        getLog().warn(msg);
    }

    // Called once per successfully compiled module: optionally explodes the .car
    // archive and cross-checks its Maven descriptor against the project POM.
    public void moduleCompiled(String module, String version) {
        getLog().info("Compiled module " + module + "/" + version);
        if (explode) {
            explodeModule(module, version, new File(getClassesOutput()));
        } else if (explodeTo != null) {
            explodeModule(module, version, explodeTo);
        }
        try {
            if (!disablePomChecks && !isTest())
                checkDependencies(module, version);
        } catch (MojoExecutionException e) {
            // Stash for rethrow on the caller's thread; later modules may overwrite
            // earlier failures, so only the last mismatch is reported.
            x[0] = e;
        }
    }
});
// Rethrow any dependency-check failure captured by the listener.
if (x[0] != null)
    throw x[0];
if (!ok) {
    throw new MojoExecutionException("Compilation failed");
}
}

/** Returns the project's compiled-classes output directory (used as explode target). */
protected String getClassesOutput() {
    return project.getBuild().getOutputDirectory();
}

/**
 * Derives the default bytecode target from the running JVM's "java.version":
 * e.g. "1.8.0_171" -> 8, "11.0.2" -> 11. Splits on '.', '_' and '-' and uses the
 * second component when the version starts with the legacy "1." prefix.
 */
private static Long getDefaultTarget() {
    String dottedVersion = System.getProperty("java.version");
    String[] parts = dottedVersion.split("\\.|_|-");
    String versionPart = parts[0].equals("1") ? parts[1] : parts[0];
    return Long.parseLong(versionPart);
}

/**
 * Unpacks the compiled module's .car archive into {@code explodeTo}
 * (resolved against the configured working directory), if the output
 * repository directory exists.
 */
protected void explodeModule(String module, String version, File explodeTo) {
    File fOut = new File(out);
    if (fOut.isDirectory()) {
        File path = new File(ModuleUtil.moduleToPath(fOut, module), version);
        File car = new File(path, module + "-" + version + ".car");
        unzip(car, FileUtil.applyCwd(cwd, explodeTo));
    }
}

/**
 * Verifies that the pom.xml embedded in the compiled .car (under
 * META-INF/maven/&lt;groupId&gt;/&lt;artifactId&gt;/pom.xml) agrees with this
 * project's dependencies.
 *
 * @throws MojoExecutionException if the embedded descriptor is missing or mismatched.
 */
private void checkDependencies(String module, String version) throws MojoExecutionException {
    File fOut = new File(out);
    if (fOut.isDirectory()) {
        File path = new File(ModuleUtil.moduleToPath(fOut, module), version);
        File car = new File(path, module + "-" + version + ".car");
        MavenXpp3Reader reader = new MavenXpp3Reader();
        try (ZipFile zipFile = new ZipFile(car)) {
            String groupId = project.getGroupId();
            String artifactId = project.getArtifactId();
            ZipEntry entry = zipFile.getEntry("META-INF/maven/" + groupId + "/" + artifactId + "/pom.xml");
            if (entry == null) {
                throw new MojoExecutionException("Maven descriptor missing in Ceylon module " + car
                        + ": perhaps you did not set group/artifact to " + groupId + ":" + artifactId + "?");
            }
            try (InputStream is = zipFile.getInputStream(entry)) {
                Model model = reader.read(is);
                compareDependencies(model);
            } catch (XmlPullParserException e) {
                // NOTE(review): parse failures are silently swallowed, so a corrupt
                // embedded pom.xml skips the dependency check entirely — consider
                // wrapping in MojoExecutionException instead.
                e.printStackTrace();
            }
        } catch (ZipException e) {
            // NOTE(review): swallowed — an unreadable .car skips the check silently.
            e.printStackTrace();
        } catch (IOException e) {
            // NOTE(review): swallowed — an I/O error skips the check silently.
            e.printStackTrace();
        }
    }
}

/**
 * Diffs the project's POM dependencies against the module descriptor's
 * dependencies (augmented with the implicit ceylon.language dependency).
 * Test-scoped POM dependencies are ignored. Throws listing the dependencies
 * present on one side but not the other.
 *
 * @throws MojoExecutionException if the two dependency sets differ.
 */
private void compareDependencies(Model model) throws MojoExecutionException {
    Set<Dependency> projectDependencies = new HashSet<>(project.getDependencies());
    Set<Dependency> augmentedModelDependencies = new HashSet<>(model.getDependencies());
    // Every Ceylon module implicitly depends on the language module; add it so
    // the POM is not flagged for declaring it.
    Dependency languageDep = new Dependency();
    languageDep.setGroupId("org.ceylon-lang");
    languageDep.setArtifactId("ceylon.language");
    languageDep.setVersion(Versions.CEYLON_VERSION_NUMBER);
    augmentedModelDependencies.add(languageDep);
    Set<Dependency> modelDependencies = new HashSet<>(augmentedModelDependencies);
    OUTER:
    for (Dependency pomDependency : project.getDependencies()) {
        // skip test deps
        if (pomDependency.getScope().equals(JavaScopes.TEST)) {
            projectDependencies.remove(pomDependency);
            continue;
        }
        String pomGroupId = pomDependency.getGroupId();
        String pomArtifactId = pomDependency.getArtifactId();
        String pomVersion = pomDependency.getVersion();
        for (Dependency modelDependency : augmentedModelDependencies) {
            String modelGroupId = modelDependency.getGroupId();
            String modelArtifactId = modelDependency.getArtifactId();
            // FIXME: workaround for ceylon bug
            // (artifactId may arrive as "suffix:realArtifact"; fold the prefix
            // into the groupId before comparing)
            int colon = modelArtifactId.indexOf(':');
            if (colon != -1) {
                modelGroupId += "." + modelArtifactId.substring(0, colon);
                modelArtifactId = modelArtifactId.substring(colon + 1);
            }
            String modelVersion = modelDependency.getVersion();
            if (pomGroupId.equals(modelGroupId)
                    && pomArtifactId.equals(modelArtifactId)
                    && pomVersion.equals(modelVersion)) {
                // we found a match, let's remove both and move to the next
                projectDependencies.remove(pomDependency);
                modelDependencies.remove(modelDependency);
                continue OUTER;
            }
        }
    }
    if (projectDependencies.isEmpty() && modelDependencies.isEmpty())
        return;
    StringBuilder sb = new StringBuilder();
    // At this point, the sets are left with the differences where each are errors
    if (!projectDependencies.isEmpty()) {
        sb.append("pom.xml dependencies missing from module.ceylon descriptor: ");
        boolean once = true;
        for (Dependency projectDependency : projectDependencies) {
            if (once)
                once = false;
            else
                sb.append(", ");
            sb.append(projectDependency.getGroupId() + ":" + projectDependency.getArtifactId() + "/" + projectDependency.getVersion());
        }
        sb.append(".");
        if (!modelDependencies.isEmpty())
            sb.append(" ");
    }
    if (!modelDependencies.isEmpty()) {
        sb.append("module.ceylon dependencies missing from pom.xml descriptor: ");
        boolean once = true;
        for (Dependency modelDependency : modelDependencies) {
            if (once)
                once = false;
            else
                sb.append(", ");
            sb.append(modelDependency.getGroupId() + ":" + modelDependency.getArtifactId() + "/" + modelDependency.getVersion());
        }
    }
    throw new MojoExecutionException("Descriptors mismatch: " + sb.toString());
}

/**
 * Extracts a zip archive into {@code targetDir}, creating directories for
 * directory entries. Wraps any I/O failure in IllegalArgumentException.
 *
 * NOTE(review): entry names are used unchecked ("zip slip") — presumably safe
 * because the .car is build output, but worth confirming.
 */
private void unzip(File zip, File targetDir) {
    try {
        final ZipFile zipFile = new ZipFile(zip);
        try {
            final Enumeration<? extends ZipEntry> entries = zipFile.entries();
            while (entries.hasMoreElements()) {
                final ZipEntry ze = entries.nextElement();
                final File file = new File(targetDir, ze.getName());
                if (ze.isDirectory()) {
                    if (!file.exists() && file.mkdirs() == false)
                        throw new IllegalArgumentException("Cannot create dir: " + file);
                } else {
                    // copyStream() closes both streams in its finally block.
                    final FileOutputStream fos = new FileOutputStream(file);
                    copyStream(zipFile.getInputStream(ze), fos);
                }
            }
        } finally {
            zipFile.close();
        }
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }
}

/** Copies {@code in} to {@code out} in 8 KiB chunks; always closes both streams. */
private static void copyStream(final InputStream in, final OutputStream out) throws IOException {
    final byte[] bytes = new byte[8192];
    int cnt;
    try {
        while ((cnt = in.read(bytes)) != -1) {
            out.write(bytes, 0, cnt);
        }
    } finally {
        safeClose(in);
        safeClose(out);
    }
}

/** Closes a resource, deliberately ignoring any failure (best-effort cleanup). */
private static void safeClose(Closeable c) {
    try {
        c.close();
    } catch (Exception ignored) {
    }
}

/** This mojo always targets the JVM backend. */
@Override
protected Backend getBackend() {
    return Backend.Java;
}
}
package com.indexdata.pz2utils4jsf.config;

import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import javax.enterprise.context.SessionScoped;
import javax.inject.Named;

import org.apache.log4j.Logger;

import com.indexdata.masterkey.config.MissingMandatoryParameterException;
import com.indexdata.masterkey.config.ModuleConfiguration;
import com.indexdata.masterkey.config.ModuleConfigurationGetter;
import com.indexdata.pz2utils4jsf.utils.Utils;

/**
 * Session-scoped Pazpar2 configuration backed by a simple key/value map.
 *
 * The map is always seeded with a set of defaults ({@code PROXY_MODE},
 * {@code SERIALIZE_REQUESTS}, {@code STREAMBUFF_SIZE}, {@code PARSE_RESPONSES}),
 * which callers can override via the map or {@link ModuleConfiguration}
 * constructors, or via the setters.
 */
@Named
@SessionScoped
public class Pz2Config implements ModuleConfigurationGetter, Serializable {

  private static final long serialVersionUID = -6801241975338182197L;
  private static final Logger logger = Logger.getLogger(Pz2Config.class);

  // Configuration parameters; seeded by setDefaults(), then possibly overridden.
  Map<String, String> properties = new HashMap<String, String>();
  // Backing module configuration; only set by the ModuleConfiguration constructor,
  // otherwise null (getConfigFilePath() then reports "nopath").
  ModuleConfiguration moduleConfig = null;

  /** Creates a configuration containing only the defaults. */
  public Pz2Config() {
    logger.debug(Utils.objectId(this) + " being constructed with no argument");
    setDefaults();
  }

  /**
   * Creates a configuration with the defaults overridden by the given parameters.
   *
   * @param parameters key/value pairs that override (or extend) the defaults.
   */
  public Pz2Config(Map<String, String> parameters) {
    logger.debug(Utils.objectId(this) + " being constructed with parameter map argument");
    setDefaults();
    // Bulk-copy rather than iterating keySet() and re-looking-up each value.
    properties.putAll(parameters);
  }

  /**
   * Creates a configuration populated from a {@link ModuleConfiguration}.
   * Note: unlike the other constructors, defaults are NOT applied first
   * (preserving the original behavior).
   *
   * @param moduleConfig the module configuration to read parameters from.
   * @throws IOException if reading a configuration parameter fails.
   */
  public Pz2Config(ModuleConfiguration moduleConfig) throws IOException {
    logger.debug(Utils.objectId(this) + " being constructed with moduleConfig argument.");
    this.moduleConfig = moduleConfig;
    // Values are fetched through getConfigParameter() rather than taken from the
    // raw map — presumably it performs resolution; so putAll() cannot be used here.
    for (String key : moduleConfig.getConfigMap().keySet()) {
      properties.put(key, moduleConfig.getConfigParameter(key));
    }
  }

  /** Seeds the property map with the built-in Pazpar2 proxy defaults. */
  private void setDefaults() {
    properties.put("PROXY_MODE", "1");
    properties.put("SERIALIZE_REQUESTS", "false");
    properties.put("STREAMBUFF_SIZE", "4096");
    properties.put("PARSE_RESPONSES", "true");
  }

  /** Returns the value for {@code key}, or null if absent. */
  @Override
  public String get(String key) {
    return properties.get(key);
  }

  /** Sets (or overrides) a configuration parameter. */
  public void set(String key, String value) {
    properties.put(key, value);
  }

  /** Convenience setter for the PAZPAR2_URL parameter. */
  public void setPazpar2Url(String value) {
    properties.put("PAZPAR2_URL", value);
  }

  /** Convenience setter for the PAZPAR2_SERVICE_ID parameter. */
  public void setPazpar2ServiceId(String value) {
    properties.put("PAZPAR2_SERVICE_ID", value);
  }

  /** Returns the value for {@code key}, or {@code defaultValue} if the key is absent. */
  @Override
  public String get(String key, String defaultValue) {
    if (properties.containsKey(key)) {
      return properties.get(key);
    } else {
      return defaultValue;
    }
  }

  /**
   * Returns the value for {@code key}.
   *
   * @throws MissingMandatoryParameterException if the key is absent.
   */
  @Override
  public String getMandatory(String key) throws MissingMandatoryParameterException {
    if (properties.containsKey(key)) {
      return properties.get(key);
    }
    throw new MissingMandatoryParameterException("Missing mandatory parameter: " + key);
  }

  /** Returns the backing configuration's file path, or "nopath" if none was given. */
  @Override
  public String getConfigFilePath() {
    return (moduleConfig != null ? moduleConfig.getConfigFilePath() : "nopath");
  }
}
package com.jme3.animation;

import com.jme3.export.InputCapsule;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.export.OutputCapsule;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import com.jme3.util.TempVars;
import com.jme3.util.clone.Cloner;
import com.jme3.util.clone.JmeCloneable;
import java.io.IOException;

/**
 * This class represents the track for spatial animation.
 *
 * @author Marcin Roguski (Kaelthas)
 */
public class SpatialTrack implements JmeCloneable, Track {

    /**
     * Translations of the track.
     */
    private CompactVector3Array translations;

    /**
     * Rotations of the track.
     */
    private CompactQuaternionArray rotations;

    /**
     * Scales of the track.
     */
    private CompactVector3Array scales;

    /**
     * The spatial to which this track applies.
     * Note that this is optional, if no spatial is defined, the AnimControl's Spatial will be used.
     */
    private Spatial trackSpatial;

    /**
     * The times of the animations frames.
     */
    private float[] times;

    /** Serialization-only constructor. */
    public SpatialTrack() {
    }

    /**
     * Creates a spatial track for the given track data.
     *
     * @param times
     *            a float array with the time of each frame
     * @param translations
     *            the translation of the bone for each frame
     * @param rotations
     *            the rotation of the bone for each frame
     * @param scales
     *            the scale of the bone for each frame
     */
    public SpatialTrack(float[] times, Vector3f[] translations,
                        Quaternion[] rotations, Vector3f[] scales) {
        setKeyframes(times, translations, rotations, scales);
    }

    /**
     * Modify the spatial which this track modifies: samples (and, between
     * keyframes, linearly interpolates) translation/rotation/scale at the
     * given time and applies them to the track's spatial (or, if none is set,
     * to the AnimControl's spatial).
     *
     * @param time
     *            the current time of the animation
     */
    public void setTime(float time, float weight, AnimControl control, AnimChannel channel, TempVars vars) {
        Spatial spatial = trackSpatial;
        if (spatial == null) {
            spatial = control.getSpatial();
        }

        Vector3f tempV = vars.vect1;
        Vector3f tempS = vars.vect2;
        Quaternion tempQ = vars.quat1;
        Vector3f tempV2 = vars.vect3;
        Vector3f tempS2 = vars.vect4;
        Quaternion tempQ2 = vars.quat2;

        int lastFrame = times.length - 1;
        if (time < 0 || lastFrame == 0) {
            // Before the start (or a single-keyframe track): clamp to first frame.
            if (rotations != null)
                rotations.get(0, tempQ);
            if (translations != null)
                translations.get(0, tempV);
            if (scales != null) {
                scales.get(0, tempS);
            }
        } else if (time >= times[lastFrame]) {
            // Past the end: clamp to last frame.
            if (rotations != null)
                rotations.get(lastFrame, tempQ);
            if (translations != null)
                translations.get(lastFrame, tempV);
            if (scales != null) {
                scales.get(lastFrame, tempS);
            }
        } else {
            int startFrame = 0;
            int endFrame = 1;
            // use lastFrame so we never overflow the array
            for (int i = 0; i < lastFrame && times[i] < time; ++i) {
                startFrame = i;
                endFrame = i + 1;
            }

            float blend = (time - times[startFrame]) / (times[endFrame] - times[startFrame]);

            if (rotations != null)
                rotations.get(startFrame, tempQ);
            if (translations != null)
                translations.get(startFrame, tempV);
            if (scales != null) {
                scales.get(startFrame, tempS);
            }
            if (rotations != null)
                rotations.get(endFrame, tempQ2);
            if (translations != null)
                translations.get(endFrame, tempV2);
            if (scales != null) {
                scales.get(endFrame, tempS2);
            }
            // Blend start frame toward end frame in place.
            tempQ.nlerp(tempQ2, blend);
            tempV.interpolateLocal(tempV2, blend);
            tempS.interpolateLocal(tempS2, blend);
        }

        if (translations != null) {
            spatial.setLocalTranslation(tempV);
        }
        if (rotations != null) {
            spatial.setLocalRotation(tempQ);
        }
        if (scales != null) {
            spatial.setLocalScale(tempS);
        }
    }

    /**
     * Set the translations, rotations and scales for this track.
     *
     * @param times
     *            a float array with the time of each frame
     * @param translations
     *            the translation of the bone for each frame
     * @param rotations
     *            the rotation of the bone for each frame
     * @param scales
     *            the scale of the bone for each frame
     * @throws IllegalArgumentException if {@code times} is empty
     */
    public void setKeyframes(float[] times, Vector3f[] translations,
                             Quaternion[] rotations, Vector3f[] scales) {
        if (times.length == 0) {
            // Fixed copy-paste message ("BoneTrack") and use the conventional
            // exception type for a bad argument (still a RuntimeException).
            throw new IllegalArgumentException("SpatialTrack with no keyframes!");
        }

        this.times = times;
        if (translations != null) {
            assert times.length == translations.length;
            this.translations = new CompactVector3Array();
            this.translations.add(translations);
            this.translations.freeze();
        }
        if (rotations != null) {
            assert times.length == rotations.length;
            this.rotations = new CompactQuaternionArray();
            this.rotations.add(rotations);
            this.rotations.freeze();
        }
        if (scales != null) {
            assert times.length == scales.length;
            this.scales = new CompactVector3Array();
            this.scales.add(scales);
            this.scales.freeze();
        }
    }

    /**
     * @return the array of rotations of this track
     */
    public Quaternion[] getRotations() {
        return rotations == null ? null : rotations.toObjectArray();
    }

    /**
     * @return the array of scales for this track
     */
    public Vector3f[] getScales() {
        return scales == null ? null : scales.toObjectArray();
    }

    /**
     * @return the arrays of time for this track
     */
    public float[] getTimes() {
        return times;
    }

    /**
     * @return the array of translations of this track
     */
    public Vector3f[] getTranslations() {
        return translations == null ? null : translations.toObjectArray();
    }

    /**
     * @return the length of the track
     */
    public float getLength() {
        return times == null ? 0 : times[times.length - 1] - times[0];
    }

    /**
     * Create a clone with the same track spatial.
     *
     * @return a new track
     */
    @Override
    public SpatialTrack clone() {
        Cloner cloner = new Cloner();
        // Pre-register the spatial so the clone shares it rather than deep-copying it.
        cloner.setClonedValue(trackSpatial, trackSpatial);
        return cloner.clone(this);
    }

    @Override
    public float[] getKeyFrameTimes() {
        return times;
    }

    /** Sets the spatial this track should drive (null = use the AnimControl's spatial). */
    public void setTrackSpatial(Spatial trackSpatial) {
        this.trackSpatial = trackSpatial;
    }

    /** @return the spatial this track drives, or null if unset. */
    public Spatial getTrackSpatial() {
        return trackSpatial;
    }

    /**
     * Create a shallow clone for the JME cloner.
     *
     * @return a new track
     */
    @Override
    public SpatialTrack jmeClone() {
        try {
            return (SpatialTrack) super.clone();
        } catch (CloneNotSupportedException exception) {
            throw new RuntimeException("Can't clone track", exception);
        }
    }

    /**
     * Callback from {@link com.jme3.util.clone.Cloner} to convert this
     * shallow-cloned track into a deep-cloned one, using the specified cloner
     * to resolve copied fields.
     *
     * @param cloner the cloner currently cloning this control (not null)
     * @param original the track from which this track was shallow-cloned
     * (unused)
     */
    @Override
    public void cloneFields(Cloner cloner, Object original) {
        translations = cloner.clone(translations);
        rotations = cloner.clone(rotations);
        scales = cloner.clone(scales);
        trackSpatial = cloner.clone(trackSpatial);
        times = cloner.clone(times);
    }

    @Override
    public void write(JmeExporter ex) throws IOException {
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(translations, "translations", null);
        oc.write(rotations, "rotations", null);
        oc.write(times, "times", null);
        oc.write(scales, "scales", null);
        oc.write(trackSpatial, "trackSpatial", null);
    }

    @Override
    public void read(JmeImporter im) throws IOException {
        InputCapsule ic = im.getCapsule(this);
        translations = (CompactVector3Array) ic.readSavable("translations", null);
        rotations = (CompactQuaternionArray) ic.readSavable("rotations", null);
        times = ic.readFloatArray("times", null);
        scales = (CompactVector3Array) ic.readSavable("scales", null);
        trackSpatial = (Spatial) ic.readSavable("trackSpatial", null);
    }
}
package me.benjozork.onyx.entity.ai;

import me.benjozork.onyx.entity.LivingEntity;

/**
 * Plain data holder describing how an AI-controlled entity behaves:
 * which pursuit strategy it uses, how strongly it avoids projectiles,
 * and which entities are the source and target of the behavior.
 *
 * All fields are public and mutable by design.
 *
 * @author Benjozork
 */
public class AIConfiguration {

    /**
     * The strategy used by the AI
     */
    public AIStrategy strategy;

    /**
     * The degree of bullet avoidance
     */
    public ProjectileReluctance reluctance;

    /**
     * The source {@link LivingEntity} of the AI
     */
    public LivingEntity source;

    /**
     * The target {@link LivingEntity} of the AI
     */
    public LivingEntity target;

    // Tuning multiplier; defaults to 100. Exact semantics depend on the AI
    // implementation that consumes this configuration — TODO confirm.
    public float factor = 100f;

    /**
     * The different strategies by which an entity follows another entity
     */
    public enum AIStrategy {
        ACCELERATED,
        LINEAR
    }

    /**
     * How reluctant the AI is to fly into projectiles, from NONE (no avoidance)
     * up to GOD — presumably increasing avoidance strength; verify against the
     * consuming AI code.
     */
    public enum ProjectileReluctance {
        NONE,
        LOW,
        MED,
        HIGH,
        GOD
    }
}
package org.mamute.auth; import org.scribe.model.OAuthRequest; import org.scribe.model.Response; import org.scribe.model.Token; import org.scribe.model.Verb; import org.scribe.oauth.OAuthService; import com.google.common.base.Optional; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; public class FacebookAPI implements SocialAPI{ private final Token accessToken; private final OAuthService service; public FacebookAPI(OAuthService service, Token accessToken) { this.service = service; this.accessToken = accessToken; } public Optional<SignupInfo> getSignupInfo() { String url = "https://graph.facebook.com/me?fields=name,email,location,id"; Response response = makeRequest(url); JsonObject jsonObject = new JsonParser().parse(response.getBody()).getAsJsonObject(); return SignupInfo.fromFacebook(jsonObject); } public String getUserId() { String url = "https://graph.facebook.com/me?fields=id"; Response response = makeRequest(url); String body = response.getBody(); JsonObject jsonObj = new JsonParser().parse(body).getAsJsonObject(); JsonElement jsonElement = jsonObj.get("id"); if (jsonElement == null) { throw new IllegalArgumentException("facebook did not sent data requested! response body: " + body); } return jsonElement.getAsString(); } private Response makeRequest(String url) { OAuthRequest request = new OAuthRequest(Verb.GET, url); service.signRequest(accessToken, request); Response response = request.send(); String body = response.getBody(); if (response.getCode() / 100 != 2) { throw new IllegalArgumentException("http error: " + response.getCode() + ", facebook response body: " + body); } return response; } @Override public Token getAccessToken() { return accessToken; } }
package com.solace.kafka.connect;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.utils.AppInfoParser;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask;
import org.apache.kafka.common.config.types.Password;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.solacesystems.jcsmp.JCSMPException;
import com.solacesystems.jcsmp.JCSMPFactory;
import com.solacesystems.jcsmp.JCSMPProperties;
import com.solacesystems.jcsmp.JCSMPReconnectEventHandler;
import com.solacesystems.jcsmp.JCSMPSession;
import com.solacesystems.jcsmp.Queue;
import com.solacesystems.jcsmp.Topic;
import com.solacesystems.jcsmp.XMLMessageConsumer;
import com.solacesystems.jcsmp.XMLMessageListener;
import com.solacesystems.jcsmp.BytesXMLMessage;
import com.solacesystems.jcsmp.EndpointProperties;
import com.solacesystems.jcsmp.JCSMPChannelProperties;

/**
 * Kafka Connect source task that subscribes to a Solace topic via JCSMP and
 * turns received messages into Kafka {@link SourceRecord}s. Optionally uses an
 * {@link HASentinel} queue so that only one member of an HA group is active.
 */
public class SolaceSourceTask extends SourceTask {

    private static final Logger log = LoggerFactory.getLogger(SolaceSourceTask.class);

    protected JCSMPSession session;

    public JCSMPSession getSession() {
        return session;
    }

    protected Topic topic;
    protected XMLMessageConsumer consumer;

    // Connection / behavior parameters, populated in setParameters().
    protected String instanceName;
    protected String smfHost;
    protected String msgVpnName;
    protected String clientUsername;
    protected Password clientPassword;
    protected String solaceTopicName;
    protected String kafkaTopicName;
    protected int longPollInterval = SolaceConnectorConstants.DEFAULT_LONG_POLL_INTERVAL;
    protected int shortPollInterval = SolaceConnectorConstants.DEFAULT_SHORT_POLL_INTERVAL;
    protected int kafkaBufferSize = SolaceConnectorConstants.DEFAULT_POLL_BATCH_SIZE;
    protected SolaceConverter converter;
    protected int reconnectRetries;
    // NOTE(review): connectTimeoutInMillis, connectRetriesPerHost,
    // keepAliveIntervalInMillis and compressionLevel are applied in connect()
    // but never populated from config in setParameters(), so they are always 0.
    // Confirm whether that is intended before changing.
    protected int connectTimeoutInMillis;
    protected int connectRetriesPerHost;
    protected int keepAliveIntervalInMillis;
    protected int reconnectRetryWaitInMillis;
    protected int compressionLevel;
    protected String haSentinelQueueName = null;

    public String getHASentinelQueueName() {
        return haSentinelQueueName;
    }

    protected HASentinel haSentinel = null;

    @Override
    public String version() {
        return AppInfoParser.getVersion();
    }

    /**
     * This is where the main work is done. Grab a bunch of messages from the Solace topic and put in
     * a list which will be consumed by Kafka.
     *
     * Uses the Solace JCSMP API in synchronous mode with a combination of short and long polling.
     * - When no messages are available this method blocks for the longPollInterval.
     * - When messages are available we try to assemble kafkaBufferSize records together to pass to Kafka,
     *   allowing max shortPollInterval between consecutive messages.
     */
    @Override
    public List<SourceRecord> poll() throws InterruptedException {
        log.debug(instanceName + " in poll()");
        ArrayList<SourceRecord> records = new ArrayList<SourceRecord>();
        // Only consume when we are the active HA member (or HA is not configured).
        if (haSentinel != null && haSentinel.isActiveMember() || haSentinel == null) {
            try {
                BytesXMLMessage msg = consumer.receive(longPollInterval);
                if (msg == null)
                    return records;
                records.add(converter.convertMessage(msg));
                // Now fast poll as long as we keep getting messages
                int i = 0;
                while (i < kafkaBufferSize - 1) {
                    i++;
                    msg = consumer.receive(shortPollInterval);
                    if (msg == null)
                        break;
                    records.add(converter.convertMessage(msg));
                }
            } catch (JCSMPException e) {
                // Log (with stack trace) instead of printStackTrace(); return
                // whatever records were collected before the failure.
                log.error("{} poll() failed while receiving from Solace", instanceName, e);
            }
            log.info("{} poll() found {} records", instanceName, records.size());
            return records;
        } else {
            log.debug("{} poll() not active ", instanceName);
            Thread.sleep(longPollInterval);
            return records;
        }
    }

    /**
     * Connects to the Solace router, sets up the synchronous consumer and
     * subscription, and starts the optional HA sentinel.
     *
     * @throws ConnectException if connecting or starting the listener fails.
     */
    @Override
    public void start(Map<String, String> propMap) {
        setParameters(propMap);
        // Password is of Kafka's Password type, whose toString() masks the value.
        log.info("Solace Kafka Source connector started. Will connect to router at url:"
                + smfHost + " vpn:" + msgVpnName + " user:" + clientUsername + " pass:" + clientPassword
                + " Solace topic:" + solaceTopicName + " Kafka topic:" + kafkaTopicName);

        // Now start the subscribers
        try {
            connect();
        } catch (JCSMPException e) {
            // ConnectException carries the cause; no separate printStackTrace needed.
            throw new ConnectException("SolaceSourceTask failed to connect.", e);
        }

        // Consume messages synchronously
        converter = new SolaceConverter(this);
        try {
            if (consumer == null) {
                consumer = session.getMessageConsumer((XMLMessageListener) null);
                session.addSubscription(topic);
            }
            consumer.start();
        } catch (JCSMPException e) {
            throw new ConnectException("SolaceSourceTask failed to start listener.", e);
        }
    }

    /** Pulls the parameters needed to connect to the Message Router from the task config. */
    protected void setParameters(Map<String, String> propMap) {
        SolaceConfigDef conf = new SolaceConfigDef(SolaceConfigDef.defaultConfig(), propMap);
        instanceName = conf.getString(SolaceConnectorConstants.CONNECTOR_INSTANCE);
        smfHost = conf.getString(SolaceConnectorConstants.SOLACE_URL);
        msgVpnName = conf.getString(SolaceConnectorConstants.SOLACE_VPN);
        clientUsername = conf.getString(SolaceConnectorConstants.SOLACE_USERNAME);
        clientPassword = conf.getPassword(SolaceConnectorConstants.SOLACE_PASSWORD);
        kafkaTopicName = conf.getString(SolaceConnectorConstants.KAFKA_TOPIC);
        solaceTopicName = conf.getString(SolaceConnectorConstants.SOLACE_TOPIC);
        longPollInterval = conf.getInt(SolaceConnectorConstants.LONG_POLL_INTERVAL);
        shortPollInterval = conf.getInt(SolaceConnectorConstants.SHORT_POLL_INTERVAL);
        kafkaBufferSize = conf.getInt(SolaceConnectorConstants.POLL_BATCH_SIZE);
        reconnectRetries = conf.getInt(SolaceConnectorConstants.SOLACE_RECONNECT_RETRIES);
        reconnectRetryWaitInMillis = conf.getInt(SolaceConnectorConstants.SOLACE_RECONNECT_RETRY_WAIT);
        haSentinelQueueName = conf.getString(SolaceConnectorConstants.SOLACE_HA_QUEUE);
    }

    /** Stops consuming and closes the session. Safe to call even if start() failed early. */
    @Override
    public void stop() {
        // Connect may call stop() after a failed start(); guard against NPEs.
        if (consumer != null) {
            consumer.close();
        }
        if (session != null) {
            session.closeSession();
        }
    }

    /**
     * Builds the JCSMP session from the configured parameters, connects it, and
     * starts the HA sentinel when an HA queue name is configured.
     */
    public void connect() throws JCSMPException {
        final JCSMPProperties properties = new JCSMPProperties();
        properties.setProperty(JCSMPProperties.HOST, smfHost);
        properties.setProperty(JCSMPProperties.VPN_NAME, msgVpnName);
        properties.setProperty(JCSMPProperties.USERNAME, clientUsername);
        if (clientPassword != null) {
            properties.setProperty(JCSMPProperties.PASSWORD, clientPassword.value());
        }
        properties.setProperty(JCSMPProperties.APPLICATION_DESCRIPTION,
                SolaceConnectorConstants.CONNECTOR_NAME + " Version " + SolaceConnectorConstants.CONNECTOR_VERSION);

        // Settings for automatic reconnection to Solace Router
        JCSMPChannelProperties channelProps =
                (JCSMPChannelProperties) properties.getProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES);
        channelProps.setReconnectRetries(reconnectRetries);
        channelProps.setReconnectRetryWaitInMillis(reconnectRetryWaitInMillis);
        channelProps.setConnectTimeoutInMillis(connectTimeoutInMillis);
        channelProps.setConnectRetriesPerHost(connectRetriesPerHost);
        channelProps.setKeepAliveIntervalInMillis(keepAliveIntervalInMillis);
        properties.setProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES, channelProps);

        log.info("Connecting to Solace Message Router...");
        topic = JCSMPFactory.onlyInstance().createTopic(solaceTopicName);
        session = JCSMPFactory.onlyInstance().createSession(properties);
        session.connect();
        log.info("Connection succeeded!");

        if (haSentinelQueueName != null) {
            haSentinel = new HASentinel(session, haSentinelQueueName);
            haSentinel.connect();
            haSentinel.start();
        }
    }
}
package cz.zcu.kiv.jop.binding;

import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;

import javax.inject.Singleton;

import cz.zcu.kiv.jop.util.Preconditions;

/**
 * Default {@link Binding} implementation: associates an annotation type with a
 * bound class (or instance), and lazily creates instances via the bound class's
 * parameterless constructor, optionally caching a singleton.
 */
public class BindingImpl<T> implements Binding<T> {

  /** Class type of annotation for which was created this binding. */
  protected final Class<? extends Annotation> annotation;

  /** Class type of object which was bound to annotation. */
  protected Class<? extends T> type;

  /** Instance of object which was bound to annotation. */
  protected transient T instance;

  /** Information whether the bound object will be singleton. */
  protected boolean isSingleton;

  /**
   * Constructs new binding for given class type of annotation.
   *
   * @param annotation the class type of annotation.
   */
  protected BindingImpl(Class<? extends Annotation> annotation) {
    this(annotation, null, false);
  }

  /**
   * Constructs new binding between given class type of annotation and class
   * type of object which was bound to annotation.
   *
   * @param annotation the class type of annotation.
   * @param type the class type of object which will be bound to annotation.
   */
  public BindingImpl(Class<? extends Annotation> annotation, Class<? extends T> type) {
    this(annotation, type, false);
  }

  /**
   * Constructs new binding between given class type of annotation and class
   * type of object which was bound to annotation. Based on the argument
   * <code>isSingleton</code> the bound object will be singleton.
   *
   * @param annotation the class type of annotation.
   * @param type the class type of object which will be bound to annotation.
   * @param isSingleton information whether the bound object will be singleton.
   */
  public BindingImpl(Class<? extends Annotation> annotation, Class<? extends T> type, boolean isSingleton) {
    this.annotation = Preconditions.checkArgumentNotNull(annotation, "Annotation type cannot be null");
    this.type = type;
    this.isSingleton = isSingleton;
  }

  /**
   * Constructs new binding between given class type of annotation and instance
   * of object which was bound to annotation. The bound object will be
   * singleton.
   *
   * @param annotation the class type of annotation.
   * @param instance the instance of object which will be bound to annotation.
   */
  @SuppressWarnings("unchecked")
  public BindingImpl(Class<? extends Annotation> annotation, T instance) {
    this(annotation, (Class<T>) instance.getClass(), true);
    this.instance = instance;
  }

  /**
   * {@inheritDoc}
   */
  public Class<? extends Annotation> getAnnotation() {
    return annotation;
  }

  /**
   * {@inheritDoc}
   */
  public Class<? extends T> getType() {
    return type;
  }

  /**
   * Sets given class type of object which was bound to annotation for which was
   * created this binding.
   *
   * @param type the class type of object which will be bound to annotation.
   */
  void setType(Class<? extends T> type) {
    this.type = type;
  }

  /**
   * Sets given instance which was bound to annotation for which was created
   * this binding. The binding becomes a singleton binding.
   *
   * @param instance the instance of object which will be bound to annotation.
   */
  void setInstance(T instance) {
    this.instance = instance;
    this.isSingleton = true;
  }

  /**
   * Returns information whether the bound type is annotated by
   * {@link Singleton} annotation - this type is always singleton.
   *
   * @return <code>true</code> if bound type is annotated by {@link Singleton}
   *         annotation; <code>false</code> otherwise.
   */
  final boolean isAnnotatedAsSingleton() {
    return (type != null && type.getAnnotation(Singleton.class) != null);
  }

  /**
   * {@inheritDoc}
   */
  public boolean isSingleton() {
    return isSingleton || isAnnotatedAsSingleton();
  }

  /**
   * Sets information whether the bound object will be singleton or each call of
   * {@link #getInstance()} method will return new instance.
   *
   * @param isSingleton information whether the bound object will be singleton.
   */
  void setSingleton(boolean isSingleton) {
    this.isSingleton = isSingleton;
  }

  /**
   * Returns instance of bound object to annotation for which was created this
   * binding. The instance can be same or different for each call (depends on
   * {@link #isSingleton()} flag). This method suppress all exceptions thrown
   * during object instance creation because it serves to creation of eager
   * singletons.
   *
   * @return Instance of bound object to annotation for which was created this
   *         binding or <code>null</code> in case of some error.
   */
  protected T getInstanceQuietly() {
    try {
      return getInstance();
    } catch (BindingException exc) {
      // quiet mode
    }
    return null;
  }

  /**
   * {@inheritDoc}
   */
  public T getInstance() {
    if (instance == null) {
      if (type == null) {
        throw new BindingException("No class bound for annotation " + annotation.getName());
      }

      Constructor<? extends T> constructor = null;
      try {
        constructor = type.getDeclaredConstructor();
      } catch (Exception exc) {
        // Preserve the underlying cause (was previously dropped); initCause is
        // used so no particular BindingException constructor is required.
        BindingException be = new BindingException("Cannot get declared parameterless constructor of " + type.getName());
        be.initCause(exc);
        throw be;
      }

      constructor.setAccessible(true);
      try {
        if (!isSingleton()) {
          return constructor.newInstance();
        }
        instance = constructor.newInstance();
      } catch (Exception exc) {
        // Preserve the underlying cause (was previously dropped).
        BindingException be = new BindingException("Cannot create new instance of " + type.getName());
        be.initCause(exc);
        throw be;
      }
    }

    return instance;
  }

  /**
   * Returns a string representation of binding.
   *
   * @return String representation of binding.
   */
  @Override
  public String toString() {
    //@formatter:off
    return getClass().getName()
        + " [annotation=" + (annotation == null ? null : annotation.getName())
        + ", class = " + (type == null ? null : type.getName())
        + "]";
    //@formatter:on
  }
}
package ch.ntb.usb.test;

import ch.ntb.usb.LibusbJava;
import ch.ntb.usb.UsbBus;
import ch.ntb.usb.UsbConfigDescriptor;
import ch.ntb.usb.UsbDevice;
import ch.ntb.usb.UsbEndpointDescriptor;
import ch.ntb.usb.UsbInterface;
import ch.ntb.usb.UsbInterfaceDescriptor;

/**
 * This class replicates the code from testlibusb.c supplied in the
 * libusb-0.1.12 release.
 *
 * NOTE: output format strings and statement order intentionally mirror the
 * original C program; do not reorder or reformat the prints.
 */
public class TestLibUsbJava {

    // Set from the "-v" command-line flag; controls descriptor dumping in printDevice().
    static boolean verbose;

    /**
     * prints out endpoint info
     *
     * @param endpoint
     *            The end point.
     */
    private static void printEndpoint(UsbEndpointDescriptor endpoint) {
        System.out.print(String.format("      bEndpointAddress: %02xh\n",
                endpoint.getBEndpointAddress()));
        System.out.print(String.format("      bmAttributes:     %02xh\n",
                endpoint.getBmAttributes()));
        System.out.print(String.format("      wMaxPacketSize:   %d\n", endpoint
                .getWMaxPacketSize()));
        System.out.print(String.format("      bInterval:        %d\n", endpoint
                .getBInterval()));
        System.out.print(String.format("      bRefresh:         %d\n", endpoint
                .getBRefresh()));
        System.out.print(String.format("      bSynchAddress:    %d\n", endpoint
                .getBSynchAddress()));
    }

    /**
     * prints out the interface descriptor
     *
     * @param interfaceDescript
     *            The interface descriptor.
     */
    private static void printAltsetting(
            UsbInterfaceDescriptor interfaceDescript) {
        System.out.print(String.format("    bInterfaceNumber:   %d\n",
                interfaceDescript.getInterfaceNumber()));
        System.out.print(String.format("    bAlternateSetting:  %d\n",
                interfaceDescript.getAlternateSetting()));
        System.out.print(String.format("    bNumEndpoints:      %d\n",
                interfaceDescript.getNumEndpoints()));
        System.out.print(String.format("    bInterfaceClass:    %d\n",
                interfaceDescript.getInterfaceClass()));
        System.out.print(String.format("    bInterfaceSubClass: %d\n",
                interfaceDescript.getInterfaceSubClass()));
        System.out.print(String.format("    bInterfaceProtocol: %d\n",
                interfaceDescript.getInterfaceProtocol()));
        System.out.print(String.format("    iInterface:         %d\n",
                interfaceDescript.getInterface()));
        // Dump each endpoint of this alternate setting.
        for (int i = 0; i < interfaceDescript.getNumEndpoints(); i++) {
            printEndpoint(interfaceDescript.getEndpoints()[i]);
        }
    }

    /**
     * prints out interface
     *
     * @param usbInterface
     *            The interface.
     */
    private static void printInterface(UsbInterface usbInterface) {
        for (int i = 0; i < usbInterface.getNumAlternateSetting(); i++) {
            printAltsetting(usbInterface.getAlternateSetting()[i]);
        }
    }

    /**
     * prints out configuration
     *
     * @param config
     *            The configuration.
     */
    private static void printConfiguration(UsbConfigDescriptor config) {
        System.out.print(String.format("  wTotalLength:         %d\n", config
                .getTotalLength()));
        System.out.print(String.format("  bNumInterfaces:       %d\n", config
                .getNumInterfaces()));
        System.out.print(String.format("  bConfigurationValue:  %d\n", config
                .getConfigurationValue()));
        System.out.print(String.format("  iConfiguration:       %d\n", config
                .getConfiguration()));
        System.out.print(String.format("  bmAttributes:         %02xh\n",
                config.getAttributes()));
        System.out.print(String.format("  MaxPower:             %d\n", config
                .getMaxPower()));
        for (int i = 0; i < config.getNumInterfaces(); i++) {
            printInterface(config.getInterfaces()[i]);
        }
    }

    /**
     * Prints one device line (vendor/product or manufacturer/product strings
     * when the device can be opened) indented by {@code level}, then either
     * dumps full descriptors (verbose) or recurses into child devices.
     * Always returns 0, mirroring the C original.
     */
    private static int printDevice(UsbDevice dev, int level) {
        long udev;
        String mfr;
        String product;
        String sn;
        String spaces;
        String descript;

        spaces = "                                ";
        udev = LibusbJava.usb_open(dev);
        if (udev != 0) {
            // Prefer human-readable strings; fall back to hex vendor/product ids
            // when string descriptors are unavailable.
            if (dev.getDescriptor().getManufacturer() != 0) {
                mfr = LibusbJava.usb_get_string_simple(udev, dev
                        .getDescriptor().getManufacturer());
                if (mfr != null) {
                    descript = String.format("%s - ", mfr);
                } else {
                    descript = String.format("%04X - ", dev.getDescriptor()
                            .getVendorId());
                }
            } else {
                descript = String.format("%04X - ", dev.getDescriptor()
                        .getVendorId());
            }

            if (dev.getDescriptor().getProduct() != 0) {
                product = LibusbJava.usb_get_string_simple(udev, dev
                        .getDescriptor().getProduct());
                if (product != null) {
                    descript = descript + String.format("%s", product);
                } else {
                    descript = descript + String.format("%04X", dev.getDescriptor()
                            .getProductId());
                }
            } else {
                descript = descript + String.format("%04X", dev.getDescriptor()
                        .getProductId());
            }
        } else {
            // Device could not be opened: show raw ids only.
            descript = String.format("%04X - %04X", dev.getDescriptor()
                    .getVendorId(), dev.getDescriptor().getProductId());
        }

        // Two spaces of indent per tree level.
        System.out.print(String.format("%sDev #%d: %s\n", spaces.substring(0,
                level * 2), dev.getDevnum(), descript));

        if ((udev != 0) && verbose) {
            if (dev.getDescriptor().getSerialNumber() != 0) {
                sn = LibusbJava.usb_get_string_simple(udev, dev.getDescriptor()
                        .getSerialNumber());
                if (sn != null) {
                    System.out.print(String.format("%s  - Serial Number: %s\n",
                            spaces.substring(0, level * 2), sn));
                }
            }
        }

        if (udev != 0) {
            LibusbJava.usb_close(udev);
        }

        if (verbose) {
            if (dev.getConfig().length == 0) {
                System.out.print("  Couldn't retrieve descriptors\n");
                return 0;
            }
            for (int i = 0; i < dev.getDescriptor().getNumConfigurations(); i++) {
                printConfiguration(dev.getConfig()[i]);
            }
        } else {
            // Non-verbose: walk the child list (first child via getChildren(),
            // siblings via getNext()) and print each subtree.
            UsbDevice childDev = null;
            for (int i = 0; i < dev.getNumChildren(); i++) {
                if (i == 0) {
                    childDev = dev.getChildren();
                } else {
                    childDev = childDev.getNext();
                }
                printDevice(childDev, level + 1);
            }
        }
        return 0;
    } // end of printDevice method

    /**
     * The main method.
     *
     * @param args
     *            The command line arguments.
     */
    public static void main(String args[]) throws Exception {
        if ((args.length > 0) && (args[0].equals("-v"))) {
            verbose = true;
        } else {
            verbose = false;
        }

        // used for debugging. 0 = no debugging, 255 = with debugging
        LibusbJava.usb_set_debug(255);

        LibusbJava.usb_init();
        LibusbJava.usb_find_busses();
        LibusbJava.usb_find_devices();

        // Walk every bus; start from the root device when available (matching
        // the C original's non-verbose behavior), otherwise iterate the flat
        // device list.
        for (UsbBus bus = LibusbJava.usb_get_busses(); bus != null; bus = bus
                .getNext()) {
            if ((bus.getRootDev() != null) && !verbose) {
                printDevice(bus.getRootDev(), 0);
            } else {
                for (UsbDevice dev = bus.getDevices(); dev != null; dev = dev
                        .getNext()) {
                    printDevice(dev, 0);
                }
            }
        }
    } // end main
} // end of TestLibUsbJava class
package org.monarch.golr; import static com.google.common.collect.Collections2.transform; import static java.util.Collections.singleton; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import javax.inject.Inject; import org.apache.commons.lang3.ClassUtils; import org.mapdb.DB; import org.mapdb.DBMaker; import org.monarch.golr.beans.Closure; import org.monarch.golr.beans.GolrCypherQuery; import org.neo4j.graphdb.Direction; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Label; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Path; import org.neo4j.graphdb.PropertyContainer; import org.neo4j.graphdb.Relationship; import org.neo4j.graphdb.RelationshipType; import org.neo4j.graphdb.Result; import org.neo4j.graphdb.Transaction; import org.neo4j.graphdb.traversal.Evaluators; import org.neo4j.graphdb.traversal.TraversalDescription; import org.neo4j.graphdb.traversal.Uniqueness; import org.prefixcommons.CurieUtil; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import com.google.common.io.Resources; import io.scigraph.frames.CommonProperties; import io.scigraph.frames.NodeProperties; import io.scigraph.internal.CypherUtil; import io.scigraph.internal.GraphApi; import 
io.scigraph.internal.TinkerGraphUtil;
import io.scigraph.neo4j.DirectedRelationshipType;
import io.scigraph.neo4j.Graph;
import io.scigraph.neo4j.GraphUtil;
import io.scigraph.owlapi.OwlRelationships;

/**
 * Streams the results of Golr Cypher queries out of a Neo4j graph as
 * newline-separated JSON objects, enriching each row with taxon, gene,
 * chromosome, ortholog, disease/phenotype and evidence-closure fields.
 */
public class GolrLoader {

  // JSON field names for the emitted evidence/source closures.
  private static final String EVIDENCE_GRAPH = "evidence_graph"; // referenced only by the commented-out serialization below
  private static final String EVIDENCE_FIELD = "evidence";
  private static final String SOURCE_FIELD = "source";
  private static final String EVIDENCE_OBJECT_FIELD = "evidence_object";
  private static final String DEFINED_BY = "is_defined_by";

  private final GraphDatabaseService graphDb;
  private final ResultSerializerFactory factory;
  private final EvidenceProcessor processor;
  private final Graph graph;
  private final CypherUtil cypherUtil;
  private final CurieUtil curieUtil;
  private final GraphApi api;

  // OBO relationship types used to build the traversals below.
  private static final RelationshipType inTaxon =
      RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002162");
  private static final RelationshipType derivesFrom =
      RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0001000");
  private static final RelationshipType derivesSeqFromGene =
      RelationshipType.withName("http://purl.obolibrary.org/obo/GENO_0000639");
  private static final RelationshipType hasGenotype =
      RelationshipType.withName("http://purl.obolibrary.org/obo/GENO_0000222");

  // SO:0000340 = "chromosome"; plus the faldo-style location chain.
  private static final String CHROMOSOME_TYPE = "http://purl.obolibrary.org/obo/SO_0000340";
  private static final RelationshipType location = RelationshipType.withName("location");
  private static final RelationshipType begin = RelationshipType.withName("begin");
  private static final RelationshipType reference = RelationshipType.withName("reference");

  private static final Label GENE_LABEL = Label.label("gene");
  private static final Label VARIANT_LABEL = Label.label("sequence feature");
  private static final Label GENOTYPE_LABEL = Label.label("genotype");

  // Populated once by buildTraversals() inside the constructor transaction.
  private Collection<RelationshipType> parts_of;
  private Collection<RelationshipType> subSequenceOfs;
  private Collection<RelationshipType> variants;
  private TraversalDescription taxonDescription;
  private TraversalDescription chromosomeDescription;
  private TraversalDescription diseaseDescription;   // NOTE(review): built but not used in this file — getDiseases() runs Cypher instead; confirm before removing
  private TraversalDescription orthologDescription;
  private TraversalDescription phenotypeDescription; // NOTE(review): built but not used in this file — getPhenotypes() runs Cypher instead
  private Collection<Node> chromsomeEntailment;
  private TraversalDescription geneDescription;
  private Collection<String> variantStrings;         // lazy Guava view over `variants` (relationship type names)

  /**
   * Wires collaborators and eagerly builds all traversal descriptions
   * (which require an open transaction).
   */
  @Inject
  GolrLoader(GraphDatabaseService graphDb, Graph graph, CypherUtil cypherUtil, CurieUtil curieUtil,
      ResultSerializerFactory factory, EvidenceProcessor processor, GraphApi api) {
    this.graphDb = graphDb;
    this.cypherUtil = cypherUtil;
    this.curieUtil = curieUtil;
    this.graph = graph;
    this.factory = factory;
    this.processor = processor;
    this.api = api;
    try (Transaction tx = graphDb.beginTx()) {
      buildTraversals();
      tx.success();
    }
  }

  /**
   * Builds the reusable traversal descriptions (taxon, chromosome, ortholog,
   * gene, disease, phenotype). Returns early — leaving chromosome/gene
   * traversals unset — when the chromosome class is absent from the graph.
   */
  private void buildTraversals() {
    parts_of = cypherUtil.getEntailedRelationshipTypes(
        Collections.singleton("http://purl.obolibrary.org/obo/BFO_0000051"));
    subSequenceOfs = cypherUtil.getEntailedRelationshipTypes(
        Collections.singleton("http://purl.obolibrary.org/obo/RO_0002525"));
    variants = cypherUtil.getEntailedRelationshipTypes(
        Collections.singleton("http://purl.obolibrary.org/obo/GENO_0000418"));
    taxonDescription = graphDb.traversalDescription().breadthFirst()
        .relationships(OwlRelationships.OWL_EQUIVALENT_CLASS, Direction.BOTH)
        .relationships(OwlRelationships.OWL_SAME_AS, Direction.BOTH)
        .relationships(OwlRelationships.RDFS_SUBCLASS_OF, Direction.OUTGOING)
        .relationships(OwlRelationships.RDF_TYPE, Direction.OUTGOING)
        .relationships(inTaxon, Direction.OUTGOING).uniqueness(Uniqueness.RELATIONSHIP_GLOBAL);
    for (RelationshipType part_of : parts_of) {
      taxonDescription = taxonDescription.relationships(part_of, Direction.OUTGOING);
    }
    for (RelationshipType subSequenceOf : subSequenceOfs) {
      taxonDescription = taxonDescription.relationships(subSequenceOf, Direction.INCOMING);
    }
    for (RelationshipType variant : variants) {
      taxonDescription = taxonDescription.relationships(variant, Direction.OUTGOING);
    }
    taxonDescription = taxonDescription.relationships(hasGenotype, Direction.OUTGOING);
    taxonDescription = taxonDescription.relationships(derivesFrom, Direction.OUTGOING);
    chromosomeDescription = graphDb.traversalDescription().breadthFirst()
        .relationships(OwlRelationships.OWL_EQUIVALENT_CLASS, Direction.BOTH)
        .relationships(OwlRelationships.OWL_SAME_AS, Direction.BOTH)
        .relationships(OwlRelationships.RDFS_SUBCLASS_OF, Direction.OUTGOING)
        .relationships(OwlRelationships.RDF_TYPE, Direction.OUTGOING)
        .relationships(location, Direction.OUTGOING).relationships(begin, Direction.OUTGOING)
        .relationships(reference, Direction.OUTGOING);
    // Orthology/paralogy edges, one hop only.
    orthologDescription = graphDb.traversalDescription().breadthFirst()
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_HOM0000017"))
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_HOM0000020"))
        .evaluator(Evaluators.toDepth(1));
    Optional<Long> nodeId = graph.getNode(CHROMOSOME_TYPE);
    if (!nodeId.isPresent()) {
      // TODO: Move all of this to some external configuration
      return;
    }
    Node chromsomeParent = graphDb.getNodeById(nodeId.get());
    chromsomeEntailment = api.getEntailment(chromsomeParent,
        new DirectedRelationshipType(OwlRelationships.RDFS_SUBCLASS_OF, Direction.INCOMING), true);
    geneDescription = graphDb.traversalDescription().depthFirst()
        .relationships(OwlRelationships.OWL_SAME_AS, Direction.BOTH)
        .relationships(OwlRelationships.OWL_EQUIVALENT_CLASS, Direction.BOTH);
    for (RelationshipType part_of : parts_of) {
      geneDescription = geneDescription.relationships(part_of, Direction.OUTGOING);
    }
    for (RelationshipType variant : variants) {
      geneDescription = geneDescription.relationships(variant, Direction.OUTGOING);
    }
    geneDescription = geneDescription.relationships(derivesSeqFromGene, Direction.OUTGOING);
    geneDescription = geneDescription.relationships(hasGenotype, Direction.OUTGOING);
    geneDescription = geneDescription.relationships(derivesFrom, Direction.OUTGOING);
    variantStrings = transform(variants, new Function<RelationshipType, String>() {
      @Override
      public String apply(RelationshipType type) {
        return type.name();
      }
    });
    diseaseDescription = graphDb.traversalDescription().depthFirst()
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002200"),
            Direction.OUTGOING)
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002610"),
            Direction.OUTGOING)
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002326"),
            Direction.OUTGOING)
        .evaluator(Evaluators.atDepth(1));
    phenotypeDescription = graphDb.traversalDescription().depthFirst()
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002200"),
            Direction.OUTGOING)
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002610"),
            Direction.OUTGOING)
        .relationships(RelationshipType.withName("http://purl.obolibrary.org/obo/RO_0002326"),
            Direction.OUTGOING)
        .evaluator(Evaluators.fromDepth(1)).evaluator(Evaluators.toDepth(2));
  }

  /** Returns the first taxon reachable from {@code source} via an in-taxon edge, if any. */
  Optional<Node> getTaxon(Node source) {
    for (Path path : taxonDescription.traverse(source)) {
      if (path.length() > 0 && path.lastRelationship().isType(inTaxon)) {
        return Optional.of(path.endNode());
      }
    }
    return Optional.absent();
  }

  /**
   * Returns the chromosome node for {@code source}: the node whose rdf:type
   * target is within the chromosome entailment set.
   */
  Optional<Node> getChromosome(Node source) {
    for (Path path : chromosomeDescription.traverse(source)) {
      if (path.length() > 0 && path.lastRelationship().isType(OwlRelationships.RDF_TYPE)) {
        if (chromsomeEntailment.contains(path.endNode())) {
          return Optional.of(path.lastRelationship().getOtherNode(path.endNode()));
        }
      }
    }
    return Optional.absent();
  }

  // TODO return array of all found nodes
  // TODO and filter only cliqueLeaders
  /** Returns the first gene-labeled node reachable from {@code source}, if any. */
  Optional<Node> getGene(Node source) {
    for (Path path : geneDescription.traverse(source)) {
      if (path.endNode().hasLabel(GENE_LABEL)) {
        return Optional.of(path.endNode());
      }
    }
    return Optional.absent();
  }

  /** Returns the gene-labeled ortholog neighbors of {@code source} (excluding itself). */
  Collection<Node> getOrthologs(Node source) throws IOException {
    Collection<Node> orthologs = new HashSet<>();
    for (Path path : orthologDescription.traverse(source)) {
      if (path.endNode().hasLabel(GENE_LABEL) && path.endNode() != source) {
        orthologs.add(path.endNode());
      }
    }
    return orthologs;
  }

  /** Runs the bundled disease.cypher query, binding "id" to the source node id. */
  Collection<Node> getDiseases(Node source) throws IOException {
    String cypher = Resources.toString(Resources.getResource("disease.cypher"), Charsets.UTF_8);
    Multimap<String, Object> params = HashMultimap.create();
    params.put("id", source.getId());
    Result result = cypherUtil.execute(cypher, params);
    Collection<Node> diseases = new HashSet<>();
    while (result.hasNext()) {
      Map<String, Object> row = result.next();
      diseases.add((Node) row.get("disease"));
    }
    return diseases;
  }

  /** Runs the bundled phenotype.cypher query, binding "id" to the source node id. */
  Collection<Node> getPhenotypes(Node source) throws IOException {
    String cypher = Resources.toString(Resources.getResource("phenotype.cypher"), Charsets.UTF_8);
    Multimap<String, Object> params = HashMultimap.create();
    params.put("id", source.getId());
    Result result = cypherUtil.execute(cypher, params);
    Collection<Node> phenotypes = new HashSet<>();
    while (result.hasNext()) {
      Map<String, Object> row = result.next();
      phenotypes.add((Node) row.get("phenotype"));
    }
    return phenotypes;
  }

  // Per-node memoization of the traversals above (bounded at 100k entries each).
  LoadingCache<Node, Optional<Node>> taxonCache =
      CacheBuilder.newBuilder().maximumSize(100_000).build(new CacheLoader<Node, Optional<Node>>() {
        @Override
        public Optional<Node> load(Node source) throws Exception {
          return getTaxon(source);
        }
      });

  LoadingCache<Node, Optional<Node>> chromosomeCache =
      CacheBuilder.newBuilder().maximumSize(100_000).build(new CacheLoader<Node, Optional<Node>>() {
        @Override
        public Optional<Node> load(Node source) throws Exception {
          return getChromosome(source);
        }
      });

  LoadingCache<Node, Optional<Node>> geneCache =
      CacheBuilder.newBuilder().maximumSize(100_000).build(new CacheLoader<Node, Optional<Node>>() {
        @Override
        public Optional<Node> load(Node source) throws Exception {
          return getGene(source);
        }
      });

  LoadingCache<Node, Collection<Node>> orthologCache = CacheBuilder.newBuilder()
      .maximumSize(100_000).build(new CacheLoader<Node, Collection<Node>>() {
        @Override
        public Collection<Node> load(Node source) throws Exception {
          return getOrthologs(source);
        }
      });

  /** Convenience overload of {@link #process(GolrCypherQuery, Writer, Optional)} with no meta query. */
  long process(GolrCypherQuery query, Writer writer)
      throws IOException, ExecutionException, ClassNotFoundException {
    return process(query, writer, Optional.absent());
  }

  /**
   * Executes the query and serializes all rows to {@code writer} as JSON.
   * Rows with both "subject" and "object" columns take the Golr path (with
   * evidence-graph merging); otherwise the feature path is used.
   *
   * @return number of result rows processed
   */
  long process(GolrCypherQuery query, Writer writer, Optional<String> metaSourceQuery)
      throws IOException, ExecutionException, ClassNotFoundException {
    long recordCount = 0;
    try (Transaction tx = graphDb.beginTx()) {
      Result result = cypherUtil.execute(query.getQuery());
      // Golr queries need to have the evidence graphs merged, whereas chromosome queries don't.
      boolean isGolrQuery = result.columns().contains("subject") && result.columns().contains("object");
      if (isGolrQuery) {
        recordCount = serializeGolrQuery(query, result, writer, metaSourceQuery);
      } else {
        recordCount = serializedFeatureQuery(query, result, writer, metaSourceQuery);
      }
      tx.success();
    }
    return recordCount;
  }

  /**
   * Golr path: rows sharing a (subject IRI, object IRI) pair are collapsed —
   * the first occurrence is serialized, later ones only merge their evidence
   * into the pair's evidence graph. Intermediate state is spilled to a
   * temporary MapDB file so arbitrarily large result sets fit in memory.
   *
   * @return number of result rows consumed (before pair de-duplication)
   */
  private long serializeGolrQuery(GolrCypherQuery query, Result result, Writer writer,
      Optional<String> metaSourceQuery)
      throws IOException, ClassNotFoundException, ExecutionException {
    DB db = DBMaker.newTempFileDB().closeOnJvmShutdown().deleteFilesAfterClose()
        .transactionDisable().cacheSize(1000000).make();
    ConcurrentMap<Pair<String, String>, String> resultsSerializable =
        db.createHashMap("results").make();
    ConcurrentMap<Pair<String, String>, EvidenceGraphInfo> resultsGraph =
        db.createHashMap("graphs").make();
    JsonGenerator generator = new JsonFactory().createGenerator(writer);
    ResultSerializer serializer = factory.create(generator);
    generator.writeStartArray();
    int recordCount = 0;
    while (result.hasNext()) {
      recordCount++;
      Map<String, Object> row = result.next();
      String subjectIri = (String) ((Node) row.get("subject")).getProperty(NodeProperties.IRI);
      String objectIri = (String) ((Node) row.get("object")).getProperty(NodeProperties.IRI);
      Pair<String, String> pair = new Pair<String, String>(subjectIri, objectIri);
      String existingResult = resultsSerializable.get(pair);
      if (existingResult == null) {
        // First time this pair is seen: serialize the row to a JSON string
        // and snapshot its evidence graph.
        Set<Long> ignoredNodes = new HashSet<>();
        Writer stringWriter = new StringWriter();
        JsonGenerator stringGenerator = new JsonFactory().createGenerator(stringWriter);
        ResultSerializer stringSerializer = factory.create(stringGenerator);
        boolean emitEvidence = true;
        TinkerGraphUtil tguEvidenceGraph = new TinkerGraphUtil(curieUtil);
        stringGenerator.writeStartObject();
        serializerRow(row, stringSerializer, tguEvidenceGraph, ignoredNodes, query);
        stringGenerator.writeEndObject();
        stringGenerator.close();
        resultsSerializable.put(pair, stringWriter.toString());
        resultsGraph.put(pair,
            new EvidenceGraphInfo(tguEvidenceGraph.getGraph(), emitEvidence, ignoredNodes));
      } else {
        // Repeated pair: merge this row's evidence into the stored graph.
        EvidenceGraphInfo pairGraph = resultsGraph.get(pair);
        TinkerGraphUtil tguEvidenceGraph =
            new TinkerGraphUtil(EvidenceGraphInfo.toGraph(pairGraph.graphBytes), curieUtil);
        Set<Long> ignoredNodes = pairGraph.ignoredNodes;
        for (Entry<String, Object> entry : row.entrySet()) {
          Object value = entry.getValue();
          if (null == value) {
            continue;
          }
          // Add evidence
          if (value instanceof PropertyContainer) {
            tguEvidenceGraph.addElement((PropertyContainer) value);
          } else if (value instanceof Path) {
            tguEvidenceGraph.addPath((Path) value);
          } else if (value instanceof Node) {
            // NOTE(review): Node implements PropertyContainer, so this branch
            // looks unreachable — ignoredNodes may never grow here; confirm
            // against the Neo4j version in use before relying on it.
            ignoredNodes.add(((Node) value).getId());
          }
        }
        resultsGraph.put(pair, new EvidenceGraphInfo(tguEvidenceGraph.getGraph(),
            pairGraph.emitEvidence, ignoredNodes));
      }
    }
    // Second pass: emit one JSON object per unique pair, re-reading the stored
    // row JSON and appending the evidence/source closures.
    for (Entry<Pair<String, String>, String> resultSerializable : resultsSerializable.entrySet()) {
      generator.writeStartObject();
      Pair<String, String> p = resultSerializable.getKey();
      EvidenceGraphInfo pairGraph = resultsGraph.get(p);
      ObjectMapper mapper = new ObjectMapper();
      Map<String, Object> existingJson = mapper.readValue(resultSerializable.getValue(), Map.class);
      for (Entry<String, Object> entry : existingJson.entrySet()) {
        serializer.serialize(entry.getKey(), entry.getValue());
      }
      if (pairGraph != null) {
        com.tinkerpop.blueprints.Graph evidenceGraph =
            EvidenceGraphInfo.toGraph(pairGraph.graphBytes);
        processor.addAssociations(evidenceGraph);
        /*serializer.serialize(EVIDENCE_GRAPH, processor.getEvidenceGraph(evidenceGraph, metaSourceQuery));*/
        // TODO: Hackish to remove evidence but the resulting JSON is blooming out of control
        // Don't emit evidence for ontology sources
        if (pairGraph.emitEvidence) {
          List<Closure> evidenceObjectClosure =
              processor.getEvidenceObject(evidenceGraph, pairGraph.ignoredNodes);
          serializer.writeQuint(EVIDENCE_OBJECT_FIELD, evidenceObjectClosure);
          List<Closure> evidenceClosure = processor.getEvidence(evidenceGraph);
          serializer.writeQuint(EVIDENCE_FIELD, evidenceClosure);
          List<Closure> sourceClosure = processor.getSource(evidenceGraph);
          serializer.writeQuint(SOURCE_FIELD, sourceClosure);
          serializer.writeArray(DEFINED_BY, processor.getDefinedBys(evidenceGraph));
        }
      } else {
        System.out.println("No evidence graph");
      }
      generator.writeEndObject();
      generator.writeRaw('\n');
    }
    generator.writeEndArray();
    generator.close();
    db.close();
    return recordCount;
  }

  /**
   * Feature path: serializes each row directly as one JSON object, with no
   * pair de-duplication and no evidence-closure emission.
   *
   * @return number of result rows processed
   */
  private long serializedFeatureQuery(GolrCypherQuery query, Result result, Writer writer,
      Optional<String> metaSourceQuery) throws IOException, ExecutionException {
    JsonGenerator generator = new JsonFactory().createGenerator(writer);
    ResultSerializer serializer = factory.create(generator);
    int recordCount = 0;
    generator.writeStartArray();
    while (result.hasNext()) {
      generator.writeStartObject();
      Set<Long> ignoredNodes = new HashSet<>();
      TinkerGraphUtil tguEvidenceGraph = new TinkerGraphUtil(curieUtil);
      recordCount++;
      Map<String, Object> row = result.next();
      serializerRow(row, serializer, tguEvidenceGraph, ignoredNodes, query);
      generator.writeEndObject();
      generator.writeRaw('\n');
    }
    generator.writeEndArray();
    generator.close();
    return recordCount;
  }

  /**
   * Serializes a single result row, collecting evidence elements into
   * {@code tguEvidenceGraph} as a side effect and enriching subject/object
   * nodes with taxon/gene/chromosome/ortholog fields.
   *
   * @return false when the row's subject/object category is "ontology"
   *         (evidence should then be suppressed), true otherwise
   */
  private boolean serializerRow(Map<String, Object> row, ResultSerializer serializer,
      TinkerGraphUtil tguEvidenceGraph, Set<Long> ignoredNodes, GolrCypherQuery query)
      throws IOException, ExecutionException {
    boolean emitEvidence = true;
    for (Entry<String, Object> entry : row.entrySet()) {
      String key = entry.getKey();
      Object value = entry.getValue();
      if (null == value) {
        continue;
      }
      // Add evidence
      if (value instanceof PropertyContainer) {
        tguEvidenceGraph.addElement((PropertyContainer) value);
      } else if (value instanceof Path) {
        tguEvidenceGraph.addPath((Path) value);
      } else if (value instanceof Node) {
        // NOTE(review): Node implements PropertyContainer, so this branch
        // looks unreachable; confirm intended semantics of ignoredNodes.
        ignoredNodes.add(((Node) value).getId());
      }
      if (value instanceof Node) {
        // TODO: Clean this up
        if ("subject".equals(key) || "object".equals(key)) {
          Node node = (Node) value;
          Optional<Node> taxon = taxonCache.get(node);
          if (taxon.isPresent()) {
            serializer.serialize(key + "_taxon", taxon.get());
          }
          if (node.hasLabel(GENE_LABEL) || node.hasLabel(VARIANT_LABEL)
              || node.hasLabel(GENOTYPE_LABEL)) {
            // Attempt to add gene and chromosome for monarch-initiative/monarch-app/#746
            if (node.hasLabel(GENE_LABEL)) {
              serializer.serialize(key + "_gene", node);
            } else {
              Optional<Node> gene = geneCache.get(node);
              if (gene.isPresent()) {
                serializer.serialize(key + "_gene", gene.get());
              }
            }
            Optional<Node> chromosome = chromosomeCache.get(node);
            if (chromosome.isPresent()) {
              serializer.serialize(key + "_chromosome", chromosome.get());
            }
          }
        }
        if ("subject".equals(key)) {
          Collection<Node> orthologs = orthologCache.get((Node) value);
          // Lazily map ortholog nodes to CURIEs (falling back to raw IRIs).
          Collection<String> orthologsId = transform(orthologs, new Function<Node, String>() {
            @Override
            public String apply(Node node) {
              String iri = GraphUtil.getProperty(node, NodeProperties.IRI, String.class).get();
              return curieUtil.getCurie(iri).or(iri);
            }
          });
          serializer.writeArray("subject_ortholog_closure", new ArrayList<String>(orthologsId));
        }
        if ("feature".equals(key)) {
          // Add disease and phenotype for feature
          serializer.serialize("disease", getDiseases((Node) value));
          serializer.serialize("phenotype", getPhenotypes((Node) value));
        }
        if (query.getCollectedTypes().containsKey(key)) {
          serializer.serialize(key, singleton((Node) value), query.getCollectedTypes().get(key));
        } else {
          serializer.serialize(key, value);
        }
      } else if (value instanceof Relationship) {
        // Relationships are serialized as the node representing the object property.
        String objectPropertyIri =
            GraphUtil.getProperty((Relationship) value, CommonProperties.IRI, String.class).get();
        Node objectProperty = graphDb.getNodeById(graph.getNode(objectPropertyIri).get());
        serializer.serialize(key, objectProperty);
      } else if (ClassUtils.isPrimitiveOrWrapper(value.getClass()) || value instanceof String) {
        // Serialize primitive types and Strings
        if ((key.equals("subject_category") || key.equals("object_category"))
            && value.equals("ontology")) {
          emitEvidence = false;
        }
        serializer.serialize(key, value);
      }
    }
    return emitEvidence;
  }
}
package readers; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import org.junit.Test; import basics.Service; import basics.VehicleRoutingProblem; import basics.VehicleRoutingProblem.FleetSize; import basics.route.Vehicle; public class CordeauReaderTest { @Test public void testCordeauReader(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); vrpBuilder.build(); } @Test public void whenReadingInstance_fleetSizeIsFinite(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); assertEquals(FleetSize.FINITE, vrp.getFleetSize()); } @Test public void testNuOfVehicles(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); assertEquals(16,vrp.getVehicles().size()); } @Test public void whenReadingCordeauInstance_vehiclesHaveTheCorrectCapacity(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); for(Vehicle v : vrp.getVehicles()){ assertEquals(80, v.getCapacity()); } } @Test public void whenReadingCordeauInstance_vehiclesHaveTheCorrectDuration(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p08").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); for(Vehicle v : vrp.getVehicles()){ assertEquals(0.0,v.getEarliestDeparture(),0.1); 
assertEquals(310.0, v.getLatestArrival()-v.getEarliestDeparture(),0.1); } } @Test public void whenReadingCustomersCordeauInstance_customerOneShouldHaveCorrectCoordinates(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); Service service = (Service) vrp.getJobs().get("1"); assertEquals(37.0, service.getCoord().getX(), 0.1); assertEquals(52.0, service.getCoord().getY(), 0.1); } @Test public void whenReadingCustomersCordeauInstance_customerTwoShouldHaveCorrectServiceDuration(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); Service service = (Service) vrp.getJobs().get("2"); assertEquals(0.0, service.getServiceDuration(), 0.1); } @Test public void whenReadingCustomersCordeauInstance_customerThreeShouldHaveCorrectDemand(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); Service service = (Service) vrp.getJobs().get("3"); assertEquals(16.0, service.getCapacityDemand(), 0.1); } @Test public void whenReadingCustomersCordeauInstance_customerFortySevenShouldHaveCorrectDemand(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); Service service = (Service) vrp.getJobs().get("47"); assertEquals(25.0, service.getCapacityDemand(), 0.1); } @Test public void testLocationsAndCapOfVehicles(){ VehicleRoutingProblem.Builder vrpBuilder = 
VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); boolean capacityOk = true; boolean loc1ok = false; boolean loc2ok = false; boolean loc3ok = false; boolean loc4ok = false; for(Vehicle v : vrp.getVehicles()){ if(v.getCapacity() != 80) capacityOk = false; if(v.getCoord().getX() == 20.0 && v.getCoord().getY() == 20.0) loc1ok = true; if(v.getCoord().getX() == 30.0 && v.getCoord().getY() == 40.0) loc2ok = true; if(v.getCoord().getX() == 50.0 && v.getCoord().getY() == 30.0) loc3ok = true; if(v.getCoord().getX() == 60.0 && v.getCoord().getY() == 50.0) loc4ok = true; } assertTrue(capacityOk); assertTrue(loc1ok); assertTrue(loc2ok); assertTrue(loc3ok); assertTrue(loc4ok); } @Test public void testNuOfCustomers(){ VehicleRoutingProblem.Builder vrpBuilder = VehicleRoutingProblem.Builder.newInstance(); new CordeauReader(vrpBuilder).read(this.getClass().getClassLoader().getResource("p01").getPath()); VehicleRoutingProblem vrp = vrpBuilder.build(); assertEquals(50,vrp.getJobs().values().size()); } }
package org.mvel;

import static org.mvel.DataConversion.canConvert;
import org.mvel.ast.AssignmentNode;
import org.mvel.ast.LiteralNode;
import org.mvel.ast.Substatement;
import org.mvel.util.ExecutionStack;
import static org.mvel.util.ParseTools.containsCheck;
import static org.mvel.util.ParseTools.doOperations;
import org.mvel.util.PropertyTools;
import org.mvel.util.Stack;
import org.mvel.util.StringAppender;

import static java.lang.Class.forName;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * Compiles an MVEL expression into a {@link CompiledExpression}, performing
 * compile-time literal folding and (optionally) input/local-variable
 * verification.
 */
public class ExpressionCompiler extends AbstractParser {
    // Value stack used for compile-time reduction of literal sub-expressions.
    private final Stack stk = new ExecutionStack();

    // Populated during verification: external inputs and locally declared vars.
    private Set<String> inputs;
    private Set<String> locals;

    private Class returnType;

    private boolean verifying = true;

    private ParserContext pCtx;

    /** Compiles with a fresh {@link ParserContext}. */
    public CompiledExpression compile() {
        return compile(new ParserContext());
    }

    /**
     * Compiles against the supplied context, installing it into the
     * thread-local slot used by the parser internals.
     */
    public CompiledExpression compile(ParserContext ctx) {
        if (parserContext == null) {
            parserContext = new ThreadLocal<ParserContext>();
        }
        parserContext.set(ctx);
        return _compile();
    }

    /**
     * Initiate an in-context compile. This method should really only be called by the internal API.
     *
     * @return compiled expression object
     * @throws CompileException on fatal compilation errors (the thread-local
     *         parser context is removed before throwing)
     */
    public CompiledExpression _compile() {
        ASTNode tk;
        ASTNode tkOp;
        ASTNode tkOp2;
        ASTNode tkLA;
        ASTNode tkLA2;
        ASTLinkedList astLinkedList = new ASTLinkedList();
        boolean firstLA;

        pCtx = getParserContext();

        try {
            if (verifying) {
                inputs = new LinkedHashSet<String>();
                locals = new LinkedHashSet<String>();
                getParserContext().initializeVariableTable();
            }

            fields |= ASTNode.COMPILE_IMMEDIATE;

            while ((tk = nextToken()) != null) {
                // fields == -1 marks tokens that bypass verification entirely.
                if (tk.fields == -1) {
                    astLinkedList.addTokenNode(tk);
                    continue;
                }

                returnType = tk.getEgressType();

                if (pCtx.isStrictTypeEnforcement() && tk instanceof AssignmentNode
                        && (pCtx.getInputTable() == null || !pCtx.getInputTable().containsKey(tk.getName()))) {
                    addFatalError("untyped var not permitted in strict-mode: " + tk.getName());
                }

                if (tk instanceof Substatement) {
                    ExpressionCompiler subCompiler = new ExpressionCompiler(tk.getNameAsArray());
                    tk.setAccessor(subCompiler._compile());

                    if (verifying)
                        inputs.addAll(subCompiler.getInputs());
                }

                /**
                 * This kludge of code is to handle _compile-time literal reduction.  We need to avoid
                 * reducing for certain literals like, 'this', ternary and ternary else.
                 */
                if (tk.isLiteral() && tk.getLiteralValue() != LITERALS.get("this")) {
                    if ((tkOp = nextToken()) != null && tkOp.isOperator()
                            && !tkOp.isOperator(Operator.TERNARY) && !tkOp.isOperator(Operator.TERNARY_ELSE)) {

                        /**
                         * If the next token is ALSO a literal, then we have a candidate for a _compile-time
                         * reduction.
                         */
                        if ((tkLA = nextToken()) != null && tkLA.isLiteral()) {
                            stk.push(tk.getLiteralValue(), tkLA.getLiteralValue(), tkOp.getLiteralValue());

                            /**
                             * Reduce the token now.
                             */
                            reduceTrinary();

                            firstLA = true;

                            /**
                             * Now we need to check to see if this is actually a continuing reduction.
                             */
                            while ((tkOp2 = nextToken()) != null) {
                                if (!tkOp2.isOperator(tkOp.getOperator())) {
                                    /**
                                     * We can't continue any further because we are dealing with
                                     * different operators.
                                     */
                                    astLinkedList.addTokenNode(new LiteralNode(stk.pop()));
                                    astLinkedList.addTokenNode(tkOp2);
                                    break;
                                }
                                else if ((tkLA2 = nextToken()) != null && tkLA2.isLiteral()) {
                                    stk.push(tkLA2.getLiteralValue(), tkOp2.getLiteralValue());
                                    reduceTrinary();
                                    firstLA = false;
                                }
                                else {
                                    if (firstLA) {
                                        /**
                                         * There are more tokens, but we can't reduce anymore.  So
                                         * we create a reduced token for what we've got.
                                         */
                                        astLinkedList.addTokenNode(new ASTNode(ASTNode.LITERAL, stk.pop()));
                                    }
                                    else {
                                        /**
                                         * We have reduced additional tokens, but we can't reduce
                                         * anymore.
                                         */
                                        astLinkedList.addTokenNode(new ASTNode(ASTNode.LITERAL, stk.pop()), tkOp);

                                        if (tkLA2 != null) astLinkedList.addTokenNode(tkLA2);
                                    }
                                    break;
                                }
                            }

                            /**
                             * If there are no more tokens left to parse, we check to see if
                             * we've been doing any reducing, and if so we create the token
                             * now.
                             */
                            if (!stk.isEmpty())
                                astLinkedList.addTokenNode(new ASTNode(ASTNode.LITERAL, stk.pop()));

                            continue;
                        }
                        else {
                            astLinkedList.addTokenNode(verify(pCtx, tk), verify(pCtx, tkOp));
                            if (tkLA != null) astLinkedList.addTokenNode(verify(pCtx, tkLA));
                            continue;
                        }
                    }
                    else {
                        astLinkedList.addTokenNode(verify(pCtx, tk));
                        if (tkOp != null) astLinkedList.addTokenNode(verify(pCtx, tkOp));
                        continue;
                    }
                }
                astLinkedList.addTokenNode(verify(pCtx, tk));
            }

            if (verifying) {
                // Locally declared variables are not external inputs.
                for (String s : locals) {
                    inputs.remove(s);
                }
            }

            // FIX: the original had a second, identical "else if (pCtx.isFatalError())"
            // branch here, which could never be reached (same condition as the if);
            // the dead branch has been removed.
            if (pCtx.isFatalError()) {
                parserContext.remove();
                throw new CompileException("Failed to _compile: " + pCtx.getErrorList().size()
                        + " compilation error(s)", pCtx.getErrorList());
            }

            return new CompiledExpression(new ASTArrayList(astLinkedList), getCurrentSourceFileName());
        }
        catch (Throwable e) {
            parserContext.remove();
            if (e instanceof RuntimeException) throw (RuntimeException) e;
            else {
                throw new CompileException(e.getMessage(), e);
            }
        }
    }

    /**
     * Verifies a single AST node: records assignments as locals (compiling the
     * right-hand side to harvest its inputs) and identifiers as inputs.
     * Operators, literals and discarded nodes pass through untouched.
     */
    protected ASTNode verify(ParserContext pCtx, ASTNode tk) {
        if (tk.isDiscard() || (tk.fields & (ASTNode.OPERATOR | ASTNode.LITERAL)) != 0) return tk;

        if (verifying) {
            if (tk.isAssignment()) {
                char[] assign = tk.getNameAsArray();
                int c = 0;
                while (c < assign.length && assign[c] != '=') c++;

                String varName = new String(assign, 0, c++).trim();

                if (isReservedWord(varName)) {
                    addFatalError("invalid assignment - variable name is a reserved keyword: " + varName);
                }

                locals.add(varName);

                // Compile the RHS only to collect its external inputs.
                ExpressionCompiler subCompiler =
                        new ExpressionCompiler(new String(assign, c, assign.length - c).trim());

                subCompiler._compile();

                inputs.addAll(subCompiler.getInputs());

                pCtx.addVariable(varName, tk.getEgressType());
            }
            else if (tk.isIdentifier()) {
                inputs.add(tk.getAbsoluteName());

                PropertyVerifier propVerifier = new PropertyVerifier(tk.getNameAsArray(), getParserContext());
                returnType = propVerifier.analyze();

                inputs.addAll(propVerifier.getInputs());
            }
        }
        return tk;
    }

    /**
     * This method is called when we reach the point where we must subEval a trinary operation in the expression.
     * (ie. val1 op val2).  This is not the same as a binary operation, although binary operations would appear
     * to have 3 structures as well.  A binary structure (or also a junction in the expression) compares the
     * current state against 2 downrange structures (usually an op and a val).
     */
    private void reduceTrinary() {
        Object v1 = null, v2 = null;
        Integer operator;
        try {
            while (stk.size() > 1) {
                operator = (Integer) stk.pop();
                v1 = stk.pop();
                v2 = stk.pop();

                switch (operator) {
                    case Operator.ADD:
                    case Operator.SUB:
                    case Operator.DIV:
                    case Operator.MULT:
                    case Operator.MOD:
                    case Operator.EQUAL:
                    case Operator.NEQUAL:
                    case Operator.GTHAN:
                    case Operator.LTHAN:
                    case Operator.GETHAN:
                    case Operator.LETHAN:
                    case Operator.POWER:
                        stk.push(doOperations(v2, operator, v1));
                        break;

                    case Operator.AND:
                        stk.push(((Boolean) v2) && ((Boolean) v1));
                        break;

                    case Operator.OR:
                        stk.push(((Boolean) v2) || ((Boolean) v1));
                        break;

                    case Operator.CHOR:
                        // "chained or": first non-empty operand wins; clears the
                        // stack and returns immediately on a hit.
                        if (!PropertyTools.isEmpty(v2) || !PropertyTools.isEmpty(v1)) {
                            stk.clear();
                            stk.push(!PropertyTools.isEmpty(v2) ? v2 : v1);
                            return;
                        }
                        else stk.push(null);
                        break;

                    case Operator.REGEX:
                        stk.push(Pattern.compile(String.valueOf(v1)).matcher(String.valueOf(v2)).matches());
                        break;

                    case Operator.INSTANCEOF:
                        if (v1 instanceof Class)
                            stk.push(((Class) v1).isInstance(v2));
                        else
                            stk.push(forName(String.valueOf(v1)).isInstance(v2));
                        break;

                    case Operator.CONVERTABLE_TO:
                        if (v1 instanceof Class)
                            stk.push(canConvert(v2.getClass(), (Class) v1));
                        else
                            stk.push(canConvert(v2.getClass(), forName(String.valueOf(v1))));
                        break;

                    case Operator.CONTAINS:
                        stk.push(containsCheck(v2, v1));
                        break;

                    case Operator.BW_AND:
                        stk.push(asInt(v2) & asInt(v1));
                        break;

                    case Operator.BW_OR:
                        stk.push(asInt(v2) | asInt(v1));
                        break;

                    case Operator.BW_XOR:
                        stk.push(asInt(v2) ^ asInt(v1));
                        break;

                    case Operator.BW_SHIFT_LEFT:
                        stk.push(asInt(v2) << asInt(v1));
                        break;

                    case Operator.BW_USHIFT_LEFT:
                        // NOTE: negates a negative operand before shifting; there is
                        // no true unsigned left shift in Java, this mimics one.
                        int iv2 = asInt(v2);
                        if (iv2 < 0) iv2 *= -1;
                        stk.push(iv2 << asInt(v1));
                        break;

                    case Operator.BW_SHIFT_RIGHT:
                        stk.push(asInt(v2) >> asInt(v1));
                        break;

                    case Operator.BW_USHIFT_RIGHT:
                        stk.push(asInt(v2) >>> asInt(v1));
                        break;

                    case Operator.STR_APPEND:
                        stk.push(new StringAppender(String.valueOf(v2)).append(String.valueOf(v1)).toString());
                        break;

                    case Operator.SOUNDEX:
                        stk.push(Soundex.soundex(String.valueOf(v1)).equals(Soundex.soundex(String.valueOf(v2))));
                        break;

                    case Operator.SIMILARITY:
                        stk.push(PropertyTools.similarity(String.valueOf(v1), String.valueOf(v2)));
                        break;
                }
            }
        }
        catch (ClassCastException e) {
            if ((fields & ASTNode.LOOKAHEAD) == 0) {
                /**
                 * This will allow for some developers who like messy expressions to compileAccessor
                 * away with some messy constructs like: a + b < c && e + f > g + q instead
                 * of using brackets like (a + b < c) && (e + f > g + q)
                 */
                fields |= ASTNode.LOOKAHEAD;

                ASTNode tk = nextToken();
                if (tk != null) {
                    stk.push(v1, nextToken(), tk.getOperator());

                    reduceTrinary();
                    return;
                }
            }
            throw new CompileException("syntax error or incomptable types (left="
                    + (v1 != null ? v1.getClass().getName() : "null") + ", right="
                    + (v2 != null ? v2.getClass().getName() : "null") + ")", expr, cursor, e);
        }
        catch (Exception e) {
            throw new CompileException("failed to subEval expression: <<" + new String(expr) + ">>", e);
        }
    }

    /** Unboxes a stack value assumed to be an {@link Integer}. */
    private static int asInt(final Object o) {
        return (Integer) o;
    }

    /** External inputs collected during verification (may be null if not verifying). */
    public Set<String> getInputs() {
        return inputs;
    }

    /** Locally declared variables collected during verification. */
    public Set<String> getLocals() {
        return locals;
    }

    public ExpressionCompiler(String expression) {
        setExpression(expression);
    }

    public ExpressionCompiler(char[] expression) {
        setExpression(expression);
    }

    public boolean isVerifying() {
        return verifying;
    }

    public void setVerifying(boolean verifying) {
        this.verifying = verifying;
    }

    public Class getReturnType() {
        return returnType;
    }

    public void setReturnType(Class returnType) {
        this.returnType = returnType;
    }

    public String getExpression() {
        return new String(expr);
    }

    public ParserContext getParserContextState() {
        return pCtx;
    }
}
package com.timgroup.statsd;

import jnr.unixsocket.UnixDatagramChannel;

import java.io.IOException;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;

/**
 * A {@link ClientChannel} that forwards every write as a single datagram to a
 * fixed destination address on an underlying {@link DatagramChannel}.
 */
public class DatagramClientChannel implements ClientChannel {

    private final DatagramChannel delegate;
    private final String transport;
    private final SocketAddress address;

    /**
     * Creates a new DatagramClientChannel that wraps the delegate.
     * @param address Address to connect the channel to
     */
    public DatagramClientChannel(DatagramChannel delegate, SocketAddress address) throws IOException {
        this.delegate = delegate;
        // Unix-domain sockets are reported as "uds", everything else as "udp".
        this.transport = delegate instanceof UnixDatagramChannel ? "uds" : "udp";
        this.address = address;
    }

    @Override
    public boolean isOpen() {
        return delegate.isOpen();
    }

    /** Sends the buffer as one datagram; returns the number of bytes sent. */
    @Override
    public int write(ByteBuffer src) throws IOException {
        return delegate.send(src, address);
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    /** Transport label chosen at construction time ("uds" or "udp"). */
    @Override
    public String getTransportType() {
        return transport;
    }
}
package protocolsupportpocketstuff.packet; import org.bukkit.plugin.PluginManager; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import protocolsupport.api.Connection; import protocolsupport.api.Connection.PacketListener; import protocolsupport.protocol.serializer.VarNumberSerializer; import protocolsupportpocketstuff.ProtocolSupportPocketStuff; public abstract class PEPacket { public abstract int getPacketId(); public abstract void toData(Connection connection, ByteBuf serializer); public abstract void readFromClientData(Connection connection, ByteBuf clientData); public ByteBuf encode(Connection connection) { ByteBuf serializer = Unpooled.buffer(); VarNumberSerializer.writeVarInt(serializer, getPacketId()); serializer.writeByte(0); serializer.writeByte(0); toData(connection, serializer); return serializer; } public void decode(Connection connection, ByteBuf clientData) { clientData.readByte(); clientData.readByte(); readFromClientData(connection, clientData); } public abstract class decodeHandler extends PacketListener { protected ProtocolSupportPocketStuff plugin; protected Connection connection; protected PluginManager pm; public decodeHandler(ProtocolSupportPocketStuff plugin, Connection connection) { this.plugin = plugin; this.connection = connection; this.pm = plugin.getServer().getPluginManager(); } public void onRawPacketReceiving(RawPacketEvent e) { ByteBuf clientData = e.getData(); if(VarNumberSerializer.readVarInt(clientData) == PEPacket.this.getPacketId()) { PEPacket.this.decode(connection, clientData); handle(); } } public abstract void handle(); } }
package com.photon.phresco.commons.model; public class CoreOption { String techId; boolean core; public CoreOption(){ super(); } public CoreOption(String techId, boolean core) { super(); this.techId = techId; this.core = core; } public String getTechId() { return techId; } public void setTechId(String techId) { this.techId = techId; } public boolean isCore() { return core; } public void setCore(boolean core) { this.core = core; } }
package com.volumetricpixels.politics.group; import gnu.trove.iterator.TIntObjectIterator; import gnu.trove.map.TIntObjectMap; import gnu.trove.map.hash.TIntObjectHashMap; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import java.util.Map; import java.util.Set; import org.bson.BSONObject; import org.bson.BasicBSONObject; import org.spout.api.Server; import org.spout.api.Spout; import org.spout.api.entity.Player; import com.volumetricpixels.politics.data.Storable; import com.volumetricpixels.politics.group.level.GroupLevel; import com.volumetricpixels.politics.group.level.Role; import com.volumetricpixels.politics.universe.Universe; import com.volumetricpixels.politics.universe.UniverseRules; /** * Represents a group of players. */ public final class Group implements Comparable<Group>, Storable { /** * The unique identifier of this group. This is unique for the entire * plugin. */ private final int uid; /** * The level of the group. */ private final GroupLevel level; /** * Properties of this group. */ private final TIntObjectMap<Object> properties; /** * The immediate players of this group. The keys are the players, and the * values are the player privileges. */ private final Map<String, Role> players; /** * The universe this group is part of. */ private Universe universe; /** * C'tor * * @param universe * @param level */ public Group(int uid, GroupLevel level) { this(uid, level, new TIntObjectHashMap<Object>(), new HashMap<String, Role>()); } /** * C'tor * * @param universe * @param level * @param properties * @param players */ private Group(int uid, GroupLevel level, TIntObjectMap<Object> properties, Map<String, Role> players) { this.uid = uid; this.level = level; this.properties = properties; this.players = players; } /** * Initializes the universe. 
* * @param universe */ public void initialize(Universe universe) { if (universe == null) { throw new IllegalStateException("Someone is trying to screw with the plugin!"); } this.universe = universe; } /** * Gets the universe of this Group. * * @return */ public Universe getUniverse() { return universe; } /** * Gets the UID of this Group. * * @return */ public int getUid() { return uid; } /** * Gets the groups composing this group. * * @return */ public Set<Group> getGroups() { return universe.getChildGroups(this); } /** * Adds the given group as a child of this group. * * @param group * @return True if the given group was able to be a child of the group. */ public boolean addChildGroup(Group group) { return universe.addChildGroup(this, group); } /** * Removes the given group from this group's children. * * @param group * @return * * @see Universe#removeChildGroup(Group, Group) */ public boolean removeChildGroup(Group group) { return universe.removeChildGroup(this, group); } /** * Gets the GroupLevel of this Group. * * @return */ public GroupLevel getLevel() { return level; } /** * Gets the value of a property. * * @param property * @return */ public Object getProperty(int property) { return properties.get(property); } /** * Gets a property as a String. * * @param property * @return */ public String getStringProperty(int property) { return getStringProperty(property, null); } /** * Gets a property as a String. * * @param property * @param def Default value * @return */ public String getStringProperty(int property, String def) { Object p = getProperty(property); if (p != null) { return p.toString(); } return def; } /** * Gets a property as an integer. * * @param property * @return */ public int getIntProperty(int property) { return getIntProperty(property, -1); } /** * Gets a property as an integer. 
* * @param property * @param def * @return */ public int getIntProperty(int property, int def) { try { return Integer.parseInt(getStringProperty(property)); } catch (NumberFormatException e) { return def; } } /** * Sets the value of a property. * * @param property * @param value */ public void setProperty(int property, Serializable value) { properties.put(property, value); } /** * Gets the immediate players part of this group. * * @return */ public List<String> getImmediatePlayers() { return new ArrayList<String>(players.keySet()); } /** * Gets the immediate online players part of this group. * * @return */ public List<Player> getImmediateOnlinePlayers() { List<Player> players = new ArrayList<Player>(); for (String pn : getImmediatePlayers()) { Player player = ((Server) Spout.getEngine()).getPlayer(pn, true); if (player != null) { players.add(player); } } return players; } /** * Gets all players part of this group. * * @return */ public List<String> getPlayers() { List<String> players = new ArrayList<String>(); for (Group group : getGroups()) { players.addAll(group.getPlayers()); } players.addAll(this.players.keySet()); return players; } /** * Returns true if the given player is an immediate member of this group. * * @param player * @return */ public boolean isImmediateMember(String player) { return players.containsKey(player); } /** * Checks if the given player is a member of this group or child groups. * * @param player * @return */ public boolean isMember(String player) { if (isImmediateMember(player)) { return true; } for (Group group : getGroups()) { if (group.isMember(player)) { return true; } } return false; } /** * Gets the role of the given player. * * @param player * @return */ public Role getRole(String player) { return players.get(player); } /** * Sets the role of the given player to the given role. 
* * @param player * @param role */ public void setRole(String player, Role role) { players.put(player, role); } /** * Removes the role of the given player from this group. * * @param player */ public void removeRole(String player) { players.remove(player); } @Override public int compareTo(Group o) { return getProperty(GroupProperty.TAG).toString().compareTo(o.getProperty(GroupProperty.TAG).toString()); } @Override public BasicBSONObject toBSONObject() { BasicBSONObject object = new BasicBSONObject(); object.put("uid", uid); object.put("level", level.getId()); final BasicBSONObject propertiesBson = new BasicBSONObject(); TIntObjectIterator<Object> pit = properties.iterator(); while (pit.hasNext()) { pit.advance(); propertiesBson.put(Integer.toHexString(pit.key()), pit.value()); } object.put("properties", propertiesBson); final BasicBSONObject playersBson = new BasicBSONObject(); for (Entry<String, Role> roleEntry : players.entrySet()) { playersBson.put(roleEntry.getKey(), roleEntry.getValue().getId()); } object.put("players", playersBson); return object; } /** * Gets the Group from the given BSONObject. * * @param rules * @param object * @return */ public static Group fromBSONObject(UniverseRules rules, BSONObject object) { if (!(object instanceof BasicBSONObject)) { throw new IllegalStateException("object is not a BasicBsonObject! ERROR ERROR ERROR!"); } BasicBSONObject bobject = (BasicBSONObject) object; int uid = bobject.getInt("uid"); String levelName = bobject.getString("level"); GroupLevel level = rules.getGroupLevel(levelName); if (level == null) { throw new IllegalStateException("Unknown level type '" + level + "'! (Did the universe rules change?)"); } // Properties Object propertiesObj = bobject.get("properties"); if (!(propertiesObj instanceof BasicBSONObject)) { throw new IllegalStateException("WTF you screwed up the properties! 
CORRUPT!"); } BasicBSONObject propertiesBson = (BasicBSONObject) propertiesObj; TIntObjectMap<Object> properties = new TIntObjectHashMap<Object>(); for (Entry<String, Object> entry : propertiesBson.entrySet()) { int realKey = Integer.valueOf(entry.getKey(), 16); Object value = entry.getValue(); properties.put(realKey, value); } // Players Object playersObj = bobject.get("players"); if (!(playersObj instanceof BasicBSONObject)) { throw new IllegalStateException("Stupid server admin... don't mess with the data!"); } BasicBSONObject playersBson = (BasicBSONObject) playersObj; Map<String, Role> players = new HashMap<String, Role>(); for (Entry<String, Object> entry : playersBson.entrySet()) { String roleId = entry.getValue().toString(); Role role = level.getRole(roleId); players.put(entry.getKey(), role); } return new Group(uid, level, properties, players); } }
import com.google.gson.Gson; import com.researchworx.cresco.library.messaging.MsgEvent; import com.researchworx.cresco.library.plugin.core.CPlugin; import java.util.Map; import java.util.Timer; import java.util.TimerTask; class PerfMonitor { private CPlugin plugin; private Timer timer; private boolean running = false; private DockerEngine de; private String container_id; private Gson gson; PerfMonitor(CPlugin plugin, DockerEngine de, String container_id) { this.plugin = plugin; this.de = de; this.container_id = container_id; gson = new Gson(); } PerfMonitor start() { if (this.running) return this; Long interval = plugin.getConfig().getLongParam("perftimer", 5000L); MsgEvent initial = new MsgEvent(MsgEvent.Type.INFO, plugin.getRegion(), plugin.getAgent(), plugin.getPluginID(), "Performance Monitoring timer set to " + interval + " milliseconds."); initial.setParam("src_region", plugin.getRegion()); initial.setParam("src_agent", plugin.getAgent()); initial.setParam("src_plugin", plugin.getPluginID()); initial.setParam("dst_region", plugin.getRegion()); initial.setParam("dst_agent", plugin.getAgent()); initial.setParam("dst_plugin", "plugin/0"); plugin.sendMsgEvent(initial); timer = new Timer(); timer.scheduleAtFixedRate(new PerfMonitorTask(plugin), 500, interval); return this; } PerfMonitor restart() { if (running) timer.cancel(); running = false; return start(); } void stop() { timer.cancel(); running = false; } private class PerfMonitorTask extends TimerTask { private CPlugin plugin; PerfMonitorTask(CPlugin plugin) { this.plugin = plugin; } public void run() { MsgEvent tick = new MsgEvent(MsgEvent.Type.KPI, plugin.getRegion(), plugin.getAgent(), plugin.getPluginID(), "Performance Monitoring tick."); tick.setParam("src_region", plugin.getRegion()); tick.setParam("src_agent", plugin.getAgent()); tick.setParam("src_plugin", plugin.getPluginID()); tick.setParam("dst_region", plugin.getRegion()); tick.setParam("dst_agent", plugin.getAgent()); tick.setParam("dst_plugin", 
"plugin/0"); tick.setParam("is_regional", Boolean.TRUE.toString()); tick.setParam("is_global", Boolean.TRUE.toString()); tick.setParam("resource_id", plugin.getConfig().getStringParam("resource_id")); tick.setParam("inode_id", plugin.getConfig().getStringParam("inode_id")); ResourceMetric rm = de.getResourceMetric(container_id); String resourceMetricJSON = gson.toJson(rm); tick.setParam("resource_metric", resourceMetricJSON); String perfInfo = de.getContainerInfoMap(); tick.setCompressedParam("perf",perfInfo); plugin.sendMsgEvent(tick); /* MsgEvent tick = new MsgEvent(MsgEvent.Type.KPI, plugin.getRegion(), plugin.getAgent(), plugin.getPluginID(), "Performance Monitoring tick."); tick.setParam("src_region", plugin.getRegion()); tick.setParam("src_agent", plugin.getAgent()); tick.setParam("src_plugin", plugin.getPluginID()); tick.setParam("dst_region", plugin.getRegion()); tick.setParam("dst_agent", plugin.getAgent()); tick.setParam("dst_plugin", "plugin/0"); tick.setParam("is_regional",Boolean.TRUE.toString()); tick.setParam("is_global",Boolean.TRUE.toString()); tick.setParam("resource_id",plugin.getConfig().getStringParam("resource_id","container_resource")); tick.setParam("inode_id",plugin.getConfig().getStringParam("inode_id","container_inode")); tick.setParam("container_image",de.containerImage); ResourceMetric rm = de.getResourceMetric(container_id); String resourceMetricJSON = gson.toJson(rm); tick.setParam("resource_metric", resourceMetricJSON); String perfInfo = de.getContainerInfoMap(); tick.setCompressedParam("perf",perfInfo); */ /* plugin.sendMsgEvent(tick); //double send required to set container resource and get stats... needs to be fixed tick.setParam("resource_id",plugin.getConfig().getStringParam("resource_id","container_resource")); tick.setParam("inode_id",plugin.getConfig().getStringParam("inode_id","container_inode")); */ //plugin.sendMsgEvent(tick); } } }
package com.pingidentity.developer.pingid;

import org.apache.commons.io.IOUtils;
import org.jose4j.base64url.Base64;
import org.jose4j.jws.AlgorithmIdentifiers;
import org.jose4j.jws.JsonWebSignature;
import org.jose4j.keys.HmacKey;
import org.jose4j.lang.JoseException;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;

/**
 * Stateful wrapper around one PingID REST API call. Each public operation
 * method builds a JWS-signed request token, POSTs it to the corresponding
 * endpoint, and records the outcome (response token, HTTP status, error
 * fields) on this object. Not thread-safe: every call mutates shared state.
 */
public class Operation {

    // Request/response bookkeeping, populated by the last operation invoked.
    private String name;
    private String endpoint;
    private String requestToken;
    private String responseToken;
    private int responseCode;
    private Boolean wasSuccessful;
    private long errorId;
    private String errorMsg;
    private String uniqueMsgId;

    // Tenant credentials and API base URL, fixed at construction.
    private String idpUrl;
    private String orgAlias;
    private String token;
    private String useBase64Key;

    // Values handed back by pairing/auth flows for follow-up calls.
    private String lastActivationCode;
    private String lastSessionId;

    // Extra per-operation return values (e.g. "pairingStatus").
    private Map<String, Object> values;
    private String clientData;
    private User user;

    // Version reported in the request header; endpoints use the /rest/4/ path.
    private final String apiVersion = "4.6";

    /**
     * @param orgAlias     PingID organization alias
     * @param token        API token placed in the JWS header and request header
     * @param useBase64Key base64-encoded HMAC key used to sign/verify tokens
     * @param pingidUrl    base URL of the PingID IDP service
     */
    public Operation(String orgAlias, String token, String useBase64Key, String pingidUrl) {
        this.orgAlias = orgAlias;
        this.token = token;
        this.useBase64Key = useBase64Key;
        this.idpUrl = pingidUrl;
        this.values = new HashMap<String, Object>();
    }

    // --- accessors for the state of the last call ---

    public String getName() { return name; }
    public String getEndpoint() { return endpoint; }
    public String getRequestToken() { return requestToken; }
    public String getResponseToken() { return responseToken; }
    public int getResponseCode() { return responseCode; }
    public Boolean getWasSuccessful() { return wasSuccessful; }
    public String getLastActivationCode() { return this.lastActivationCode; }
    public String getLastSessionId() { return this.lastSessionId; }
    public long getErrorId() { return errorId; }
    public String getErrorMsg() { return errorMsg; }
    public String getUniqueMsgId() { return uniqueMsgId; }
    public Map<String, Object> getReturnValues() { return values; }
    public User getUser() { return user; }

    /** Sets the user the next operation acts on. */
    public void setTargetUser(User user) { this.user = user; }

    /** Convenience overload: wraps the username in a fresh User. */
    public void setTargetUser(String username) { this.user = new User(username); }

    public void setLastActivationCode(String activationCode) { this.lastActivationCode = activationCode; }
    public void setLastSessionId(String sessionId) { this.lastSessionId = sessionId; }

    // public methods

    /**
     * Creates a user; when activateUser is true the returned activation code
     * is stored in lastActivationCode.
     */
    @SuppressWarnings("unchecked")
    public void AddUser(Boolean activateUser) {
        this.name = "AddUser";
        this.endpoint = idpUrl + "/rest/4/adduser/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("activateUser", activateUser);
        reqBody.put("email", this.user.getEmail());
        reqBody.put("fName", this.user.getFirstName());
        // NOTE(review): key casing differs from EditUser ("lname"/"username"
        // here vs "lName"/"userName" there) — confirm against the API spec.
        reqBody.put("lname", this.user.getLastName());
        reqBody.put("username", this.user.getUserName());
        reqBody.put("role", this.user.getRole().getValue());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        values.clear();
        if (activateUser) {
            this.lastActivationCode = (String)response.get("activationCode");
        }
    }

    /** Updates an existing user; may also (re)activate them. */
    @SuppressWarnings("unchecked")
    public void EditUser(Boolean activateUser) {
        this.name = "EditUser";
        this.endpoint = idpUrl + "/rest/4/edituser/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("activateUser", activateUser);
        reqBody.put("email", this.user.getEmail());
        reqBody.put("fName", this.user.getFirstName());
        reqBody.put("lName", this.user.getLastName());
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("role", this.user.getRole().getValue());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        values.clear();
        if (activateUser) {
            this.lastActivationCode = (String)response.get("activationCode");
        }
    }

    /**
     * Fetches user details and REPLACES this.user with the parsed result,
     * including device details.
     */
    @SuppressWarnings("unchecked")
    public void GetUserDetails() {
        this.name = "GetUserDetails";
        this.endpoint = idpUrl + "/rest/4/getuserdetails/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("getSameDeviceUsers", false);
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        values.clear();
        JSONObject response = parseResponse();
        JSONObject userDetails = (JSONObject)response.get("userDetails");
        this.user = new User(userDetails);
        DeviceDetails deviceDetails = new DeviceDetails((JSONObject)userDetails.get("deviceDetails"));
        this.user.setDeviceDetails(deviceDetails);
    }

    /** Deletes the target user. */
    @SuppressWarnings("unchecked")
    public void DeleteUser() {
        this.name = "DeleteUser";
        this.endpoint = idpUrl + "/rest/4/deleteuser/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Suspends the target user. */
    @SuppressWarnings("unchecked")
    public void SuspendUser() {
        this.name = "SuspendUser";
        this.endpoint = idpUrl + "/rest/4/suspenduser/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Activates the target user. */
    @SuppressWarnings("unchecked")
    public void ActivateUser() {
        this.name = "ActivateUser";
        this.endpoint = idpUrl + "/rest/4/activateuser/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /**
     * Toggles MFA bypass for the target user.
     *
     * @param until bypass expiry; 0 sends a null bypassUntil (presumably
     *              disabling bypass — confirm with the API docs)
     */
    @SuppressWarnings("unchecked")
    public void ToggleUserBypass(long until) {
        this.name = "ToggleUserBypass";
        this.endpoint = idpUrl + "/rest/4/userbypass/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("bypassUntil", (until != 0) ? until : null);
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Unpairs the target user's device. */
    @SuppressWarnings("unchecked")
    public void UnpairDevice() {
        this.name = "UnpairDevice";
        this.endpoint = idpUrl + "/rest/4/unpairdevice/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /**
     * Queries the status of a pairing attempt; result is published under the
     * "pairingStatus" key of getReturnValues().
     */
    @SuppressWarnings("unchecked")
    public void GetPairingStatus(String activationCode) {
        this.name = "GetPairingStatus";
        this.endpoint = idpUrl + "/rest/4/pairingstatus/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("activationCode", activationCode);
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        values.clear();
        values.put("pairingStatus", (PairingStatus) PairingStatus.valueOf((String)response.get("pairingStatus")));
    }

    /** Pairs a YubiKey with the target user using the supplied OTP. */
    @SuppressWarnings("unchecked")
    public void PairYubiKey(String otp) {
        this.name = "PairYubiKey";
        this.endpoint = idpUrl + "/rest/4/pairyubikey/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("otp", otp);
        reqBody.put("username", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /**
     * Starts offline pairing via SMS/voice (phone number) or email; stores the
     * returned session id in lastSessionId for FinalizeOfflinePairing.
     */
    @SuppressWarnings("unchecked")
    public void StartOfflinePairing(OfflinePairingMethod pairingMethod) {
        this.name = "StartOfflinePairing";
        this.endpoint = idpUrl + "/rest/4/startofflinepairing/do";
        JSONObject reqBody = new JSONObject();
        if (pairingMethod == OfflinePairingMethod.SMS) {
            reqBody.put("type", OfflinePairingMethod.SMS.getValue());
            reqBody.put("pairingData", this.user.getPhoneNumber());
        } else if (pairingMethod == OfflinePairingMethod.VOICE) {
            reqBody.put("type", OfflinePairingMethod.VOICE.getValue());
            reqBody.put("pairingData", this.user.getPhoneNumber());
        } else if (pairingMethod == OfflinePairingMethod.EMAIL) {
            reqBody.put("type", OfflinePairingMethod.EMAIL.getValue());
            reqBody.put("pairingData", this.user.getEmail());
        }
        reqBody.put("username", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        values.clear();
        this.lastSessionId = (String)response.get("sessionId");
    }

    /** Completes an offline pairing started by StartOfflinePairing. */
    @SuppressWarnings("unchecked")
    public void FinalizeOfflinePairing(String sessionId, String otp) {
        this.name = "FinalizeOfflinePairing";
        this.endpoint = idpUrl + "/rest/4/finalizeofflinepairing/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("otp", otp);
        reqBody.put("sessionId", sessionId);
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Fetches a fresh activation code into lastActivationCode. */
    @SuppressWarnings("unchecked")
    public void GetActivationCode() {
        this.name = "GetActivationCode";
        this.endpoint = idpUrl + "/rest/4/getactivationcode/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        values.clear();
        this.lastActivationCode = (String)response.get("activationCode");
    }

    /**
     * Starts an online authentication; on success the session id is stored in
     * lastSessionId. The application's name/logo are passed as form parameters.
     */
    @SuppressWarnings("unchecked")
    public void AuthenticateOnline(Application application, String authType) {
        this.name = "AuthenticateOnline";
        this.endpoint = idpUrl + "/rest/4/authonline/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("authType", authType);
        reqBody.put("spAlias", application.getSpAlias());
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("clientData", this.clientData);
        JSONObject formParameters = new JSONObject();
        formParameters.put("sp_name", application.getName());
        if (application.getLogoUrl() != null && !application.getLogoUrl().isEmpty()) {
            formParameters.put("sp_logo", application.getLogoUrl());
        }
        reqBody.put("formParameters", formParameters);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        JSONObject response = parseResponse();
        if (this.wasSuccessful) {
            values.clear();
            this.lastSessionId = (String)response.get("sessionId");
        }
    }

    /** Completes an offline (OTP-based) authentication for a session. */
    @SuppressWarnings("unchecked")
    public void AuthenticateOffline(String sessionId, String otp) {
        this.name = "AuthenticateOffline";
        this.endpoint = idpUrl + "/rest/4/authoffline/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("spAlias", "web");
        reqBody.put("otp", otp);
        reqBody.put("userName", this.user.getUserName());
        reqBody.put("sessionId", sessionId);
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Creates a server-side batch job of the given type. */
    @SuppressWarnings("unchecked")
    public void createJob(JobType jobType) {
        this.name = "CreateJob";
        this.endpoint = idpUrl + "/rest/4/createjob/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("jobType", jobType.toString());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /** Polls the status of a previously created job. */
    @SuppressWarnings("unchecked")
    public void getJobStatus(String jobToken) {
        this.name = "GetJobStatus";
        this.endpoint = idpUrl + "/rest/4/getjobstatus/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("jobToken", jobToken);
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        sendRequest();
        parseResponse();
        values.clear();
    }

    /**
     * Downloads the organization user report.
     *
     * @return the raw response stream on success, null on failure; the caller
     *         owns (and must close) the stream
     */
    @SuppressWarnings("unchecked")
    public InputStream downloadUserReport(FileType fileType) {
        this.name = "DownloadUserReport";
        this.endpoint = idpUrl + "/rest/4/getorgreport/do";
        JSONObject reqBody = new JSONObject();
        reqBody.put("fileType", fileType.toString());
        reqBody.put("clientData", this.clientData);
        this.requestToken = buildRequestToken(reqBody);
        return sendRequestAndGetInputStream();
    }

    //private methods

    /**
     * Wraps the request body (plus a standard request header) in an
     * HMAC-SHA256 JWS signed with the org's base64 key.
     */
    @SuppressWarnings("unchecked")
    private String buildRequestToken(JSONObject requestBody) {
        JSONObject requestHeader = buildRequestHeader();
        JSONObject payload = new JSONObject();
        payload.put("reqHeader", requestHeader);
        payload.put("reqBody", requestBody);
        JsonWebSignature jws = new JsonWebSignature();
        jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256);
        jws.setHeader("org_alias", this.orgAlias);
        jws.setHeader("token", this.token);
        jws.setPayload(payload.toJSONString());
        // Set the verification key
        HmacKey key = new HmacKey(Base64.decode(this.useBase64Key));
        jws.setKey(key);
        String jwsCompactSerialization = null;
        try {
            jwsCompactSerialization = jws.getCompactSerialization();
        } catch (JoseException e) {
            // NOTE(review): a signing failure leaves the token null and only
            // prints the stack trace — the subsequent request will fail.
            e.printStackTrace();
        }
        this.requestToken = jwsCompactSerialization;
        return jwsCompactSerialization;
    }

    /** Builds the common reqHeader object attached to every request. */
    @SuppressWarnings("unchecked")
    private JSONObject buildRequestHeader() {
        JSONObject reqHeader = new JSONObject();
        reqHeader.put("locale", "en");
        reqHeader.put("orgAlias", this.orgAlias);
        reqHeader.put("secretKey", this.token);
        reqHeader.put("timestamp", getCurrentTimeStamp());
        reqHeader.put("version", this.apiVersion);
        return reqHeader;
    }

    /**
     * Current time formatted for the API request header.
     * NOTE(review): hard-coded America/Denver timezone — presumably what the
     * PingID service expects; verify.
     */
    static String getCurrentTimeStamp() {
        Date currentDate = new Date();
        SimpleDateFormat PingIDDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        PingIDDateFormat.setTimeZone(TimeZone.getTimeZone("America/Denver"));
        return PingIDDateFormat.format(currentDate);
    }

    /**
     * POSTs the request token and returns the raw response stream on HTTP 200
     * (used for report downloads). On error the response token is captured and
     * null is returned. On the success path the connection is intentionally
     * left open — the caller consumes the stream.
     */
    private InputStream sendRequestAndGetInputStream() {
        try {
            URL restUrl = new URL(this.getEndpoint());
            HttpURLConnection urlConnection = (HttpURLConnection)restUrl.openConnection();
            urlConnection.setRequestMethod("POST");
            urlConnection.addRequestProperty("Content-Type", "application/json");
            urlConnection.addRequestProperty("Accept", "application/octet-stream");
            urlConnection.setDoOutput(true);
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(urlConnection.getOutputStream(), "UTF-8");
            outputStreamWriter.write(this.getRequestToken());
            outputStreamWriter.flush();
            outputStreamWriter.close();
            int responseCode = urlConnection.getResponseCode();
            this.responseCode = responseCode;
            if (responseCode == 200) {
                InputStream is = urlConnection.getInputStream();
                this.wasSuccessful = true;
                return is;
            } else {
                String encoding = urlConnection.getContentEncoding();
                InputStream is = urlConnection.getErrorStream();
                String stringJWS = IOUtils.toString(is, encoding);
                this.responseToken = stringJWS;
                this.wasSuccessful = false;
                urlConnection.disconnect();
            }
        } catch (Exception ex) {
            // Any transport failure is collapsed into a generic 500.
            this.responseCode = 500;
            this.wasSuccessful = false;
        }
        return null;
    }

    /**
     * POSTs the request token and captures the JWS response (from the normal
     * or error stream) into responseToken; sets responseCode/wasSuccessful.
     */
    private void sendRequest() {
        try {
            URL restUrl = new URL(this.getEndpoint());
            HttpURLConnection urlConnection = (HttpURLConnection)restUrl.openConnection();
            urlConnection.setRequestMethod("POST");
            urlConnection.addRequestProperty("Content-Type", "application/json");
            urlConnection.addRequestProperty("Accept", "*/*");
            urlConnection.setDoOutput(true);
            OutputStreamWriter outputStreamWriter = new OutputStreamWriter(urlConnection.getOutputStream(), "UTF-8");
            outputStreamWriter.write(this.getRequestToken());
            outputStreamWriter.flush();
            outputStreamWriter.close();
            int responseCode = urlConnection.getResponseCode();
            this.responseCode = responseCode;
            if (responseCode == 200) {
                String encoding = urlConnection.getContentEncoding();
                InputStream is = urlConnection.getInputStream();
                String stringJWS = IOUtils.toString(is, encoding);
                this.responseToken = stringJWS;
                this.wasSuccessful = true;
                urlConnection.disconnect();
            } else {
                String encoding = urlConnection.getContentEncoding();
                InputStream is = urlConnection.getErrorStream();
                String stringJWS = IOUtils.toString(is, encoding);
                this.responseToken = stringJWS;
                this.wasSuccessful = false;
                urlConnection.disconnect();
            }
        } catch (Exception ex) {
            // Any transport failure is collapsed into a generic 500.
            this.responseCode = 500;
            this.wasSuccessful = false;
        }
    }

    /**
     * Verifies and parses the JWS response token; extracts the common error
     * fields. Returns the response payload (possibly the nested responseBody),
     * or null if verification/parsing failed.
     */
    private JSONObject parseResponse() {
        JSONParser parser = new JSONParser();
        JSONObject responsePayloadJSON = null;
        try {
            JsonWebSignature responseJWS = new JsonWebSignature();
            responseJWS.setCompactSerialization(this.responseToken);
            HmacKey key = new HmacKey(Base64.decode(this.useBase64Key));
            responseJWS.setKey(key);
            responsePayloadJSON = (JSONObject)parser.parse(responseJWS.getPayload());
            // workaround for PingID API 4.5 beta
            if (responsePayloadJSON.containsKey("responseBody")) {
                responsePayloadJSON = (JSONObject)responsePayloadJSON.get("responseBody");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (responsePayloadJSON != null) {
            // NOTE(review): unboxing cast NPEs if "errorId" is absent from the
            // payload — assumed always present; confirm.
            this.errorId = (long)responsePayloadJSON.get("errorId");
            this.errorMsg = (String)responsePayloadJSON.get("errorMsg");
            this.uniqueMsgId = (String)responsePayloadJSON.get("uniqueMsgId");
            this.clientData = (String)responsePayloadJSON.get("clientData");
        } else {
            this.errorId = 501;
            this.errorMsg = "Could not parse JWS";
            this.uniqueMsgId = "";
            this.clientData = "";
            this.wasSuccessful = false;
        }
        return responsePayloadJSON;
    }
}
package com.vranec.minimax; public class ArtificialIntelligence { public BestMove negamax(Board board, int depth, Color color) { return negamax(board, depth, -Integer.MAX_VALUE, Integer.MAX_VALUE, color); } /** * * @param board * Current board state. * @param depth * Depth to search in. * @param color * Who is on the move. * @return */ public BestMove negamax(Board board, int depth, int alpha, int beta, Color color) { if (depth == 0 || board.isGameOver()) { return new BestMove(board.getBoardValue(color)); } BestMove bestMove = new BestMove(-Integer.MAX_VALUE); for (Board nextBoard : board.getNextBoards(color)) { BestMove nextBestMove = negamax(nextBoard, depth - 1, -beta, -alpha, color.getOtherColor()); int val = -nextBestMove.getValue(); if (val > bestMove.getValue()) { bestMove = nextBestMove; bestMove.setValue(val); bestMove.setBestBoard(nextBoard); alpha = Math.max(alpha, val); } if (alpha >= beta) { break; } } return bestMove; } }
package org.languagetool;

import lombok.extern.slf4j.Slf4j;
import org.languagetool.markup.AnnotatedText;
import org.languagetool.rules.Rule;

import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.util.*;

/**
 * Information about premium-only rules.
 */
@Slf4j
public abstract class Premium {

  // Build metadata loaded from /git-premium.properties; empty when the file
  // is missing or unreadable. Initialized eagerly so the getters below can
  // never hit a null field (previously an IOException left this null -> NPE).
  private Optional<Properties> gitPremiumProps = Optional.empty();

  public Premium() {
    // try-with-resources closes the stream (it was previously leaked);
    // a null resource is permitted and simply skipped on close.
    try (InputStream in = JLanguageTool.getDataBroker().getAsStream("/git-premium.properties")) {
      if (in != null) {
        Properties props = new Properties();
        props.load(in);
        gitPremiumProps = Optional.of(props);
      } else {
        gitPremiumProps = Optional.empty();
      }
    } catch (IOException e) {
      gitPremiumProps = Optional.empty();
      log.warn("Failed to read git-premium.properties file.", e);
    }
  }

  // Rules temporarily exempted from premium gating; currently none.
  private static final List<String> tempNotPremiumRules = Arrays.asList();

  /** @return whether the given rule is temporarily exempt from premium gating */
  public static boolean isTempNotPremium(Rule rule) {
    return tempNotPremiumRules.contains(rule.getId());
  }

  /**
   * Detects the magic test sentences used by clients to probe whether the
   * server runs the premium version.
   */
  public static boolean isPremiumStatusCheck(AnnotatedText text) {
    final String testRuleText = "languagetool testrule 8634756";
    final String testRuleText2 = "The languagetool testrule 8634756.";
    return testRuleText2.equals(text.getOriginalText()) || testRuleText.equals(text.getOriginalText());
  }

  /**
   * @return a {@code PremiumOn} instance if that class is on the classpath,
   *         otherwise a {@link PremiumOff} fallback
   * @throws RuntimeException if the class exists but cannot be instantiated
   */
  public static Premium get() {
    String className = "org.languagetool.PremiumOn";
    try {
      Class<?> aClass = JLanguageTool.getClassBroker().forName(className);
      Constructor<?> constructor = aClass.getConstructor();
      return (Premium) constructor.newInstance();
    } catch (ClassNotFoundException e) {
      // 'PremiumOn' doesn't exist, thus this is the non-premium version
      return new PremiumOff();
    } catch (Exception e) {
      throw new RuntimeException("Object for class '" + className + "' could not be created", e);
    }
  }

  /**
   * @return whether the premium implementation class is available at runtime
   * @throws RuntimeException if the class exists but cannot be instantiated
   */
  public static boolean isPremiumVersion() {
    String className = "org.languagetool.PremiumOn";
    try {
      Class<?> aClass = JLanguageTool.getClassBroker().forName(className);
      Constructor<?> constructor = aClass.getConstructor();
      constructor.newInstance();
      return true;
    } catch (ClassNotFoundException e) {
      // doesn't exist, thus this is the non-premium version
      return false;
    } catch (Exception e) {
      throw new RuntimeException("Object for class '" + className + "' could not be created", e);
    }
  }

  /** @return whether the given rule is restricted to premium users */
  public abstract boolean isPremiumRule(Rule rule);

  /** @return the premium build timestamp, or {@code null} if unknown */
  public String getBuildDate() {
    return gitPremiumProps.map(properties -> properties.getProperty("git.build.time")).orElse(null);
  }

  /** @return the abbreviated git commit id of the premium build, or {@code null} */
  public String getShortGitId() {
    return gitPremiumProps.map(properties -> properties.getProperty("git.commit.id.abbrev")).orElse(null);
  }

  /** @return the premium build version, or {@code null} if unknown */
  public String getVersion() {
    return gitPremiumProps.map(properties -> properties.getProperty("git.build.version")).orElse(null);
  }
}
package com.sun.star.lib.uno.environments.remote; public interface IReceiver { /** * Send back a reply for a request. * * @param exception <CODE>true</CODE> if an exception (instead of a normal * result) is sent back. * @param threadId the thread ID of the request. * @param result the result of executing the request, or an exception thrown * while executing the request. */ void sendReply(boolean exception, ThreadId threadId, Object result); }
package org.se.lab.data; import org.apache.log4j.Logger; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import java.util.List; class UserDAOImpl implements UserDAO { private final Logger LOG = Logger.getLogger(UserDAOImpl.class); @PersistenceContext private EntityManager em; /* * CRUD Operations */ @Override public User insert(User article) { LOG.info("insert(" + article + ")"); em.persist(article); return article; } @Override public User update(User article) { LOG.info("update(" + article + ")"); return em.merge(article); } @Override public void delete(User article) { LOG.info("delete(" + article + ")"); em.remove(article); } @Override public User findById(int id) { LOG.info("findById(" + id + ")"); return em.find(User.class, id); } @SuppressWarnings("unchecked") @Override public List<User> findAll() { LOG.info("findAll()"); final String hql = "SELECT u FROM " + User.class.getName() + " AS u"; return em.createQuery(hql).getResultList(); } /* * Factory methods */ @Override public User createUser(String username, String password) { LOG.info("createArticle(\"" + username + "\"," + "***" +")"); User u = new User(); u.setUsername(username); u.setPassword(password); insert(u); return u; } @Override public User loadByUsername(String username) { //TODO implement return null; } }
import org.lwjgl.BufferUtils;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.*;

import java.nio.FloatBuffer;

import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL30.*;

/**
 * Minimal LWJGL port of an OpenGL SuperBible sample: opens an 800x600 window,
 * clears the color buffer once with a time-derived color and compiles a
 * trivial vertex/fragment shader pair.
 */
public class SuperBible1 {

    // Source code for vertex shader (emits one fixed clip-space position)
    private final String vertexShaderSource =
            "#version 430 core \n" +
            "void main(void) \n" +
            "{ \n" +
            " gl_Position = vec4(0.0, 0.0, 0.5, 1.0); \n" +
            "} \n";

    // Source code for fragment shader (constant output color)
    private final String fragmentShaderSource =
            "#version 430 core \n" +
            "out vec4 color; \n" +
            "void main(void) \n" +
            "{ \n" +
            " color = vec4(0.0, 0.8, 1.0, 1.0); \n" +
            "} \n";

    /**
     * Creates the display, clears it once, compiles the shader program and
     * then blocks until the window is closed.
     */
    public void start() {
        try {
            Display.setDisplayMode(new DisplayMode(800, 600));
            Display.create(new PixelFormat(), createContextAttribs());
        } catch (LWJGLException e) {
            e.printStackTrace();
            System.exit(0);
        }

        // NOTE(review): currentTime is in milliseconds, so sin/cos below vary
        // extremely fast between runs — confirm whether seconds were intended.
        long currentTime = System.currentTimeMillis();

        // Simply clear the window with different colors
        glClearBuffer(GL_COLOR, 0, createClearFloatBuffer(currentTime));

        compileShaders(vertexShaderSource, fragmentShaderSource);

        Display.update();

        // NOTE(review): busy-waits without per-frame Display.update()/redraw,
        // so the window is rendered exactly once — confirm if a render loop
        // was intended here.
        while (!Display.isCloseRequested()) {
        }

        Display.destroy();
    }

    /**
     * Compiles both shaders, links them into a program and deletes the
     * individual shader objects (the program keeps the compiled binaries).
     *
     * @return the linked program object name
     */
    private int compileShaders(String vertexShaderSource, String fragmentShaderSource) {
        // Create and compile vertex shader
        int vertexShader = glCreateShader(GL_VERTEX_SHADER);
        glShaderSource(vertexShader, vertexShaderSource);
        glCompileShader(vertexShader);

        // Create and compile fragment shader
        int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
        glShaderSource(fragmentShader, fragmentShaderSource);
        glCompileShader(fragmentShader);

        // Create program, attach shaders to it, and link it
        int program = glCreateProgram();
        glAttachShader(program, vertexShader);
        glAttachShader(program, fragmentShader);
        glLinkProgram(program);

        // Delete the shaders as the program has them now
        glDeleteShader(vertexShader);
        glDeleteShader(fragmentShader);

        return program;
    }

    /**
     * Builds the RGBA clear-color buffer from the given timestamp.
     *
     * @param currentTime timestamp fed into sin/cos to derive red/green
     * @return a flipped 4-float RGBA buffer ready for glClearBuffer
     */
    private FloatBuffer createClearFloatBuffer(long currentTime) {
        final float red[] = {
                (float) (Math.sin(currentTime) * 0.5f + 0.5f),
                (float) (Math.cos(currentTime) * 0.5f + 0.5f),
                0.0f, 1.0f};
        FloatBuffer floatBuffer = BufferUtils.createFloatBuffer(red.length).put(red);
        floatBuffer.flip();
        return floatBuffer;
    }

    /**
     * Settings for mac os.
     */
    private ContextAttribs createContextAttribs() {
        // Forward-compatible 3.2 core profile is required on macOS.
        return new ContextAttribs(3, 2).withForwardCompatible(true).withProfileCore(true);
    }

    public static void main(String[] argv) {
        SuperBible1 superBible1 = new SuperBible1();
        superBible1.start();
    }
}
package com.pyramidacceptors.ptalk.api; /** * Provides constant strings and enumerations used for API transactions<br> * between client code and the {@code ICommDevice} * <br> * @author <a href="mailto:cory@pyramidacceptors.com">Cory Todd</a> * @since 1.0.0.0 */ public class APIConstants { /** * String response for client code */ private static final String API_REVISION = "1.1.1.1"; /** * Global, default timeout unless otherwise specified */ public static final int COMM_TIMEOUT = 400; /** * These are all forwarded from the jSSC library */ public static final int BAUDRATE_110 = 110; public static final int BAUDRATE_300 = 300; public static final int BAUDRATE_600 = 600; public static final int BAUDRATE_1200 = 1200; public static final int BAUDRATE_4800 = 4800; public static final int BAUDRATE_9600 = 9600; public static final int BAUDRATE_14400 = 14400; public static final int BAUDRATE_19200 = 19200; public static final int BAUDRATE_38400 = 38400; public static final int BAUDRATE_57600 = 57600; public static final int BAUDRATE_115200 = 115200; public static final int BAUDRATE_128000 = 128000; public static final int BAUDRATE_256000 = 256000; public static final int DATABITS_5 = 5; public static final int DATABITS_6 = 6; public static final int DATABITS_7 = 7; public static final int DATABITS_8 = 8; public static final int STOPBITS_1 = 1; public static final int STOPBITS_2 = 2; public static final int STOPBITS_1_5 = 3; public static final int PARITY_NONE = 0; public static final int PARITY_ODD = 1; public static final int PARITY_EVEN = 2; public static final int PARITY_MARK = 3; public static final int PARITY_SPACE = 4; public static final int PURGE_RXABORT = 0x0002; public static final int PURGE_RXCLEAR = 0x0008; public static final int PURGE_TXABORT = 0x0001; public static final int PURGE_TXCLEAR = 0x0004; public static final int MASK_RXCHAR = 1; public static final int MASK_RXFLAG = 2; public static final int MASK_TXEMPTY = 4; public static final int MASK_CTS = 8; 
public static final int MASK_DSR = 16; public static final int MASK_RLSD = 32; public static final int MASK_BREAK = 64; public static final int MASK_ERR = 128; public static final int MASK_RING = 256; public static final int FLOWCONTROL_NONE = 0; public static final int FLOWCONTROL_RTSCTS_IN = 1; public static final int FLOWCONTROL_RTSCTS_OUT = 2; public static final int FLOWCONTROL_XONXOFF_IN = 4; public static final int FLOWCONTROL_XONXOFF_OUT = 8; public static final int ERROR_FRAME = 0x0008; public static final int ERROR_OVERRUN = 0x0002; public static final int ERROR_PARITY = 0x0004; /** * API Level assists in detecting how to talk to the target * {@code ICommDevice} * <br> * @author <a href="mailto:cory@pyramidacceptors.com">Cory Todd</a> * @since 1.0.0.0 */ public enum APILevel { /** * V1 devices are older and support standard RS-232 commands, nothing more */ V1, /** * V2 devices support extended features TODO. V1 is a subset of V2 */ V2 } /** * Enumerated bill directions specifically for a bill validator * type {@code ICommDevice}<br> * <br> * @author <a href="mailto:cory@pyramidacceptors.com">Cory Todd</a> * @since 1.0.0.0 */ public enum BillDirection { /** * No value has been assigned */ Unset, /** * Front of bill, left side fed first */ LeftUp, /** * Front of bill, right side fed first */ RightUp, /** * Observe of bill, left side fed first */ LeftDown, /** * Obverse of bill, right side fed first */ RightDown; /** * Convert a byte into an enumerated bill direction <br> * <br> * @param b byte to convert * @return BillDirection enumeration. Invalid values default to * {@code Unset} */ public static BillDirection fromByte(byte b) { switch(b) { case 0: return LeftUp; case 1: return RightUp; case 2: return LeftDown; case 3: return RightDown; default: return Unset; } } } /** * Enumerated bill enables for a bill validator {@code ICommDevice}.<br> * Pyramid designates up to twelve(12) slots for notes. <br> * RS-232 supports ONLY 7 notes. 
* <br> * @author <a href="mailto:cory@pyramidacceptors.com">Cory Todd</a> * @since 1.0.0.0 */ public enum BillNames { /** * No credit or non-credit value */ Invalid, Bill1, Bill2, Bill3, Bill4, Bill5, Bill6, Bill7, Bill8, Bill9, Bill10, Bill11, Bill12; /** * Convert a byte into an enumerated bill name <br> * <br> * @param b byte to convert * @return BillDirection enumeration. <br> * Invalid values default to {@code Invalid}. */ public static BillNames fromByte(byte b) { switch(b) { case 1: return Bill1; case 2: return Bill2; case 3: return Bill3; case 4: return Bill4; case 5: return Bill5; case 6: return Bill6; case 7: return Bill7; case 8: return Bill8; case 9: return Bill9; case 10: return Bill10; case 11: return Bill11; case 12: return Bill12; default: return Invalid; } } } private APIConstants() { } }
package com.fsck.k9.mailstore;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.Set;

import android.content.ContentValues;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;

import com.fsck.k9.Account;
import com.fsck.k9.K9;
import com.fsck.k9.activity.MessageReference;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Folder;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.internet.MimeMessage;
import com.fsck.k9.mailstore.LockableDatabase.DbCallback;
import com.fsck.k9.mailstore.LockableDatabase.WrappedException;
import com.fsck.k9.message.extractors.PreviewResult.PreviewType;

/**
 * A {@link MimeMessage} backed by a row of the local "messages" table.
 * Headers are loaded lazily from the database; flag changes are persisted
 * back to the database.
 */
public class LocalMessage extends MimeMessage {
    protected MessageReference mReference;
    private final LocalStore localStore;

    private long mId;                       // primary key in the "messages" table
    private int mAttachmentCount;
    private String mSubject;
    private String mPreview = "";
    private boolean mHeadersLoaded = false; // headers are loaded on demand (see loadHeaders())
    private long mThreadId;
    private long mRootId;
    private long messagePartId;             // row id of the root message part
    private String mimeType;
    private PreviewType previewType;

    private LocalMessage(LocalStore localStore) {
        this.localStore = localStore;
    }

    LocalMessage(LocalStore localStore, String uid, Folder folder) {
        this.localStore = localStore;
        this.mUid = uid;
        this.mFolder = folder;
    }

    /**
     * Populates this message from a cursor positioned on a row of the
     * "get message" query; column indices are fixed by that query.
     */
    void populateFromGetMessageCursor(Cursor cursor) throws MessagingException {
        final String subject = cursor.getString(0);
        this.setSubject(subject == null ? "" : subject);

        Address[] from = Address.unpack(cursor.getString(1));
        if (from.length > 0) {
            this.setFrom(from[0]);
        }
        this.setInternalSentDate(new Date(cursor.getLong(2)));
        this.setUid(cursor.getString(3));
        String flagList = cursor.getString(4);
        if (flagList != null && flagList.length() > 0) {
            String[] flags = flagList.split(",");
            for (String flag : flags) {
                try {
                    this.setFlagInternal(Flag.valueOf(flag), true);
                } catch (Exception e) {
                    // Unknown flag names are skipped; "X_BAD_FLAG" is expected
                    // legacy data and not worth a warning.
                    if (!"X_BAD_FLAG".equals(flag)) {
                        Log.w(K9.LOG_TAG, "Unable to parse flag " + flag);
                    }
                }
            }
        }
        this.mId = cursor.getLong(5);
        this.setRecipients(RecipientType.TO, Address.unpack(cursor.getString(6)));
        this.setRecipients(RecipientType.CC, Address.unpack(cursor.getString(7)));
        this.setRecipients(RecipientType.BCC, Address.unpack(cursor.getString(8)));
        this.setReplyTo(Address.unpack(cursor.getString(9)));

        this.mAttachmentCount = cursor.getInt(10);
        this.setInternalDate(new Date(cursor.getLong(11)));
        this.setMessageId(cursor.getString(12));

        String previewTypeString = cursor.getString(24);
        DatabasePreviewType databasePreviewType = DatabasePreviewType.fromDatabaseValue(previewTypeString);
        previewType = databasePreviewType.getPreviewType();
        // Only TEXT previews carry an actual preview string.
        if (previewType == PreviewType.TEXT) {
            mPreview = cursor.getString(14);
        } else {
            mPreview = "";
        }

        if (this.mFolder == null) {
            LocalFolder f = new LocalFolder(this.localStore, cursor.getInt(13));
            f.open(LocalFolder.OPEN_MODE_RW);
            this.mFolder = f;
        }

        // Threading columns may be NULL; -1 signals "no thread/root".
        mThreadId = (cursor.isNull(15)) ? -1 : cursor.getLong(15);
        mRootId = (cursor.isNull(16)) ? -1 : cursor.getLong(16);

        boolean deleted = (cursor.getInt(17) == 1);
        boolean read = (cursor.getInt(18) == 1);
        boolean flagged = (cursor.getInt(19) == 1);
        boolean answered = (cursor.getInt(20) == 1);
        boolean forwarded = (cursor.getInt(21) == 1);
        setFlagInternal(Flag.DELETED, deleted);
        setFlagInternal(Flag.SEEN, read);
        setFlagInternal(Flag.FLAGGED, flagged);
        setFlagInternal(Flag.ANSWERED, answered);
        setFlagInternal(Flag.FORWARDED, forwarded);

        messagePartId = cursor.getLong(22);
        mimeType = cursor.getString(23);
    }

    long getMessagePartId() {
        return messagePartId;
    }

    @Override
    public String getMimeType() {
        return mimeType;
    }

    /* Custom version of writeTo that updates the MIME message based on localMessage
     * changes.
     */
    @Override
    public void writeTo(OutputStream out) throws IOException, MessagingException {
        if (!mHeadersLoaded) {
            loadHeaders();
        }
        super.writeTo(out);
    }

    public PreviewType getPreviewType() {
        return previewType;
    }

    public String getPreview() {
        return mPreview;
    }

    @Override
    public String getSubject() {
        return mSubject;
    }

    @Override
    public void setSubject(String subject) throws MessagingException {
        mSubject = subject;
    }

    @Override
    public void setMessageId(String messageId) {
        mMessageId = messageId;
    }

    @Override
    public void setUid(String uid) {
        super.setUid(uid);
        // The cached MessageReference embeds the UID, so invalidate it.
        this.mReference = null;
    }

    @Override
    public boolean hasAttachments() {
        return (mAttachmentCount > 0);
    }

    public int getAttachmentCount() {
        return mAttachmentCount;
    }

    @Override
    public void setFrom(Address from) throws MessagingException {
        this.mFrom = new Address[] { from };
    }

    @Override
    public void setReplyTo(Address[] replyTo) throws MessagingException {
        if (replyTo == null || replyTo.length == 0) {
            mReplyTo = null;
        } else {
            mReplyTo = replyTo;
        }
    }

    /*
     * For performance reasons, we add headers instead of setting them (see super implementation)
     * which removes (expensive) them before adding them
     */
    @Override
    public void setRecipients(RecipientType type, Address[] addresses) throws MessagingException {
        if (type == RecipientType.TO) {
            if (addresses == null || addresses.length == 0) {
                this.mTo = null;
            } else {
                this.mTo = addresses;
            }
        } else if (type == RecipientType.CC) {
            if (addresses == null || addresses.length == 0) {
                this.mCc = null;
            } else {
                this.mCc = addresses;
            }
        } else if (type == RecipientType.BCC) {
            if (addresses == null || addresses.length == 0) {
                this.mBcc = null;
            } else {
                this.mBcc = addresses;
            }
        } else {
            throw new MessagingException("Unrecognized recipient type.");
        }
    }

    /** Sets a flag in memory only, without the database write done by setFlag(). */
    public void setFlagInternal(Flag flag, boolean set) throws MessagingException {
        super.setFlag(flag, set);
    }

    @Override
    public long getId() {
        return mId;
    }

    /**
     * Sets a flag and persists the change to the "messages" row. Setting
     * {@link Flag#DELETED} additionally clears out the message content first
     * (see {@link #delete()}).
     */
    @Override
    public void setFlag(final Flag flag, final boolean set) throws MessagingException {
        try {
            this.localStore.database.execute(true, new DbCallback<Void>() {
                @Override
                public Void doDbWork(final SQLiteDatabase db) throws WrappedException, UnavailableStorageException {
                    try {
                        if (flag == Flag.DELETED && set) {
                            delete();
                        }

                        LocalMessage.super.setFlag(flag, set);
                    } catch (MessagingException e) {
                        throw new WrappedException(e);
                    }
                    /*
                     * Set the flags on the message.
                     */
                    ContentValues cv = new ContentValues();
                    cv.put("flags", LocalMessage.this.localStore.serializeFlags(getFlags()));
                    cv.put("read", isSet(Flag.SEEN) ? 1 : 0);
                    cv.put("flagged", isSet(Flag.FLAGGED) ? 1 : 0);
                    cv.put("answered", isSet(Flag.ANSWERED) ? 1 : 0);
                    cv.put("forwarded", isSet(Flag.FORWARDED) ? 1 : 0);

                    db.update("messages", cv, "id = ?", new String[] { Long.toString(mId) });

                    return null;
                }
            });
        } catch (WrappedException e) {
            // Unwrap the MessagingException smuggled through the DB callback.
            throw (MessagingException) e.getCause();
        }

        this.localStore.notifyChange();
    }

    /*
     * If a message is being marked as deleted we want to clear out its content. Delete will not actually remove the
     * row since we need to retain the UID for synchronization purposes.
     */
    private void delete() throws MessagingException {
        try {
            localStore.database.execute(true, new DbCallback<Void>() {
                @Override
                public Void doDbWork(final SQLiteDatabase db) throws WrappedException, UnavailableStorageException {
                    // Null out all content columns but keep the row (and UID).
                    ContentValues cv = new ContentValues();
                    cv.put("deleted", 1);
                    cv.put("empty", 1);
                    cv.putNull("subject");
                    cv.putNull("sender_list");
                    cv.putNull("date");
                    cv.putNull("to_list");
                    cv.putNull("cc_list");
                    cv.putNull("bcc_list");
                    cv.putNull("preview");
                    cv.putNull("reply_to_list");
                    cv.putNull("message_part_id");

                    db.update("messages", cv, "id = ?", new String[] { Long.toString(mId) });

                    try {
                        ((LocalFolder) mFolder).deleteMessagePartsAndDataFromDisk(messagePartId);
                    } catch (MessagingException e) {
                        throw new WrappedException(e);
                    }

                    return null;
                }
            });
        } catch (WrappedException e) {
            throw (MessagingException) e.getCause();
        }

        localStore.notifyChange();
    }

    /*
     * Completely remove a message from the local database
     *
     * TODO: document how this updates the thread structure
     */
    @Override
    public void destroy() throws MessagingException {
        try {
            this.localStore.database.execute(true, new DbCallback<Void>() {
                @Override
                public Void doDbWork(final SQLiteDatabase db) throws WrappedException, UnavailableStorageException {
                    try {
                        LocalFolder localFolder = (LocalFolder) mFolder;

                        localFolder.deleteMessagePartsAndDataFromDisk(messagePartId);
                        deleteFulltextIndexEntry(db, mId);

                        if (hasThreadChildren(db, mId)) {
                            // This message has children in the thread structure so we need to
                            // make it an empty message.
                            ContentValues cv = new ContentValues();
                            cv.put("id", mId);
                            cv.put("folder_id", localFolder.getId());
                            cv.put("deleted", 0);
                            cv.put("message_id", getMessageId());
                            cv.put("empty", 1);

                            db.replace("messages", null, cv);

                            // Nothing else to do
                            return null;
                        }

                        // Get the message ID of the parent message if it's empty
                        long currentId = getEmptyThreadParent(db, mId);

                        // Delete the placeholder message
                        deleteMessageRow(db, mId);

                        /*
                         * Walk the thread tree to delete all empty parents without children
                         */
                        while (currentId != -1) {
                            if (hasThreadChildren(db, currentId)) {
                                // We made sure there are no empty leaf nodes and can stop now.
                                break;
                            }

                            // Get ID of the (empty) parent for the next iteration
                            long newId = getEmptyThreadParent(db, currentId);

                            // Delete the empty message
                            deleteMessageRow(db, currentId);

                            currentId = newId;
                        }
                    } catch (MessagingException e) {
                        throw new WrappedException(e);
                    }
                    return null;
                }
            });
        } catch (WrappedException e) {
            throw (MessagingException) e.getCause();
        }

        this.localStore.notifyChange();
    }

    /**
     * Get ID of the given message's parent if the parent is an empty message.
     *
     * @param db
     *         {@link SQLiteDatabase} instance to access the database.
     * @param messageId
     *         The database ID of the message to get the parent for.
     *
     * @return Message ID of the parent message if there exists a parent and it is empty.
     *         Otherwise {@code -1}.
     */
    private long getEmptyThreadParent(SQLiteDatabase db, long messageId) {
        Cursor cursor = db.rawQuery(
                "SELECT m.id " +
                "FROM threads t1 " +
                "JOIN threads t2 ON (t1.parent = t2.id) " +
                "LEFT JOIN messages m ON (t2.message_id = m.id) " +
                "WHERE t1.message_id = ? AND m.empty = 1",
                new String[] { Long.toString(messageId) });

        try {
            return (cursor.moveToFirst() && !cursor.isNull(0)) ? cursor.getLong(0) : -1;
        } finally {
            cursor.close();
        }
    }

    /**
     * Check whether or not a message has child messages in the thread structure.
     *
     * @param db
     *         {@link SQLiteDatabase} instance to access the database.
     * @param messageId
     *         The database ID of the message to get the children for.
     *
     * @return {@code true} if the message has children. {@code false} otherwise.
     */
    private boolean hasThreadChildren(SQLiteDatabase db, long messageId) {
        Cursor cursor = db.rawQuery(
                "SELECT COUNT(t2.id) " +
                "FROM threads t1 " +
                "JOIN threads t2 ON (t2.parent = t1.id) " +
                "WHERE t1.message_id = ?",
                new String[] { Long.toString(messageId) });

        try {
            return (cursor.moveToFirst() && !cursor.isNull(0) && cursor.getLong(0) > 0L);
        } finally {
            cursor.close();
        }
    }

    /** Removes this message's entry from the full-text search index. */
    private void deleteFulltextIndexEntry(SQLiteDatabase db, long messageId) {
        String[] idArg = { Long.toString(messageId) };
        db.delete("messages_fulltext", "id = ?", idArg);
    }

    /**
     * Delete a message from the 'messages' and 'threads' tables.
     *
     * @param db
     *         {@link SQLiteDatabase} instance to access the database.
     * @param messageId
     *         The database ID of the message to delete.
     */
    private void deleteMessageRow(SQLiteDatabase db, long messageId) {
        String[] idArg = { Long.toString(messageId) };

        // Delete the message
        db.delete("messages", "id = ?", idArg);

        // Delete row in 'threads' table
        // TODO: create trigger for 'messages' table to get rid of the row in 'threads' table
        db.delete("threads", "message_id = ?", idArg);
    }

    // Loads the headers from the folder row; marked loaded first so the
    // populate path does not recurse back in here.
    private void loadHeaders() throws MessagingException {
        mHeadersLoaded = true;
        getFolder().populateHeaders(this);
    }

    void loadHeadersIfNecessary() throws MessagingException {
        if (!mHeadersLoaded) {
            loadHeaders();
        }
    }

    @Override
    public void setHeader(String name, String value) throws MessagingException {
        if (!mHeadersLoaded)
            loadHeaders();
        super.setHeader(name, value);
    }

    @Override
    public String[] getHeader(String name) throws MessagingException {
        if (!mHeadersLoaded)
            loadHeaders();
        return super.getHeader(name);
    }

    @Override
    public void removeHeader(String name) throws MessagingException {
        if (!mHeadersLoaded)
            loadHeaders();
        super.removeHeader(name);
    }

    @Override
    public Set<String> getHeaderNames() throws MessagingException {
        if (!mHeadersLoaded)
            loadHeaders();
        return super.getHeaderNames();
    }

    // NOTE(review): thread/part/preview fields (mThreadId, mRootId,
    // messagePartId, mimeType, previewType, mReference) are not copied here —
    // confirm whether clone() is meant to be a full copy.
    @Override
    public LocalMessage clone() {
        LocalMessage message = new LocalMessage(this.localStore);
        super.copy(message);

        message.mId = mId;
        message.mAttachmentCount = mAttachmentCount;
        message.mSubject = mSubject;
        message.mPreview = mPreview;
        message.mHeadersLoaded = mHeadersLoaded;

        return message;
    }

    public long getThreadId() {
        return mThreadId;
    }

    public long getRootId() {
        return mRootId;
    }

    public Account getAccount() {
        return localStore.getAccount();
    }

    /** Lazily builds (and caches) a reference identifying this message. */
    public MessageReference makeMessageReference() {
        if (mReference == null) {
            mReference = new MessageReference(getFolder().getAccountUuid(), getFolder().getName(), mUid, null);
        }
        return mReference;
    }

    @Override
    protected void copy(MimeMessage destination) {
        super.copy(destination);
        if (destination instanceof LocalMessage) {
            ((LocalMessage) destination).mReference = mReference;
        }
    }

    @Override
    public LocalFolder getFolder() {
        return (LocalFolder) super.getFolder();
    }

    public String getUri() {
        return "email://messages/" + getAccount().getAccountNumber() + "/" + getFolder().getName() + "/" + getUid();
    }

    // Equality: super.equals() plus same account UUID.
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        if (!super.equals(o))
            return false;

        final LocalMessage that = (LocalMessage) o;
        return !(getAccountUuid() != null ? !getAccountUuid().equals(that.getAccountUuid()) : that.getAccountUuid() != null);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (getAccountUuid() != null ? getAccountUuid().hashCode() : 0);
        return result;
    }

    private String getAccountUuid() {
        return getAccount().getUuid();
    }

    public boolean isBodyMissing() {
        return getBody() == null;
    }
}
/*
 * $Id$
 * $URL$
 */

package org.subethamail.wiser;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

import javax.mail.MessagingException;
import javax.mail.Session;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.subethamail.smtp.TooMuchDataException;
import org.subethamail.smtp.helper.SimpleMessageListener;
import org.subethamail.smtp.helper.SimpleMessageListenerAdapter;
import org.subethamail.smtp.server.SMTPServer;

/**
 * Wiser is a tool for unit testing applications that send mail. Your unit
 * tests can start Wiser, run tests which generate emails, then examine the
 * emails that Wiser received and verify their integrity.
 *
 * Wiser is not intended to be a "real" mail server and is not adequate
 * for that purpose; it simply stores all mail in memory. Use the
 * MessageHandlerFactory interface (optionally with the SimpleMessageListenerAdapter)
 * of SubEthaSMTP instead.
 *
 * @author Jon Stevens
 * @author Jeff Schnitzer
 */
public class Wiser implements SimpleMessageListener {
    private final static Logger log = LoggerFactory.getLogger(Wiser.class);

    SMTPServer server;

    // Synchronized so messages can be delivered and inspected concurrently.
    protected List<WiserMessage> messages = Collections.synchronizedList(new ArrayList<WiserMessage>());

    /**
     * Create a new SMTP server with this class as the listener.
     * The default port is 25. Call setPort()/setHostname() before
     * calling start().
     */
    public Wiser() {
        this.server = new SMTPServer(new SimpleMessageListenerAdapter(this));
    }

    /** Convenience constructor */
    public Wiser(int port) {
        this();
        this.setPort(port);
    }

    /**
     * The port that the server should listen on.
     * @param port
     */
    public void setPort(int port) {
        this.server.setPort(port);
    }

    /**
     * The hostname that the server should listen on.
     * @param hostname
     */
    public void setHostname(String hostname) {
        this.server.setHostName(hostname);
    }

    /** Starts the SMTP Server */
    public void start() {
        this.server.start();
    }

    /** Stops the SMTP Server */
    public void stop() {
        this.server.stop();
    }

    /** A main() for this class. Starts up the server. */
    public static void main(String[] args) throws Exception {
        Wiser wiser = new Wiser();
        wiser.start();
    }

    /** Always accept everything */
    public boolean accept(String from, String recipient) {
        if (log.isDebugEnabled())
            log.debug("Accepting mail from " + from + " to " + recipient);

        return true;
    }

    /** Cache the messages in memory */
    public void deliver(String from, String recipient, InputStream data) throws TooMuchDataException, IOException {
        if (log.isDebugEnabled())
            log.debug("Delivering mail from " + from + " to " + recipient);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        data = new BufferedInputStream(data);

        // read the data from the stream
        int current;
        while ((current = data.read()) >= 0) {
            out.write(current);
        }

        byte[] bytes = out.toByteArray();

        if (log.isDebugEnabled())
            log.debug("Creating message from data with " + bytes.length + " bytes");

        // create a new WiserMessage.
        this.messages.add(new WiserMessage(this, from, recipient, bytes));
    }

    /**
     * Creates the JavaMail Session object for use in WiserMessage
     */
    protected Session getSession() {
        return Session.getDefaultInstance(new Properties());
    }

    /**
     * Returns the list of WiserMessages.
     * <p>
     * The number of mail transactions and the number of mails may be different.
     * If a message is received with multiple recipients in a single mail
     * transaction, then the list will contain more WiserMessage instances, one
     * for each recipient.
     */
    public List<WiserMessage> getMessages() {
        return this.messages;
    }

    /**
     * @return the server implementation
     */
    public SMTPServer getServer() {
        return this.server;
    }

    /**
     * For debugging purposes, dumps a rough outline of the messages to the output stream.
     */
    public void dumpMessages(PrintStream out) throws MessagingException {
        // Fixed: both banner string literals were truncated/unterminated in the
        // previous revision, which did not compile. Restored per upstream Wiser.
        out.println("----- Start printing messages -----");

        for (WiserMessage wmsg : this.getMessages()) {
            wmsg.dumpMessage(out);
        }

        out.println("----- End printing messages -----");
    }
}
package gui;

import javafx.geometry.VPos;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;

/**
 * Singleton overview bar drawn at the bottom of the canvas: a thin box
 * representing the whole graph sequence, interval tick-marks, and a red box
 * marking the portion currently visible on screen.
 */
public class Minimap {
    public static final int MINIMAP_Y = 700;
    public static final int MINIMAP_HEIGHT = 20;
    public static final int TEXT_Y = 740;
    public static final int TEXT_END_Y = 695;
    public static final int TEXT_SIZE = 10;
    public static final int DIVISION_LINE_HEIGHT = 715;
    public static final int CHAR_WIDTH = 3;

    private static Minimap minimap = new Minimap();

    // Total number of nodes in the graph (set by initialize).
    private int size;
    // Distance (in node ids) between interval tick-marks.
    private int stepSize;
    // Number of nodes currently visible on screen.
    private double amountVisible;
    // On-screen width of the minimap in pixels (log-scaled with graph size).
    private double width;
    // Node id at the left edge of the screen.
    private double value;

    private Minimap() {
    }

    /**
     * Getter for the singleton Minimap.
     * @return the minimap
     */
    public static Minimap getInstance() {
        return minimap;
    }

    /**
     * Initialize the minimap.
     * @param sizeVal Size of the whole graph in number of nodes (expected to be positive)
     */
    public void initialize(int sizeVal) {
        value = 0;
        amountVisible = 0;
        size = sizeVal;
        stepSize = computeDivisions();
        // Log-scale the minimap width so huge graphs do not produce a huge bar.
        width = Math.log10(size) * 100;
    }

    /**
     * Draw the minimap on screen.
     * @param gc The GraphicsContext object needed to draw the minimap
     */
    public void draw(GraphicsContext gc) {
        gc.setStroke(Color.BLACK);
        gc.setLineWidth(1);
        gc.setFont(new Font("Arial", TEXT_SIZE));
        drawMapBox(gc);
        drawDivisionLines(gc);
        drawViewBox(gc);
    }

    /**
     * Draw the small box that represents the sequence, labelled with its
     * start (0) and end (size) node ids.
     * @param gc The GraphicsContext object needed to draw the minimap
     */
    private void drawMapBox(GraphicsContext gc) {
        double x = gc.getCanvas().getWidth() / 2 - width / 2;
        gc.strokeRect(x, MINIMAP_Y, width, MINIMAP_HEIGHT);
        gc.strokeText("0", x, TEXT_Y);
        gc.strokeText(size + "", x + width - Integer.toString(size).length() * CHAR_WIDTH,
                TEXT_END_Y);
    }

    /**
     * Draw the interval lines, one every {@code stepSize} nodes, each with a
     * numeric label.
     * @param gc The GraphicsContext object needed to draw the minimap
     */
    private void drawDivisionLines(GraphicsContext gc) {
        double x = gc.getCanvas().getWidth() / 2 - width / 2;
        for (int i = stepSize; i < size; i += stepSize) {
            double division = x + valueToXCoordinate(i);
            gc.strokeLine(division, DIVISION_LINE_HEIGHT, division, MINIMAP_Y + MINIMAP_HEIGHT);
            gc.strokeText(i + "", division - Integer.toString(i).length() * CHAR_WIDTH, TEXT_Y);
        }
    }

    /**
     * Draw the box that represents the part of the sequence that is in view of the screen.
     * @param gc The GraphicsContext object needed to draw the minimap
     */
    private void drawViewBox(GraphicsContext gc) {
        double x = gc.getCanvas().getWidth() / 2 - width / 2;
        gc.setStroke(Color.RED);
        gc.strokeRect(x + valueToXCoordinate(value), MINIMAP_Y,
                valueToXCoordinate(amountVisible), MINIMAP_HEIGHT);
    }

    /**
     * Convert a node to it's corresponding x coordinate in the minimap.
     * @param value Node id
     * @return X coordinate of the value
     */
    private double valueToXCoordinate(double value) {
        return (value / (double) size) * width;
    }

    /**
     * Compute the stepsize of the intervals from the decimal representation
     * of the graph size.
     * <p>
     * Fix: the original called {@code sizeStr.substring(1, length - 1)} which
     * throws StringIndexOutOfBoundsException for any single-digit size
     * (begin index 1 &gt; end index 0); sizes below 10 now simply use step 1.
     * @return Interval stepsize
     */
    private int computeDivisions() {
        String sizeStr = Integer.toString(size);
        if (sizeStr.length() < 2) {
            // Single-digit graph: every node can be its own interval.
            return 1;
        }
        int firstDigit = Integer.parseInt(sizeStr.substring(0, 1));
        // NOTE(review): step 1 for a leading digit >= 5 and step 5 otherwise looks
        // inverted (it yields ~50 ticks for size 5000 but only 2 for size 1000),
        // but the choice is preserved as-is — confirm the intended tick density.
        int step = firstDigit >= 5 ? 1 : 5;
        int zeros = sizeStr.substring(1, sizeStr.length() - 1).length();
        return (int) (step * Math.pow(10, zeros));
    }

    /**
     * Setter for the amount of nodes visible.
     * @param amountVisible Amount of nodes visible
     */
    public void setAmountVisible(double amountVisible) {
        this.amountVisible = amountVisible;
    }

    /**
     * Setter for the value of the first node in screen, i.e. left side of the screen.
     * @param value ID of the first node
     */
    public void setValue(double value) {
        this.value = value;
    }
}
package com.yuvalshavit.effes.parser;

import com.google.common.base.Supplier;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.Token;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Queue;

/**
 * Wraps a token source and injects synthetic INDENT/DEDENT tokens based on the
 * column position of the first token after each newline, turning Python-style
 * indentation into explicit tokens for an ANTLR grammar.
 * <p>
 * Not thread-safe: all state (the dent buffer, the indentation stack, and the
 * one-token lookahead) is mutated on every call to {@link #nextToken()}.
 */
public final class DenterHelper {
  // Pending INDENT/DEDENT tokens already decided but not yet returned.
  private final Queue<Token> dentsBuffer = new ArrayDeque<>();
  // Stack of active indentation widths; bottom element is always 0 after init.
  private final Deque<Integer> indentations = new ArrayDeque<>();
  // The underlying token source.
  private final Supplier<Token> tokens;
  // Token type ids for newline, INDENT and DEDENT in the host grammar.
  private final int nlToken;
  private final int indentToken;
  private final int dedentToken;
  // One-token lookahead: the non-newline token read ahead while collapsing NL runs.
  private Token nextNonNL;

  /**
   * @param tokens      the raw token source to wrap
   * @param nlToken     token type representing a newline (its text encodes trailing indent)
   * @param indentToken token type to emit when indentation increases
   * @param dedentToken token type to emit when indentation decreases
   */
  public DenterHelper(Supplier<Token> tokens, int nlToken, int indentToken, int dedentToken) {
    this.tokens = tokens;
    this.nlToken = nlToken;
    this.indentToken = indentToken;
    this.dedentToken = dedentToken;
  }

  /**
   * Returns the next token, which may be a buffered INDENT/DEDENT, a token
   * synthesized from a newline run, or a plain token from the source.
   * At EOF, any still-open indentation levels are unwound to 0 first.
   */
  public Token nextToken() {
    initIfFirstRun();
    Token t = pullNextToken();
    final Token r;
    if (t.getType() == nlToken) {
      r = handleNewlineToken(t);
    } else if (t.getType() == Token.EOF && indentations.size() > 1) {
      // EOF with unclosed indents: emit the DEDENT chain, replay EOF afterwards.
      r = unwindTo(0, t);
      nextNonNL = t;
    } else {
      r = t;
    }
    return r;
  }

  private void initIfFirstRun() {
    if (indentations.isEmpty()) {
      indentations.push(0);
      // First invocation. Look for the first non-NL. Enqueue it, and possibly an indentation if that non-NL
      // token doesn't start at char 0.
      Token firstRealToken;
      do {
        firstRealToken = tokens.get();
      } while (firstRealToken.getType() == nlToken);
      nextNonNL = firstRealToken;
      if (firstRealToken.getCharPositionInLine() > 0) {
        indentations.push(firstRealToken.getCharPositionInLine());
        dentsBuffer.add(createToken(indentToken, firstRealToken));
      }
    }
  }

  /**
   * Pulls the next token to process, preferring (in order) buffered dents,
   * the lookahead token, then the underlying source.
   */
  private Token pullNextToken() {
    Token t;
    if (!dentsBuffer.isEmpty()) {
      t = dentsBuffer.remove();
    } else if (nextNonNL != null) {
      t = nextNonNL;
      nextNonNL = null; // consume the lookahead
    } else {
      t = tokens.get();
    }
    return t;
  }

  /**
   * Collapses a run of NL tokens to the last one, measures its trailing
   * indentation, and returns either the NL itself (no change), an INDENT,
   * or the first of a DEDENT chain.
   */
  private Token handleNewlineToken(Token t) {
    // fast-forward to the next non-NL
    Token nextNext = tokens.get();
    while (nextNext.getType() == nlToken) {
      t = nextNext;
      nextNext = tokens.get();
    }
    // nextNext is now a non-NL token; queue it up for the next call to this method
    nextNonNL = nextNext;

    String nlText = t.getText();
    int indent = nlText.length() - 1; // every NL has one \n char, so shorten the length to account for it
    if (indent > 0 && nlText.charAt(0) == '\r') {
      --indent; // If the NL also has a \r char, we should account for that as well
    }
    int prevIndent = indentations.peek();
    final Token r;
    if (indent == prevIndent) {
      r = t; // just a newline
    } else if (indent > prevIndent) {
      r = createToken(indentToken, t);
      indentations.push(indent);
    } else {
      r = unwindTo(indent, t);
    }
    return r;
  }

  // Clones position/channel info from copyFrom but with the synthetic type.
  private Token createToken(int tokenType, Token copyFrom) {
    CommonToken r = new CommonToken(copyFrom);
    r.setType(tokenType);
    return r;
  }

  /**
   * Returns a DEDENT token, and also queues up additional DEDENTS as necessary.
   * @param targetIndent the "size" of the indentation (number of spaces) by the end
   * @param copyFrom the triggering token
   * @return a DEDENT token
   */
  private Token unwindTo(int targetIndent, Token copyFrom) {
    assert dentsBuffer.isEmpty() : dentsBuffer;
    // To make things easier, we'll queue up ALL of the dedents, and then pop off the first one.
    // For example, here's how some text is analyzed:
    //
    //  Text          :  Indentation  :  Action         : Indents Deque
    //  [ baseline ]  :  0            :  nothing        : [0]
    //  [   --foo  ]  :  2            :  INDENT         : [0, 2]
    //  [ baz      ]  :  0            :  DEDENT x2      : [0]
    //  [   --again]  :  2            :  INDENT         : [0, 1]
    //  [  -weird  ]  :  1            :  DEDENT,INDENT  : [0, 1]
    //
    // This method is only interested in the DEDENT actions, although it may also enqueue an INDENT as seen above.
    // (This will probably cause a parse error, but that's not our concern!)
    while (true) {
      int prevIndent = indentations.pop();
      if (prevIndent == targetIndent) {
        break;
      }
      if (targetIndent > prevIndent) {
        // "weird" condition above
        indentations.push(prevIndent); // restore previous indentation, since we've indented from it
        dentsBuffer.add(createToken(indentToken, copyFrom));
        break;
      }
      dentsBuffer.add(createToken(dedentToken, copyFrom));
    }
    indentations.push(targetIndent);
    return dentsBuffer.remove();
  }
}
package hex.drf;

import static water.util.Utils.*;
import hex.ShuffleTask;
import hex.gbm.*;
import hex.gbm.DTree.DecidedNode;
import hex.gbm.DTree.LeafNode;
import hex.gbm.DTree.TreeModel.TreeStats;
import hex.gbm.DTree.UndecidedNode;

import java.util.Arrays;
import java.util.Random;

import jsr166y.ForkJoinTask;
import water.*;
import water.H2O.H2OCountedCompleter;
import water.api.DRFProgressPage;
import water.api.DocGen;
import water.api.Request.API;
import water.fvec.*;
import water.util.*;
import water.util.Log.Tag.Sys;

// Random Forest Trees
public class DRF extends SharedTreeModelBuilder {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  @API(help = "Columns to randomly select at each level, or -1 for sqrt(#cols)", filter = Default.class, lmin=-1, lmax=100000)
  int mtries = -1;

  @API(help = "Sample rate, from 0. to 1.0", filter = Default.class, dmin=0, dmax=1)
  float sample_rate = 0.6666667f;

  @API(help = "Seed for the random number generator", filter = Default.class)
  long seed = 0x1321e74a0192470cL; // Only one hardcoded seed to receive the same results between runs

  @API(help = "Compute variable importance (true/false).", filter = Default.class )
  boolean importance = false; // compute variable importance

  @API(help = "Computed number of split features")
  protected int _mtry; // resolved from mtries in init()

  /** DRF model holding serialized tree and implementing logic for scoring a row */
  public static class DRFModel extends DTree.TreeModel {
    static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
    static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
    final int _mtries;
    final float _sample_rate;
    final long _seed;

    public DRFModel(Key key, Key dataKey, Key testKey, String names[], String domains[][], int ntrees, int max_depth, int min_rows, int nbins, int mtries, float sample_rate, long seed) {
      super(key,dataKey,testKey,names,domains,ntrees, max_depth, min_rows, nbins);
      _mtries = mtries;
      _sample_rate = sample_rate;
      _seed = seed;
    }
    // Copy-constructor: same parameters, new set of trees + updated error/CM/stats.
    public DRFModel(DRFModel prior, DTree[] trees, double err, long [][] cm, TreeStats tstats) {
      super(prior, trees, err, cm, tstats);
      _mtries = prior._mtries;
      _sample_rate = prior._sample_rate;
      _seed = prior._seed;
    }
    // Copy-constructor attaching computed variable importances.
    public DRFModel(DRFModel prior, float[] varimp) {
      super(prior, varimp);
      _mtries = prior._mtries;
      _sample_rate = prior._sample_rate;
      _seed = prior._seed;
    }

    /** Score one row: sum per-tree predictions, then normalize
     *  (average over trees for regression, unit-sum over classes for classification). */
    @Override protected float[] score0(double data[], float preds[]) {
      float[] p = super.score0(data, preds);
      int ntrees = numTrees();
      if (p.length==1) { if (ntrees>0) div(p, ntrees); } // regression - compute avg over all trees
      else { // classification
        float s = sum(p);
        if (s>0) div(p, s); // unify over all classes
      }
      return p;
    }
    @Override protected void generateModelDescription(StringBuilder sb) {
      DocGen.HTML.paragraph(sb,"mtries: "+_mtries+", Sample rate: "+_sample_rate+", Seed: "+_seed);
    }
    /** Emit the normalization step into generated Java scoring code. */
    @Override protected void toJavaUnifyPreds(SB bodySb) {
      if (isClassifier()) {
        bodySb.i().p("float sum = 0;").nl();
        bodySb.i().p("for(int i=1;i<preds.length; i++) sum += preds[i];").nl();
        bodySb.i().p("for(int i=1; i<preds.length; i++) preds[i] = (float) preds[i] / sum;").nl();
      } else bodySb.i().p("preds[1] = preds[1]/NTREES;").nl();
    }
  }

  public Frame score( Frame fr ) { return ((DRFModel)UKV.get(dest())).score(fr); }

  @Override protected Log.Tag.Sys logTag() { return Sys.DRF__; }

  public DRF() { description = "Distributed RF"; ntrees = 50; max_depth = 50; min_rows = 1; }

  /** Return the query link to this page */
  public static String link(Key k, String content) {
    RString rs = new RString("<a href='DRF.query?source=%$key'>%content</a>");
    rs.replace("key", k.toString());
    rs.replace("content", content);
    return rs.toString();
  }

  // Compute a DRF tree.
  // Start by splitting all the data according to some criteria (minimize
  // variance at the leaves). Record on each row which split it goes to, and
  // assign a split number to it (for next pass). On *this* pass, use the
  // split-number to build a per-split histogram, with a per-histogram-bucket
  // variance.

  @Override protected void logStart() {
    Log.info("Starting DRF model build...");
    super.logStart();
    Log.info(" mtry: " + mtries);
    Log.info(" sample_rate: " + sample_rate);
    Log.info(" seed: " + seed);
  }

  @Override protected Status exec() {
    logStart();
    buildModel();
    return Status.Done;
  }

  @Override protected Response redirect() {
    return DRFProgressPage.redirect(this, self(), dest());
  }

  /** Validate/derive parameters before the build: resolve _mtry and range-check inputs. */
  @Override protected void init() {
    super.init();
    // Initialize local variables
    _mtry = (mtries==-1) ? // classification: mtry=sqrt(_ncols), regression: mtry=_ncols/3
        ( classification ? Math.max((int)Math.sqrt(_ncols),1) : Math.max(_ncols/3,1))  : mtries;
    if (!(1 <= _mtry && _mtry <= _ncols)) throw new IllegalArgumentException("Computed mtry should be in interval <1,#cols> but it is " + _mtry);
    if (!(0.0 < sample_rate && sample_rate <= 1.0)) throw new IllegalArgumentException("Sample rate should be interval (0,1> but it is " + sample_rate);
  }

  /** Main build loop: grow ntrees sets of K trees (one per class), scoring after each round. */
  @Override protected void buildModel( final Frame fr, String names[], String domains[][], final Key outputKey, final Key dataKey, final Key testKey, final Timer t_build ) {
    DRFModel model = new DRFModel(outputKey,dataKey,testKey,names,domains,ntrees, max_depth, min_rows, nbins, mtries, sample_rate, seed);
    DKV.put(outputKey, model);
    // The RNG used to pick split columns
    Random rand = createRNG(seed);
    // Prepare working columns
    new SetWrkTask().doAll(fr);
    int tid = 0;
    DTree[] ktrees = null;
    // Prepare tree statistics
    TreeStats tstats = new TreeStats();
    // Build trees until we hit the limit
    for( tid=0; tid<ntrees; tid++) { // At each iteration build K trees (K = nclass = response column domain size)
      // TODO: parallelize more? build more than k trees at each time, we need to care about temporary data
      // Idea: launch more DRF at once.
      Timer t_kTrees = new Timer();
      ktrees = buildNextKTrees(fr,_mtry,sample_rate,rand);
      Log.info(Sys.DRF__, "Tree "+(tid+1)+"x"+_nclass+" produced in "+t_kTrees);
      if( cancelled() ) break; // If canceled during building, do not bulkscore
      // TODO: Do validation or OOBEE scoring only if trees are produced fast enough.
      tstats.updateBy(ktrees);
      model = doScoring(model, outputKey, fr, ktrees, tid, tstats,false);
    }
    // Do final scoring with all the trees.
    model = doScoring(model, outputKey, fr, ktrees, tid, tstats,true);
    // Compute variable importance if required
    if (classification && importance) {
      float varimp[] = doVarImp(model, fr);
      Log.info(Sys.DRF__,"Var. importance: "+Arrays.toString(varimp));
      // Update the model
      model = new DRFModel(model, varimp);
      DKV.put(outputKey, model);
    }
    cleanUp(fr,t_build); // Shared cleanup
  }

  /** Score the model, publish the updated model to the DKV, and return it.
   *  On the final round the trees themselves are not re-attached (finalScoring ? null : ktrees). */
  private DRFModel doScoring(DRFModel model, Key outputKey, Frame fr, DTree[] ktrees, int tid, TreeStats tstats, boolean finalScoring ) {
    Score sc = new Score().doIt(model, fr, validation, validation==null).report(Sys.DRF__,tid,ktrees);
    model = new DRFModel(model, finalScoring?null:ktrees, (float)sc.sum()/sc.nrows(), sc.cm(), tstats);
    DKV.put(outputKey, model);
    return model;
  }

  /** Permutation variable importance: per column, shuffle the column, re-score,
   *  and record the average per-tree accuracy drop. One FJ task per column. */
  private float[] doVarImp(final DRFModel model, final Frame f) {
    // Score a dataset as usual but collects properties per tree.
    TreeVotes cx = TreeVotes.varimp(model, f, sample_rate);
    final double[] origAcc = cx.accuracy(); // original accuracy per tree
    final int ntrees = model.numTrees();
    final float[] varimp = new float[_ncols]; // output variable importance
    assert origAcc.length == ntrees; // make sure that numbers of trees correspond
    // For each variable launch one FJ-task to compute variable importance.
    H2OCountedCompleter[] computers = new H2OCountedCompleter[_ncols];
    for (int var=0; var<_ncols; var++) {
      final int variable = var;
      // WARNING: The code is shuffling all rows not only OOB rows.
      // Hence, after shuffling an OOB row can contain in shuffled column value from non-OOB row
      // The question is if it affects significatly var imp
      computers[var] = new H2OCountedCompleter() {
        @Override public void compute2() {
          Frame wf = new Frame(f); // create a copy of frame
          Vec varv = wf.vecs()[variable]; // vector which we use to measure variable importance
          Vec sv = ShuffleTask.shuffle(varv); // create a shuffled vector
          wf.replace(variable, sv); // replace a vector with shuffled vector
          // Compute oobee with shuffled data
          TreeVotes cd = TreeVotes.varimp(model, wf, sample_rate);
          double[] accdiff = cd.accuracy();
          assert accdiff.length == origAcc.length;
          // compute decrease of accuracy
          for (int t=0; t<ntrees;t++ ) {
            accdiff[t] = origAcc[t] - accdiff[t];
          }
          varimp[variable] = (float) avg(accdiff);
          // Remove shuffled vector
          UKV.remove(sv._key);
          tryComplete();
        }
      };
    }
    ForkJoinTask.invokeAll(computers);
    // after all varimp contains variable importance of all columns used by a model.
    return varimp;
  }

  /** Fill work columns:
   *  - classification: set 1 in the corresponding wrk col according to row response
   *  - regression: copy response into work column (there is only 1 work column) */
  private class SetWrkTask extends MRTask2<SetWrkTask> {
    @Override public void map( Chunk chks[] ) {
      Chunk cy = chk_resp(chks);
      for( int i=0; i<cy._len; i++ ) {
        if( cy.isNA0(i) ) continue; // missing responses contribute nothing
        if (classification) {
          int cls = (int)cy.at80(i);
          chk_work(chks,cls).set0(i,1.0f);
        } else {
          float pred = (float) cy.at0(i);
          chk_work(chks,0).set0(i,pred);
        }
      }
    }
  }

  // Build the next random k-trees
  private DTree[] buildNextKTrees(Frame fr, int mtrys, float sample_rate, Random rand) {
    // We're going to build K (nclass) trees - each focused on correcting
    // errors for a single class.
    final DTree[] ktrees = new DTree[_nclass];
    // Use for all k-trees the same seed. NOTE: this is only to make a fair view for all k-trees
    long rseed = rand.nextLong();
    for( int k=0; k<_nclass; k++ ) {
      // Initially setup as-if an empty-split had just happened
      assert (_distribution!=null && classification) || (_distribution==null && !classification);
      if( _distribution == null || _distribution[k] != 0 ) {
        ktrees[k] = new DRFTree(fr,_ncols,(char)nbins,(char)_nclass,min_rows,mtrys,rseed);
        new DRFUndecidedNode(ktrees[k],-1,DBinHistogram.initialHist(fr,_ncols,(char)nbins)); // The "root" node
      }
    }
    // Sample - mark the lines by putting 'OUT_OF_BAG' into nid(<klass>) vector
    for( int k=0; k<_nclass; k++) {
      if (ktrees[k] != null) new Sample(((DRFTree)ktrees[k]), sample_rate).doAll(vec_nids(fr,k));
    }
    int[] leafs = new int[_nclass]; // Define a "working set" of leaf splits, from leafs[i] to tree._len for each tree i

    // One Big Loop till the ktrees are of proper depth.
    // Adds a layer to the trees each pass.
    int depth=0;
    for( ; depth<max_depth; depth++ ) {
      if( cancelled() ) return null;
      // Build K trees, one per class.
      // Fuse 2 conceptual passes into one:
      // Pass 1: Score a prior DHistogram, and make new DTree.Node assignments
      // to every row. This involves pulling out the current assigned Node,
      // "scoring" the row against that Node's decision criteria, and assigning
      // the row to a new child Node (and giving it an improved prediction).
      // Pass 2: Build new summary DHistograms on the new child Nodes every row
      // got assigned into. Collect counts, mean, variance, min, max per bin,
      // per column.
      ScoreBuildHistogram sbh = new ScoreBuildHistogram(ktrees,leafs).doAll(fr);
      //System.out.println(sbh.profString());

      // Build up the next-generation tree splits from the current histograms.
      // Nearly all leaves will split one more level. This loop nest is
      // O( #active_splits * #bins * #ncols )
      // but is NOT over all the data.
      boolean did_split=false;
      for( int k=0; k<_nclass; k++ ) {
        DTree tree = ktrees[k]; // Tree for class K
        if( tree == null ) continue;
        int tmax = tree.len(); // Number of total splits in tree K
        for( int leaf=leafs[k]; leaf<tmax; leaf++ ) { // Visit all the new splits (leaves)
          UndecidedNode udn = tree.undecided(leaf);
          udn._hs = sbh.getFinalHisto(k,leaf);
          //System.out.println("Class "+(domain!=null?domain[k]:k)+",\n Undecided node:"+udn);
          // Replace the Undecided with the Split decision
          DRFDecidedNode dn = new DRFDecidedNode((DRFUndecidedNode)udn);
          //System.out.println("--> Decided node: " + dn);
          //System.out.println(" > Split: " + dn._split + " Total rows: " + (dn._split.rowsLeft()+dn._split.rowsRight()));
          if( dn._split.col() == -1 ) udn.do_not_split();
          else did_split = true;
        }
        leafs[k]=tmax; // Setup leafs for next tree level
        tree.depth++; // Next layer done
      }
      // If we did not make any new splits, then the tree is split-to-death
      if( !did_split ) break;
    }

    // Each tree bottomed-out in a DecidedNode; go 1 more level and insert
    // LeafNodes to hold predictions.
    for( int k=0; k<_nclass; k++ ) {
      DTree tree = ktrees[k];
      if( tree == null ) continue;
      int leaf = leafs[k] = tree.len();
      for( int nid=0; nid<leaf; nid++ ) {
        if( tree.node(nid) instanceof DecidedNode ) {
          DecidedNode dn = tree.decided(nid);
          for( int i=0; i<dn._nids.length; i++ ) {
            int cnid = dn._nids[i];
            if( cnid == -1 || // Bottomed out (predictors or responses known constant)
                tree.node(cnid) instanceof UndecidedNode || // Or chopped off for depth
                (tree.node(cnid) instanceof DecidedNode && // Or not possible to split
                 ((DecidedNode)tree.node(cnid))._split.col()==-1) ) {
              DRFLeafNode nleaf = new DRFLeafNode(tree,nid);
              dn._nids[i] = nleaf.nid(); // Mark a leaf here
            }
          }
          // Handle the trivial non-splitting tree
          if( nid==0 && dn._split.col() == -1 ) new DRFLeafNode(tree,-1,0);
        }
      }
    } // -- k-trees are done

    // Collect votes for the tree.
    CollectPreds gp = new CollectPreds(ktrees,leafs).doAll(fr);
    for( int k=0; k<_nclass; k++ ) {
      final DTree tree = ktrees[k];
      if( tree == null ) continue;
      for( int i=0; i<tree.len()-leafs[k]; i++ ) {
        // setup prediction for k-tree's i-th leaf
        // for classification it is a weight of votes for the i-th class
        // for regression it is mean of rows' predictions in the leaf
        ((LeafNode)tree.node(leafs[k]+i)).pred( gp._voters[k][i] > 0 ? gp._votes[k][i] / gp._voters[k][i] : 0);
      }
    }

    // Tree <== f(Tree)
    // Nids <== 0
    new MRTask2() {
      @Override public void map( Chunk chks[] ) {
        // For all tree/klasses
        for( int k=0; k<_nclass; k++ ) {
          final DTree tree = ktrees[k];
          if( tree == null ) continue;
          final Chunk nids = chk_nids(chks,k);
          final Chunk ct = chk_tree(chks,k);
          for( int row=0; row<nids._len; row++ ) {
            int nid = (int)nids.at80(row);
            // Track only prediction for oob rows
            if (isOOBRow(nid)) {
              nid = oob2Nid(nid);
              // Setup Tree(i) - on the fly prediction of i-tree for row-th row
              ct.set0(row, (float)(ct.at0(row) + ((LeafNode)tree.node(nid)).pred() ));
            }
            // reset help column
            nids.set0(row,0);
          }
        }
      }
    }.doAll(fr);

    // Collect leaves stats
    // NOTE(review): ktrees[i] can be null when a class has zero distribution (see the
    // _distribution[k] != 0 guard above); this loop would then NPE — confirm upstream.
    for (int i=0; i<ktrees.length; i++)
      ktrees[i].leaves = ktrees[i].len() - leafs[i];
    // DEBUG: Print the generated K trees
    // printGenerateTrees(ktrees);

    return ktrees;
  }

  // Read the 'tree' columns, do model-specific math and put the results in the
  // ds[] array, and return the sum. Dividing any ds[] element by the sum
  // turns the results into a probability distribution.
  @Override protected double score0( Chunk chks[], double ds[/*nclass*/], int row ) {
    double sum=0;
    for( int k=0; k<_nclass; k++ ) // Sum across of likelyhoods
      sum+=(ds[k]=chk_tree(chks,k).at0(row));
    return sum;
  }

  // Collect and write predictions into leafs.
  private class CollectPreds extends MRTask2<CollectPreds> {
    final DTree _trees[]; // Read-only, shared (except at the histograms in the Nodes)
    final int _leafs[]; // Number of active leaves (per tree)
    // Per leaf: sum(votes);
    float _votes[/*tree/klass*/][/*tree-relative node-id*/];
    long _voters[/*tree/klass*/][/*tree-relative node-id*/];
    CollectPreds(DTree trees[], int leafs[]) { _leafs=leafs; _trees=trees; }
    @Override public void map( Chunk[] chks ) {
      _votes = new float[_nclass][];
      _voters = new long [_nclass][];
      // For all tree/klasses
      for( int k=0; k<_nclass; k++ ) {
        final DTree tree = _trees[k];
        final int leaf = _leafs[k];
        if( tree == null ) continue; // Empty class is ignored
        // A leaf-biased array of all active Tree leaves.
        final float votes [] = _votes [k] = new float[tree.len()-leaf];
        final long voters[] = _voters[k] = new long [tree.len()-leaf];
        final Chunk nids = chk_nids(chks,k); // Node-ids for this tree/class
        final Chunk vss = chk_work(chks,k); // Votes for this tree/class (saved as float by SetWrkTask!)
        // If we have all constant responses, then we do not split even the
        // root and the residuals should be zero.
        if( tree.root() instanceof LeafNode ) continue;
        for( int row=0; row<nids._len; row++ ) { // For all rows
          int nid = (int)nids.at80(row); // Get Node to decide from
          boolean oobrow = false;
          if (isOOBRow(nid)) { oobrow = true; nid = oob2Nid(nid); } // This is out-of-bag row - but we would like to track on-the-fly prediction for the row
          if( tree.node(nid) instanceof UndecidedNode ) // If we bottomed out the tree
            nid = tree.node(nid).pid(); // Then take parent's decision
          DecidedNode dn = tree.decided(nid); // Must have a decision point
          if( dn._split.col() == -1 ) // Unable to decide?
            dn = tree.decided(nid = tree.node(nid).pid()); // Then take parent's decision
          int leafnid = dn.ns(chks,row); // Decide down to a leafnode
          assert leaf <= leafnid && leafnid < tree.len(); // we cannot obtain unknown leaf
          assert tree.node(leafnid) instanceof LeafNode;
          nids.set0(row,(oobrow ? nid2Oob(leafnid) : leafnid));
          // Note: I can see which leaf/region I end up in, but I do not care for
          // the prediction presented by the tree. For GBM, we compute the
          // sum-of-residuals (and sum/abs/mult residuals) for all rows in the
          // leaf, and get our prediction from that.
          if (!oobrow) {
            float v = (float) vss.at0(row); // !!! SetWrkTask put info wrk columns only floats => so use them only here
            // How many rows in this leaf has predicted k-class.
            votes [leafnid-leaf] += v; // v=1 for classification if class == k else 0 (classification), regression v=response(Y)
            voters[leafnid-leaf] ++; // compute all rows in this leaf (perhaps we do not treat voters per k-tree since we sample inside k-trees with same sampling rate)
          }
        }
      }
    }
    @Override public void reduce( CollectPreds gp ) { Utils.add(_votes,gp._votes); Utils.add(_voters,gp._voters); }
  }

  // A standard DTree with a few more bits. Support for sampling during
  // training, and replaying the sample later on the identical dataset to
  // e.g. compute OOBEE.
  static class DRFTree extends DTree {
    final int _mtrys; // Number of columns to choose amongst in splits
    final long _seeds[]; // One seed for each chunk, for sampling
    final transient Random _rand; // RNG for split decisions & sampling
    DRFTree( Frame fr, int ncols, char nbins, char nclass, int min_rows, int mtrys, long seed ) {
      super(fr._names, ncols, nbins, nclass, min_rows, seed);
      _mtrys = mtrys;
      _rand = createRNG(seed);
      // Pre-draw one deterministic seed per chunk so sampling is replayable.
      _seeds = new long[fr.vecs()[0].nChunks()];
      for( int i=0; i<_seeds.length; i++ ) _seeds[i] = _rand.nextLong();
    }
    // Return a deterministic chunk-local RNG. Can be kinda expensive.
    @Override public Random rngForChunk( int cidx ) {
      long seed = _seeds[cidx];
      return createRNG(seed);
    }
  }

  // DRF DTree decision node: same as the normal DecidedNode, but specifies a
  // decision algorithm given complete histograms on all columns.
  // DRF algo: find the lowest error amongst a random mtry columns.
  static class DRFDecidedNode extends DecidedNode<DRFUndecidedNode> {
    DRFDecidedNode( DRFUndecidedNode n ) { super(n); }
    @Override public DRFUndecidedNode makeUndecidedNode(DBinHistogram[] nhists ) {
      return new DRFUndecidedNode(_tree,_nid,nhists);
    }
    // Find the column with the best split (lowest score).
    @Override public DTree.Split bestCol( DRFUndecidedNode u ) {
      DTree.Split best = new DTree.Split(-1,-1,false,Double.MAX_VALUE,Double.MAX_VALUE,0L,0L);
      if( u._hs == null ) return best;
      for( int i=0; i<u._scoreCols.length; i++ ) {
        int col = u._scoreCols[i];
        DTree.Split s = u._hs[col].scoreMSE(col);
        if( s == null ) continue;
        if( s.se() < best.se() ) best = s;
        if( s.se() <= 0 ) break; // No point in looking further!
      }
      return best;
    }
  }

  // DRF DTree undecided node: same as the normal UndecidedNode, but specifies
  // a list of columns to score on now, and then decide over later.
  // DRF algo: pick a random mtry columns
  static class DRFUndecidedNode extends UndecidedNode {
    DRFUndecidedNode( DTree tree, int pid, DBinHistogram hs[] ) { super(tree,pid,hs); }
    // Randomly select mtry columns to 'score' in following pass over the data.
    @Override public int[] scoreCols( DHistogram[] hs ) {
      DRFTree tree = (DRFTree)_tree;
      int[] cols = new int[hs.length];
      int len=0;
      // Gather all active columns to choose from. Ignore columns we
      // previously ignored, or columns with 1 bin (nothing to split), or
      // histogramed bin min==max (means the predictors are constant).
      for( int i=0; i<hs.length; i++ ) {
        if( hs[i]==null ) continue; // Ignore not-tracked cols
        if( hs[i].min() == hs[i].max() ) continue; // predictor min==max, does not distinguish
        if( hs[i].nbins() <= 1 ) continue; // cols with 1 bin (will not split)
        cols[len++] = i; // Gather active column
      }
      int choices = len; // Number of columns I can choose from
      if( choices == 0 ) {
        // NOTE(review): 's' is computed but never used in this dump — a Log/print
        // call producing the diagnostic appears to have been dropped; confirm
        // against the original file before relying on this branch.
        for( int i=0; i<hs.length; i++ ) {
          String s;
          if( hs[i]==null ) s="null";
          else if( hs[i].min() == hs[i].max() ) s=hs[i].name()+"=min==max=="+hs[i].min();
          else if( hs[i].nbins() <= 1 ) s=hs[i].name()+"=nbins=" +hs[i].nbins();
          else s=hs[i].name()+"=unk";
        }
      }
      assert choices > 0;

      // Draw up to mtry columns at random without replacement.
      for( int i=0; i<tree._mtrys; i++ ) {
        if( len == 0 ) break; // Out of choices!
        int idx2 = tree._rand.nextInt(len);
        int col = cols[idx2]; // The chosen column
        cols[idx2] = cols[--len]; // Compress out of array; do not choose again
        cols[len] = col; // Swap chosen in just after 'len'
      }
      assert choices - len > 0;
      return Arrays.copyOfRange(cols,len,choices);
    }
  }

  static class DRFLeafNode extends LeafNode {
    DRFLeafNode( DTree tree, int pid ) { super(tree,pid); }
    DRFLeafNode( DTree tree, int pid, int nid ) { super(tree,pid,nid); }
    // Insert just the predictions: a single byte/short if we are predicting a
    // single class, or else the full distribution.
    @Override protected AutoBuffer compress(AutoBuffer ab) { assert !Double.isNaN(pred()); return ab.put4f((float)pred()); }
    @Override protected int size() { return 4; }
  }

  // Deterministic sampling
  static class Sample extends MRTask2<Sample> {
    final DRFTree _tree;
    final float _rate;
    Sample( DRFTree tree, float rate ) { _tree = tree; _rate = rate; }
    @Override public void map( Chunk nids ) {
      Random rand = _tree.rngForChunk(nids.cidx());
      for( int row=0; row<nids._len; row++ )
        if( rand.nextFloat() >= _rate )
          nids.set0(row, OUT_OF_BAG); // Flag row as being ignored by sampling
    }
  }
}
package de.bmoth.backend.z3;

import com.microsoft.z3.*;
import de.bmoth.app.PersonalPreferences;
import de.bmoth.backend.TranslationOptions;
import de.bmoth.parser.Parser;
import de.bmoth.parser.ast.AbstractVisitor;
import de.bmoth.parser.ast.AstTransformationForZ3;
import de.bmoth.parser.ast.nodes.*;
import de.bmoth.parser.ast.nodes.ExpressionOperatorNode.ExpressionOperator;
import de.bmoth.parser.ast.nodes.FormulaNode.FormulaType;
import de.bmoth.parser.ast.types.*;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * This class translates a FormulaNode of the parser to a z3 backend node.
 *
 * B constructs without a direct z3 counterpart (NATURAL, INT, interval, set
 * comprehension, ...) are encoded as fresh z3 constants whose meaning is pinned
 * down by quantified axioms collected in {@link #constraintList}; the public
 * translatePredicate entry points conjoin those axioms onto the result.
 **/
public class FormulaToZ3Translator {

    private Context z3Context; // the context which is used to create z3 objects
    // For example, for the B keyword NATURAL an ordinary z3 identifier will be
    // created because there is no corresponding keyword in z3.
    // Additionally, a constraint axiomatizing this identifier will be added to
    // this list.
    private final LinkedList<BoolExpr> constraintList = new LinkedList<>();
    private int tempVariablesCounter = 0; // used to generate unique identifiers
    List<DeclarationNode> implicitDeclarations;
    FormulaNode formulaNode;
    private FuncDecl pow = null; // lazily initialized POWER_OF function declaration

    /** Returns a fresh, unique identifier of the form {@code $t_N}. */
    private String createFreshTemporaryVariable() {
        this.tempVariablesCounter++;
        return "$t_" + this.tempVariablesCounter;
    }

    public List<DeclarationNode> getImplicitDeclarations() {
        return this.implicitDeclarations;
    }

    /** Returns one z3 constant per implicitly declared B variable. */
    public List<Expr> getImplicitVariablesAsZ3Expression() {
        List<Expr> list = new ArrayList<>();
        for (DeclarationNode decl : implicitDeclarations) {
            Expr mkConst = this.z3Context.mkConst(decl.getName(), bTypeToZ3Sort(decl.getType()));
            list.add(mkConst);
        }
        return list;
    }

    private FormulaToZ3Translator(Context z3Context, String formula) {
        this.z3Context = z3Context;
        formulaNode = Parser.getFormulaAsSemanticAst(formula);
        this.implicitDeclarations = formulaNode.getImplicitDeclarations();
    }

    private FormulaToZ3Translator(Context z3Context) {
        this.z3Context = z3Context;
    }

    /** Builds the equation {@code name = value} as a z3 BoolExpr. */
    public static BoolExpr translateVariableEqualToExpr(String name, ExprNode value, Context z3Context) {
        ExprNode exprNode = AstTransformationForZ3.transformExprNode(value);
        return translateVariableEqualToExpr(name, exprNode, z3Context, new TranslationOptions());
    }

    public static BoolExpr translateVariableEqualToExpr(String name, ExprNode value, Context z3Context,
            TranslationOptions opt) {
        FormulaToZ3Translator formulaToZ3Translator = new FormulaToZ3Translator(z3Context);
        FormulaToZ3TranslatorVisitor visitor = formulaToZ3Translator.new FormulaToZ3TranslatorVisitor();
        Expr z3Value = visitor.visitExprNode(value, opt);
        // the variable's sort is taken from the translated value
        Expr variable = z3Context.mkConst(name, z3Value.getSort());
        return z3Context.mkEq(variable, z3Value);
    }

    /**
     * Parses {@code formula} as a B predicate and translates it to z3.
     *
     * @throws RuntimeException if the formula is not a predicate or the
     *                          translation does not yield a BoolExpr
     */
    public static BoolExpr translatePredicate(String formula, Context z3Context) {
        FormulaToZ3Translator formulaToZ3Translator = new FormulaToZ3Translator(z3Context, formula);
        if (formulaToZ3Translator.formulaNode.getFormulaType() != FormulaType.PREDICATE_FORMULA) {
            throw new RuntimeException("Expected predicate.");
        }
        PredicateNode predNode = AstTransformationForZ3
                .transformSemanticNode((PredicateNode) formulaToZ3Translator.formulaNode.getFormula());
        FormulaToZ3TranslatorVisitor visitor = formulaToZ3Translator.new FormulaToZ3TranslatorVisitor();
        Expr constraint = visitor.visitPredicateNode(predNode, new TranslationOptions());
        if (!(constraint instanceof BoolExpr)) {
            throw new RuntimeException("Invalid translation. Expected BoolExpr but found " + constraint.getClass());
        }
        BoolExpr boolExpr = (BoolExpr) constraint;
        // adding all additional constraints to result
        for (BoolExpr bExpr : formulaToZ3Translator.constraintList) {
            boolExpr = z3Context.mkAnd(boolExpr, bExpr);
        }
        return boolExpr;
    }

    public static BoolExpr translatePredicate(PredicateNode pred, Context z3Context) {
        // NOTE(review): the node is transformed here and again inside the overload
        // below; this relies on the AST transformation being idempotent.
        PredicateNode predNode = AstTransformationForZ3.transformSemanticNode(pred);
        return translatePredicate(predNode, z3Context, new TranslationOptions());
    }

    public static BoolExpr translatePredicate(PredicateNode pred, Context z3Context, TranslationOptions opt) {
        PredicateNode predNode = AstTransformationForZ3.transformSemanticNode(pred);
        FormulaToZ3Translator formulaToZ3Translator = new FormulaToZ3Translator(z3Context);
        FormulaToZ3TranslatorVisitor formulaToZ3TranslatorVisitor = formulaToZ3Translator.new FormulaToZ3TranslatorVisitor();
        BoolExpr boolExpr = (BoolExpr) formulaToZ3TranslatorVisitor.visitPredicateNode(predNode, opt);
        // adding all additional constraints to result
        for (BoolExpr bExpr : formulaToZ3Translator.constraintList) {
            boolExpr = z3Context.mkAnd(boolExpr, bExpr);
        }
        return boolExpr;
    }

    public static Sort bTypeToZ3Sort(Context z3Context, Type t) {
        FormulaToZ3Translator formulaToZ3Translator = new FormulaToZ3Translator(z3Context);
        return formulaToZ3Translator.bTypeToZ3Sort(t);
    }

    /**
     * Maps a B type to the corresponding z3 sort. Sets become z3 array sorts
     * (z3's set representation), couples and sequences become tuple sorts.
     *
     * @throws AssertionError for types without a conversion
     */
    public Sort bTypeToZ3Sort(Type t) {
        if (t instanceof IntegerType) {
            return z3Context.getIntSort();
        }
        if (t instanceof BoolType) {
            return z3Context.getBoolSort();
        }
        if (t instanceof SetType) {
            SetType s = (SetType) t;
            Sort subSort = bTypeToZ3Sort(s.getSubtype());
            return z3Context.mkSetSort(subSort);
        }
        if (t instanceof CoupleType) {
            CoupleType c = (CoupleType) t;
            Sort[] subSorts = new Sort[2];
            subSorts[0] = bTypeToZ3Sort(c.getLeft());
            subSorts[1] = bTypeToZ3Sort(c.getRight());
            return z3Context.mkTupleSort(z3Context.mkSymbol("couple"),
                    new Symbol[]{z3Context.mkSymbol("left"), z3Context.mkSymbol("right")}, subSorts);
        }
        if (t instanceof SequenceType) {
            SequenceType s = (SequenceType) t;
            Sort subSort = bTypeToZ3Sort(s.getSubtype());
            Sort intType = z3Context.getIntSort();
            // a sequence is a pair of (int -> element) array and its size
            Sort[] subSorts = new Sort[2];
            subSorts[0] = z3Context.mkArraySort(intType, subSort);
            subSorts[1] = intType;
            return z3Context.mkTupleSort(z3Context.mkSymbol("sequence"),
                    new Symbol[]{z3Context.mkSymbol("array"), z3Context.mkSymbol("size")}, subSorts);
        }
        throw new AssertionError("Missing Type Conversion: " + t.getClass());
    }

    class FormulaToZ3TranslatorVisitor extends AbstractVisitor<Expr, TranslationOptions> {

        /** Appends one prime character per prime level of {@code ops} to {@code name}. */
        private String addPrimes(TranslationOptions ops, String name) {
            int numOfPrimes = ops.getPrimeLevel();
            StringBuilder nameBuilder = new StringBuilder(name);
            while (numOfPrimes > 0) {
                nameBuilder.append("'");
                numOfPrimes--; // FIX: decrement was missing; the loop never terminated
            }
            return nameBuilder.toString();
        }

        @Override
        public Expr visitIdentifierExprNode(IdentifierExprNode node, TranslationOptions ops) {
            Type type = node.getDeclarationNode().getType();
            return z3Context.mkConst(addPrimes(ops, node.getName()), bTypeToZ3Sort(type));
        }

        @Override
        public Expr visitCastPredicateExpressionNode(CastPredicateExpressionNode node, TranslationOptions expected) {
            return visitPredicateNode(node.getPredicate(), expected);
        }

        @Override
        public Expr visitIdentifierPredicateNode(IdentifierPredicateNode node, TranslationOptions ops) {
            return z3Context.mkBoolConst(node.getName());
        }

        @Override
        public Expr visitPredicateOperatorWithExprArgs(PredicateOperatorWithExprArgsNode node,
                TranslationOptions ops) {
            final List<Expr> arguments = node.getExpressionNodes().stream().map(it -> visitExprNode(it, ops))
                    .collect(Collectors.toList());
            switch (node.getOperator()) {
                case EQUAL:
                    return z3Context.mkEq(arguments.get(0), arguments.get(1));
                case NOT_EQUAL:
                    return z3Context.mkNot(z3Context.mkEq(arguments.get(0), arguments.get(1)));
                case ELEMENT_OF:
                    return z3Context.mkSetMembership(arguments.get(0), (ArrayExpr) arguments.get(1));
                case LESS_EQUAL:
                    return z3Context.mkLe((ArithExpr) arguments.get(0), (ArithExpr) arguments.get(1));
                case LESS:
                    return z3Context.mkLt((ArithExpr) arguments.get(0), (ArithExpr) arguments.get(1));
                case GREATER_EQUAL:
                    return z3Context.mkGe((ArithExpr) arguments.get(0), (ArithExpr) arguments.get(1));
                case GREATER:
                    return z3Context.mkGt((ArithExpr) arguments.get(0), (ArithExpr) arguments.get(1));
                case NOT_BELONGING:
                    return z3Context.mkNot(z3Context.mkSetMembership(arguments.get(0), (ArrayExpr) arguments.get(1)));
                case INCLUSION:
                    // a <: S
                    return z3Context.mkSetSubset((ArrayExpr) arguments.get(0), (ArrayExpr) arguments.get(1));
                case STRICT_INCLUSION:
                    // a <<: S  (subset and not equal)
                    return z3Context.mkAnd(z3Context.mkNot(z3Context.mkEq(arguments.get(0), arguments.get(1))),
                            z3Context.mkSetSubset((ArrayExpr) arguments.get(0), (ArrayExpr) arguments.get(1)));
                case NON_INCLUSION:
                    return z3Context
                            .mkNot(z3Context.mkSetSubset((ArrayExpr) arguments.get(0), (ArrayExpr) arguments.get(1)));
                case STRICT_NON_INCLUSION:
                    return z3Context.mkNot(
                            z3Context.mkAnd(z3Context.mkNot(z3Context.mkEq(arguments.get(0), arguments.get(1))),
                                    z3Context.mkSetSubset((ArrayExpr) arguments.get(0), (ArrayExpr) arguments.get(1))));
                default:
                    throw new AssertionError("Not implemented: " + node.getOperator());
            }
        }

        @Override
        public Expr visitExprOperatorNode(ExpressionOperatorNode node, TranslationOptions ops) {
            List<ExprNode> expressionNodes = node.getExpressionNodes();
            switch (node.getOperator()) {
                case PLUS: {
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkAdd(left, right);
                }
                case UNARY_MINUS: {
                    return z3Context.mkUnaryMinus((ArithExpr) visitExprNode(expressionNodes.get(0), ops));
                }
                case MINUS: {
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkSub(left, right);
                }
                case MOD: {
                    IntExpr left = (IntExpr) visitExprNode(expressionNodes.get(0), ops);
                    IntExpr right = (IntExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkMod(left, right);
                }
                case MULT: {
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkMul(left, right);
                }
                case DIVIDE: {
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    // well-definedness: divisor must not be zero
                    constraintList.add(z3Context.mkNot(z3Context.mkEq(right, z3Context.mkInt(0))));
                    return z3Context.mkDiv(left, right);
                }
                case POWER_OF: {
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    if (pow == null) {
                        pow = initPowerOf();
                    }
                    return pow.apply(left, right);
                }
                case INTERVAL: {
                    // left..right becomes a fresh set T with !(x).(left<=x & x<=right <=> x:T)
                    ArithExpr left = (ArithExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArithExpr right = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    ArithExpr x = (ArithExpr) z3Context.mkConst(createFreshTemporaryVariable(),
                            z3Context.getIntSort());
                    Expr T = z3Context.mkConst(createFreshTemporaryVariable(), bTypeToZ3Sort(node.getType()));
                    BoolExpr leftLe = z3Context.mkLe(left, x);
                    BoolExpr rightGe = z3Context.mkGe(right, x);
                    BoolExpr interval = z3Context.mkAnd(leftLe, rightGe);
                    BoolExpr member = z3Context.mkSetMembership(x, (ArrayExpr) T);
                    BoolExpr equality = z3Context.mkEq(interval, member);
                    Expr[] bound = new Expr[]{x};
                    Quantifier q = z3Context.mkForall(bound, equality, 1, null, null, null, null);
                    constraintList.add(q);
                    return T;
                }
                case INTEGER: {
                    return z3Context.mkFullSet(z3Context.mkIntSort());
                }
                case NATURAL1: {
                    Type type = node.getType();// POW(INTEGER)
                    // !x.(x >= 1 <=> x : NATURAL1)
                    Expr x = z3Context.mkConst("x", z3Context.getIntSort());
                    Expr natural1 = z3Context.mkConst("NATURAL1", bTypeToZ3Sort(type));
                    Expr[] bound = new Expr[]{x};
                    // x >= 1
                    BoolExpr a = z3Context.mkGe((ArithExpr) x, z3Context.mkInt(1));
                    // x : NATURAL1
                    BoolExpr b = z3Context.mkSetMembership(x, (ArrayExpr) natural1);
                    // a <=> b
                    BoolExpr body = z3Context.mkEq(a, b);
                    Quantifier q = z3Context.mkForall(bound, body, 1, null, null, null, null);
                    constraintList.add(q);
                    return natural1;
                }
                case NATURAL: {
                    Type type = node.getType();// POW(INTEGER)
                    // !x.(x >= 0 <=> x : NATURAL)
                    Expr x = z3Context.mkConst("x", z3Context.getIntSort());
                    Expr natural = z3Context.mkConst(ExpressionOperator.NATURAL.toString(), bTypeToZ3Sort(type));
                    Expr[] bound = new Expr[]{x};
                    // x >= 0
                    BoolExpr a = z3Context.mkGe((ArithExpr) x, z3Context.mkInt(0));
                    // x : NATURAL
                    BoolExpr b = z3Context.mkSetMembership(x, (ArrayExpr) natural);
                    // a <=> b
                    BoolExpr body = z3Context.mkEq(a, b);
                    Quantifier q = z3Context.mkForall(bound, body, 1, null, null, null, null);
                    constraintList.add(q);
                    return natural;
                }
                case FALSE:
                    return z3Context.mkFalse();
                case TRUE:
                    return z3Context.mkTrue();
                case BOOL: {
                    return z3Context.mkFullSet(z3Context.mkBoolSort());
                }
                case UNION: {
                    ArrayExpr[] array = new ArrayExpr[expressionNodes.size()];
                    for (int i = 0; i < array.length; i++) {
                        array[i] = (ArrayExpr) visitExprNode(expressionNodes.get(i), ops);
                    }
                    return z3Context.mkSetUnion(array);
                }
                case COUPLE: {
                    CoupleType type = (CoupleType) node.getType();
                    TupleSort coupleSort = (TupleSort) bTypeToZ3Sort(type);
                    Expr left = visitExprNode(node.getExpressionNodes().get(0), ops);
                    Expr right = visitExprNode(node.getExpressionNodes().get(1), ops);
                    return coupleSort.mkDecl().apply(left, right);
                }
                case DOMAIN:
                    break;
                case INTERSECTION: {
                    ArrayExpr left = (ArrayExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArrayExpr right = (ArrayExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkSetIntersection(left, right);
                }
                case RANGE:
                    break;
                case LAST: {
                    // sequence tuples are (array, size); last = array[size]
                    Expr expr = visitExprNode(expressionNodes.get(0), ops);
                    DatatypeExpr d = (DatatypeExpr) expr;
                    Expr[] args = d.getArgs();
                    ArrayExpr array = (ArrayExpr) args[0];
                    ArithExpr size = (ArithExpr) args[1];
                    // add WD constraint: sequence must not be empty
                    constraintList.add(z3Context.mkLe(z3Context.mkInt(1), size));
                    return z3Context.mkSelect(array, size);
                }
                case FRONT: {
                    // front = same array with size reduced by one
                    Expr expr = visitExprNode(expressionNodes.get(0), ops);
                    DatatypeExpr d = (DatatypeExpr) expr;
                    Expr[] args = d.getArgs();
                    ArrayExpr array = (ArrayExpr) args[0];
                    ArithExpr size = (ArithExpr) args[1];
                    // WD constraint: sequence must not be empty
                    constraintList.add(z3Context.mkLe(z3Context.mkInt(1), size));
                    TupleSort mkTupleSort = (TupleSort) bTypeToZ3Sort(node.getType());
                    return mkTupleSort.mkDecl().apply(array, z3Context.mkSub(size, z3Context.mkInt(1)));
                }
                case TAIL:
                    break;
                case CONC:
                    break;
                case EMPTY_SET:
                    // this is not missing! it is equal to an empty set
                    // enumeration below
                case SET_ENUMERATION: {
                    SetType type = (SetType) node.getType();
                    Type subType = type.getSubtype();
                    ArrayExpr z3Set = z3Context.mkEmptySet(bTypeToZ3Sort(subType));
                    for (ExprNode exprNode : expressionNodes) {
                        z3Set = z3Context.mkSetAdd(z3Set, visitExprNode(exprNode, ops));
                    }
                    return z3Set;
                }
                case SET_SUBTRACTION: {
                    ArrayExpr left = (ArrayExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArrayExpr right = (ArrayExpr) visitExprNode(expressionNodes.get(1), ops);
                    return z3Context.mkSetDifference(left, right);
                }
                case CONCAT:
                case DIRECT_PRODUCT:
                case DOMAIN_RESTRICTION:
                case DOMAIN_SUBTRACTION:
                case GENERALIZED_INTER:
                    break;
                case GENERALIZED_UNION: {
                    // union(S)
                    // return Res
                    // !(e).(e : Res <=> #(s).(s : S & e : s))
                    SetType setType = (SetType) node.getType();
                    Expr S = visitExprNode(expressionNodes.get(0), ops);
                    Expr res = z3Context.mkConst(createFreshTemporaryVariable(), bTypeToZ3Sort(setType));
                    Expr s = z3Context.mkConst(createFreshTemporaryVariable(), bTypeToZ3Sort(setType));
                    Expr e = z3Context.mkConst(createFreshTemporaryVariable(), bTypeToZ3Sort(setType.getSubtype()));
                    BoolExpr eIsInRes = z3Context.mkSetMembership(e, (ArrayExpr) res);
                    BoolExpr sIsInS = z3Context.mkSetMembership(s, (ArrayExpr) S);
                    BoolExpr eIsIns = z3Context.mkSetMembership(e, (ArrayExpr) s);
                    Quantifier exists = z3Context.mkExists(new Expr[]{s}, z3Context.mkAnd(sIsInS, eIsIns), 1, null,
                            null, null, null);
                    Quantifier q = z3Context.mkForall(new Expr[]{e}, z3Context.mkEq(eIsInRes, exists), 1, null, null,
                            null, null);
                    constraintList.add(q);
                    return res;
                }
                case EMPTY_SEQUENCE: {
                    Sort intType = z3Context.getIntSort();
                    Type type = ((SequenceType) node.getType()).getSubtype();
                    Sort rangeType = bTypeToZ3Sort(type);
                    // an uninterpreted array with size 0
                    ArrayExpr a = z3Context.mkArrayConst(createFreshTemporaryVariable(), intType, rangeType);
                    TupleSort mkTupleSort = (TupleSort) bTypeToZ3Sort(node.getType());
                    return mkTupleSort.mkDecl().apply(a, z3Context.mkInt(expressionNodes.size()));
                }
                case SEQ_ENUMERATION: {
                    Sort intType = z3Context.getIntSort();
                    Type type = ((SequenceType) node.getType()).getSubtype();
                    Sort rangeType = bTypeToZ3Sort(type);
                    ArrayExpr a = z3Context.mkArrayConst(createFreshTemporaryVariable(), intType, rangeType);
                    // B sequences are 1-indexed, hence the +1
                    for (int i = 0; i < expressionNodes.size(); i++) {
                        int j = i + 1;
                        IntNum index = z3Context.mkInt(j);
                        Expr value = visitExprNode(expressionNodes.get(i), ops);
                        a = z3Context.mkStore(a, index, value);
                    }
                    TupleSort mkTupleSort = (TupleSort) bTypeToZ3Sort(node.getType());
                    return mkTupleSort.mkDecl().apply(a, z3Context.mkInt(expressionNodes.size()));
                }
                case FIRST: {
                    Expr expr = visitExprNode(expressionNodes.get(0), ops);
                    DatatypeExpr d = (DatatypeExpr) expr;
                    Expr[] args = d.getArgs();
                    ArrayExpr array = (ArrayExpr) args[0];
                    ArithExpr size = (ArithExpr) args[1];
                    // add WD constraint: sequence must not be empty
                    constraintList.add(z3Context.mkLe(z3Context.mkInt(1), size));
                    return z3Context.mkSelect(array, z3Context.mkInt(1));
                }
                case FUNCTION_CALL: {
                    Expr expr = visitExprNode(expressionNodes.get(0), ops);
                    DatatypeExpr d = (DatatypeExpr) expr;
                    Expr[] args = d.getArgs();
                    ArrayExpr array = (ArrayExpr) args[0];
                    ArithExpr size = (ArithExpr) args[1];
                    ArithExpr index = (ArithExpr) visitExprNode(expressionNodes.get(1), ops);
                    // add WD constraint: 1 <= index <= size
                    constraintList.add(
                            z3Context.mkAnd(z3Context.mkGe(index, z3Context.mkInt(1)), z3Context.mkLe(index, size)));
                    return z3Context.mkSelect(array, index);
                }
                case CARD: {
                    break;
                }
                // NOTE(review): INSERT_FRONT/INSERT_TAIL/OVERWRITE_RELATION fall
                // through into the inverse-relation translation below — this looks
                // unintended (they are probably meant to 'break' like the other
                // unimplemented operators). Behavior kept as-is; verify upstream.
                case INSERT_FRONT:
                case INSERT_TAIL:
                case OVERWRITE_RELATION:
                case INVERSE_RELATION: {
                    // r~ : every couple (l,r) in r corresponds to (r,l) in the result
                    SetType nType = (SetType) node.getType();
                    CoupleType subType = (CoupleType) nType.getSubtype();
                    CoupleType revType = new CoupleType(subType.getRight(), subType.getLeft());
                    TupleSort subSort = (TupleSort) bTypeToZ3Sort(subType);
                    TupleSort revSort = (TupleSort) bTypeToZ3Sort(revType);
                    ArrayExpr expr = (ArrayExpr) visitExprNode(expressionNodes.get(0), ops);
                    Expr tempLeft = z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(subType.getLeft()));
                    Expr tempRight = z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(subType.getRight()));
                    ArrayExpr tempConstant = (ArrayExpr) z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(new SetType(revType)));
                    BoolExpr lrInExpr = z3Context.mkSetMembership(subSort.mkDecl().apply(tempLeft, tempRight), expr);
                    BoolExpr rlInTempExpr = z3Context.mkSetMembership(revSort.mkDecl().apply(tempRight, tempLeft),
                            tempConstant);
                    BoolExpr equality = z3Context.mkEq(lrInExpr, rlInTempExpr);
                    Expr[] bound = new Expr[]{tempLeft, tempRight};
                    Quantifier q = z3Context.mkForall(bound, equality, 2, null, null, null, null);
                    constraintList.add(q);
                    return tempConstant;
                }
                case RANGE_RESTRICTION:
                case RANGE_SUBTRATION:
                case RESTRICT_FRONT:
                case RESTRICT_TAIL:
                    break;
                case SEQ:
                    break;
                case SEQ1:
                    break;
                case ISEQ:
                    break;
                case ISEQ1:
                    break;
                case CARTESIAN_PRODUCT: {
                    // left*right: !(x,y).(x:left & y:right <=> (x,y) : result)
                    ArrayExpr left = (ArrayExpr) visitExprNode(expressionNodes.get(0), ops);
                    ArrayExpr right = (ArrayExpr) visitExprNode(expressionNodes.get(1), ops);
                    SetType setType = (SetType) node.getType();
                    CoupleType coupleType = (CoupleType) setType.getSubtype();
                    TupleSort coupleSort = (TupleSort) bTypeToZ3Sort(coupleType);
                    ArithExpr leftExpr = (ArithExpr) z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(coupleType.getLeft()));
                    ArithExpr rightExpr = (ArithExpr) z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(coupleType.getRight()));
                    ArrayExpr tempConstant = (ArrayExpr) z3Context.mkConst(createFreshTemporaryVariable(),
                            bTypeToZ3Sort(node.getType()));
                    Expr couple = coupleSort.mkDecl().apply(leftExpr, rightExpr);
                    BoolExpr xInLeft = z3Context.mkSetMembership(leftExpr, left);
                    BoolExpr yInRight = z3Context.mkSetMembership(rightExpr, right);
                    BoolExpr coupleInCartesian = z3Context.mkSetMembership(couple, tempConstant);
                    BoolExpr cartesian = z3Context.mkAnd(xInLeft, yInRight);
                    BoolExpr equality = z3Context.mkEq(cartesian, coupleInCartesian);
                    Expr[] bound = new Expr[]{leftExpr, rightExpr};
                    Quantifier q = z3Context.mkForall(bound, equality, 2, null, null, null, null);
                    constraintList.add(q);
                    return tempConstant;
                }
                case INT: {
                    Type type = node.getType();// POW(INTEGER)
                    int maxInt = PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_INT);
                    int minInt = PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MIN_INT);
                    // !x.((x >= MIN_INT & x <= MAX_INT) <=> x : INT)
                    Expr integer = z3Context.mkConst(ExpressionOperator.INT.toString(), bTypeToZ3Sort(type));
                    Expr x = z3Context.mkConst("x", z3Context.getIntSort());
                    Expr[] bound = new Expr[]{x};
                    // x >= MIN_INT
                    BoolExpr a = z3Context.mkGe((ArithExpr) x, z3Context.mkInt(minInt));
                    // x : INT
                    BoolExpr b = z3Context.mkSetMembership(x, (ArrayExpr) integer);
                    // x <= max_int
                    BoolExpr c = z3Context.mkLe((ArithExpr) x, z3Context.mkInt(maxInt));
                    // (a & c) <=> b
                    BoolExpr body = z3Context.mkEq(z3Context.mkAnd(a, c), b);
                    Quantifier q = z3Context.mkForall(bound, body, 1, null, null, null, null);
                    constraintList.add(q);
                    return integer;
                }
                case MAXINT: {
                    int maxInt = PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_INT);
                    return z3Context.mkInt(maxInt);
                }
                case MININT: {
                    int minInt = PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MIN_INT);
                    return z3Context.mkInt(minInt);
                }
                case NAT: {
                    Type type = node.getType();// POW(INTEGER)
                    int maxInt = PersonalPreferences.getIntPreference(PersonalPreferences.IntPreference.MAX_INT);
                    // !x.((x >= 0 & x <= MAX_INT) <=> x : NAT)
                    Expr x = z3Context.mkConst("x", z3Context.getIntSort());
                    Expr nat = z3Context.mkConst(ExpressionOperator.NAT.toString(), bTypeToZ3Sort(type));
                    Expr[] bound = new Expr[]{x};
                    // x >= 0
                    BoolExpr a = z3Context.mkGe((ArithExpr) x, z3Context.mkInt(0));
                    // x : NAT
                    BoolExpr b = z3Context.mkSetMembership(x, (ArrayExpr) nat);
                    // x <= max_int
                    BoolExpr c = z3Context.mkLe((ArithExpr) x, z3Context.mkInt(maxInt));
                    // (a & c) <=> b
                    BoolExpr body = z3Context.mkEq(z3Context.mkAnd(a, c), b);
                    Quantifier q = z3Context.mkForall(bound, body, 1, null, null, null, null);
                    constraintList.add(q);
                    return nat;
                }
                default:
                    break;
            }
            throw new AssertionError("Not implemented: " + node.getOperator());
        }

        /**
         * Declares a recursive pow(a,b) function via exponentiation by squaring
         * and axiomatizes it with a quantified constraint annotated as :rec-fun.
         */
        private FuncDecl initPowerOf() {
            // create function declaration
            FuncDecl powerOf = z3Context.mkFuncDecl(ExpressionOperator.POWER_OF.toString(),
                    new Sort[]{z3Context.mkIntSort(), z3Context.mkIntSort()}, z3Context.mkIntSort());
            // create arguments & bounds
            Expr a = z3Context.mkConst("a", z3Context.getIntSort());
            Expr b = z3Context.mkConst("b", z3Context.getIntSort());
            Expr[] bound = new Expr[]{a, b};
            // pow( a, b / 2 ) * pow( a, b / 2 )
            Expr expEven = z3Context.mkMul(
                    (ArithExpr) powerOf.apply(a, z3Context.mkDiv((ArithExpr) b, z3Context.mkInt(2))),
                    (ArithExpr) powerOf.apply(a, z3Context.mkDiv((ArithExpr) b, z3Context.mkInt(2))));
            // a * pow( a, b - 1 )
            Expr expOdd = z3Context.mkMul((ArithExpr) a,
                    (ArithExpr) powerOf.apply(a, z3Context.mkSub((ArithExpr) b, z3Context.mkInt(1))));
            // b % 2 == 0 ? expEven : expOdd
            Expr expEvenOdd = z3Context.mkITE(
                    z3Context.mkEq(z3Context.mkInt(0), z3Context.mkMod((IntExpr) b, z3Context.mkInt(2))), expEven,
                    expOdd);
            // b == 0 ? 1 : expEvenOdd
            Expr expZero = z3Context.mkITE(z3Context.mkEq(z3Context.mkInt(0), b), z3Context.mkInt(1), expEvenOdd);
            // pow( a, b ) = expZero
            Expr body = z3Context.mkEq(powerOf.apply(a, b), expZero);
            // prepare pattern
            Pattern[] patterns = new Pattern[]{z3Context.mkPattern(powerOf.apply(a, b))};
            // annotate recursive function
            Symbol recFun = z3Context.mkSymbol(":rec-fun");
            BoolExpr powConstraint = z3Context.mkForall(bound, body, bound.length, patterns, null, recFun, null);
            constraintList.add(powConstraint);
            return powerOf;
        }

        @Override
        public Expr visitNumberNode(NumberNode node, TranslationOptions ops) {
            return z3Context.mkInt(node.getValue());
        }

        @Override
        public Expr visitPredicateOperatorNode(PredicateOperatorNode node, TranslationOptions ops) {
            List<PredicateNode> predicateArguments = node.getPredicateArguments();
            switch (node.getOperator()) {
                case AND: {
                    BoolExpr[] list = new BoolExpr[predicateArguments.size()];
                    for (int i = 0; i < list.length; i++) {
                        list[i] = (BoolExpr) visitPredicateNode(predicateArguments.get(i), ops);
                    }
                    return z3Context.mkAnd(list);
                }
                case OR: {
                    BoolExpr[] list = new BoolExpr[predicateArguments.size()];
                    for (int i = 0; i < list.length; i++) {
                        list[i] = (BoolExpr) visitPredicateNode(predicateArguments.get(i), ops);
                    }
                    return z3Context.mkOr(list);
                }
                case IMPLIES: {
                    BoolExpr left = (BoolExpr) visitPredicateNode(predicateArguments.get(0), ops);
                    BoolExpr right = (BoolExpr) visitPredicateNode(predicateArguments.get(1), ops);
                    return z3Context.mkImplies(left, right);
                }
                case EQUIVALENCE: {
                    BoolExpr left = (BoolExpr) visitPredicateNode(predicateArguments.get(0), ops);
                    BoolExpr right = (BoolExpr) visitPredicateNode(predicateArguments.get(1), ops);
                    return z3Context.mkEq(left, right);
                }
                case NOT: {
                    BoolExpr child = (BoolExpr) visitPredicateNode(predicateArguments.get(0), ops);
                    return z3Context.mkNot(child);
                }
                case TRUE:
                    return z3Context.mkTrue();
                case FALSE:
                    return z3Context.mkFalse();
                default:
                    break;
            }
            // TODO
            throw new AssertionError("Not implemented: " + node.getOperator());
        }

        @Override
        public Expr visitSelectSubstitutionNode(SelectSubstitutionNode node, TranslationOptions opt) {
            throw new AssertionError("Not reachable");
        }

        @Override
        public Expr visitSingleAssignSubstitution(SingleAssignSubstitutionNode node, TranslationOptions opt) {
            throw new AssertionError("Not reachable");
        }

        @Override
        public Expr visitParallelSubstitutionNode(ParallelSubstitutionNode node, TranslationOptions opt) {
            throw new AssertionError("Not reachable");
        }

        @Override
        public Expr visitQuantifiedExpressionNode(QuantifiedExpressionNode node, TranslationOptions opt) {
            switch (node.getOperator()) {
                case SET_COMPREHENSION: {
                    // {e| P}
                    // return T
                    // !(e).(e : T <=> P )
                    Expr P = visitPredicateNode(node.getPredicateNode(), opt);
                    Expr T = z3Context.mkConst(createFreshTemporaryVariable(), bTypeToZ3Sort(node.getType()));
                    Expr[] array = new Expr[node.getDeclarationList().size()];
                    for (int i = 0; i < array.length; i++) {
                        DeclarationNode decl = node.getDeclarationList().get(i);
                        Expr e = z3Context.mkConst(decl.getName(), bTypeToZ3Sort(decl.getType()));
                        array[i] = e;
                    }
                    // multiple bound variables are wrapped into one tuple element
                    Expr tuple = null;
                    if (array.length > 1) {
                        TupleSort tupleSort = (TupleSort) bTypeToZ3Sort(((SetType) node.getType()).getSubtype());
                        tuple = tupleSort.mkDecl().apply(array);
                    } else {
                        tuple = array[0];
                    }
                    Expr[] bound = array;
                    BoolExpr a = z3Context.mkSetMembership(tuple, (ArrayExpr) T);
                    // a <=> P
                    BoolExpr body = z3Context.mkEq(a, P);
                    Quantifier q = z3Context.mkForall(bound, body, array.length, null, null, null, null);
                    constraintList.add(q);
                    return T;
                }
                case QUANTIFIED_INTER:
                case QUANTIFIED_UNION:
                    break;
                default:
                    break;
            }
            throw new AssertionError("Implement: " + node.getClass());
        }

        @Override
        public Expr visitQuantifiedPredicateNode(QuantifiedPredicateNode node, TranslationOptions opt) {
            switch (node.getOperator()) {
                case EXISTENTIAL_QUANTIFICATION: {
                    Expr[] identifiers = new Expr[node.getDeclarationList().size()];
                    for (int i = 0; i < node.getDeclarationList().size(); i++) {
                        DeclarationNode declNode = node.getDeclarationList().get(i);
                        identifiers[i] = z3Context.mkConst(declNode.getName(), bTypeToZ3Sort(declNode.getType()));
                    }
                    Expr predicate = visitPredicateNode(node.getPredicateNode(), opt);
                    Quantifier q = z3Context.mkExists(identifiers, predicate, identifiers.length, null, null, null,
                            null);
                    return q;
                }
                case UNIVERSAL_QUANTIFICATION: {
                    Expr[] identifiers = new Expr[node.getDeclarationList().size()];
                    for (int i = 0; i < node.getDeclarationList().size(); i++) {
                        DeclarationNode declNode = node.getDeclarationList().get(i);
                        identifiers[i] = z3Context.mkConst(declNode.getName(), bTypeToZ3Sort(declNode.getType()));
                    }
                    Expr predicate = visitPredicateNode(node.getPredicateNode(), opt);
                    return z3Context.mkForall(identifiers, predicate, identifiers.length, null, null, null, null);
                }
                default:
                    throw new AssertionError("Implement: " + node.getClass());
            }
        }

        @Override
        public Expr visitAnySubstitution(AnySubstitutionNode node, TranslationOptions opt) {
            throw new AssertionError("Not reachable");
        }
    }
}
package ru.r2cloud.jradio.lrpt;

import java.util.ArrayList;
import java.util.List;

/**
 * CCSDS AOS Virtual Channel Data Unit used by LRPT. Parses the VCDU header,
 * reassembles CCSDS space packets that may span VCDU boundaries, and keeps the
 * trailing incomplete packet in {@code partial} for the next VCDU.
 */
public class Vcdu {

    public static final int SIZE = 892;
    public static final int VITERBI_SIZE = (SIZE / 4 + 32) * 4;
    public static final int VITERBI_TAIL_SIZE = (VITERBI_SIZE + 1) * 16;

    private static final int VCDU_HEADER_LENGTH = 10;
    private static final int PRIMARY_HEADER_LENGTH = 6;
    private static final int SECONDARY_HEADER_LENGTH = 8;

    private int version;
    private VcduId id;
    private int counter;
    private byte signalling;
    private InSdu insertZone;
    private Mpdu mPdu;
    private List<Packet> packets = new ArrayList<>();
    private Packet partial;
    private byte[] data;

    /**
     * Parses one raw VCDU frame and extracts the packets it contains.
     *
     * @param previous the previously received VCDU (may be null); its partial
     *                 packet is continued when the frame counters are consecutive
     * @param data     the raw frame bytes; retained via {@link #getData()}
     */
    public void readExternal(Vcdu previous, byte[] data) {
        this.data = data;
        version = (data[0] & 0xFF) >> 6;
        id = new VcduId();
        // FIX: the original expression "data[0] & 0b0011_1111 + (byte)(data[1] >> 6)"
        // evaluated '+' before '&' (Java precedence), corrupting the mask, and the
        // arithmetic shift sign-extended data[1]. Per the CCSDS AOS frame layout the
        // 8-bit spacecraft id is the low 6 bits of byte 0 followed by the top 2 bits
        // of byte 1.
        id.setSpacecraftId(((data[0] & 0b0011_1111) << 2) | ((data[1] & 0xFF) >> 6));
        id.setVirtualChannelId(data[1] & 0b0011_1111);
        // 24-bit frame counter
        counter = (data[2] & 0xFF) << 16 | (data[3] & 0xFF) << 8 | (data[4] & 0xFF);
        signalling = data[5];
        insertZone = new InSdu();
        insertZone.setEncryption(data[6] == (byte) 0xFF);
        insertZone.setKeyNumber(data[7]);
        mPdu = new Mpdu();
        // FIX: mask before shifting — "data[8] >> 3" sign-extended negative bytes
        // into the 5-bit spare field.
        mPdu.setSpareBits((byte) ((data[8] & 0xFF) >> 3));
        mPdu.setHeaderFirstPointer(((data[8] & 0b0000_0111) << 8) | (data[9] & 0xFF));

        // continue the previous partial packet only if this frame directly follows
        // it. NOTE(review): the 24-bit counter wrap (0xFFFFFF -> 0) is not handled.
        Packet previousPartial = null;
        if (previous != null && previous.counter + 1 == counter) {
            previousPartial = previous.partial;
        }
        // 0b111_1111_1111 means "no packet header starts in this VCDU"
        if (mPdu.getHeaderFirstPointer() != 0b111_1111_1111) {
            if (mPdu.getHeaderFirstPointer() != 0 && previousPartial != null) {
                // complete the previous packet with the bytes preceding the first header
                byte[] newUserData = new byte[previousPartial.getUserData().length + mPdu.getHeaderFirstPointer()];
                System.arraycopy(previousPartial.getUserData(), 0, newUserData, 0, previousPartial.getUserData().length);
                System.arraycopy(data, VCDU_HEADER_LENGTH, newUserData, previousPartial.getUserData().length, mPdu.getHeaderFirstPointer());
                int userDataIndex = 0;
                // primary header was not read
                if (previousPartial.getVersion() == -1) {
                    readPrimaryHeader(newUserData, 0, previousPartial);
                    userDataIndex += PRIMARY_HEADER_LENGTH;
                }
                // primary header was read, but secondary header was not.
                if (previousPartial.isSecondaryHeader() && previousPartial.getNumberOfDays() == -1) {
                    readSecondaryHeader(newUserData, userDataIndex, previousPartial);
                    userDataIndex += SECONDARY_HEADER_LENGTH;
                }
                if (userDataIndex != 0) {
                    byte[] userDataWithoutHeaders = new byte[newUserData.length - userDataIndex];
                    System.arraycopy(newUserData, userDataIndex, userDataWithoutHeaders, 0, userDataWithoutHeaders.length);
                    previousPartial.setUserData(userDataWithoutHeaders);
                } else {
                    previousPartial.setUserData(newUserData);
                }
                // sometimes user data cannot be recovered even if VCDU is next
                int expectedLength = previousPartial.getLength() + 1;
                if (previousPartial.isSecondaryHeader()) {
                    expectedLength -= SECONDARY_HEADER_LENGTH;
                }
                if (previousPartial.getUserData().length == expectedLength) {
                    packets.add(previousPartial);
                }
            }
            int index = VCDU_HEADER_LENGTH + mPdu.getHeaderFirstPointer();
            // read consecutive packets while at least a primary header fits
            while (data.length >= index + PRIMARY_HEADER_LENGTH) {
                Packet packet = new Packet();
                readPrimaryHeader(data, index, packet);
                index += PRIMARY_HEADER_LENGTH;
                // corrupted packet. discard the rest of vcdu
                if (packet.getLength() == 0) {
                    index = data.length;
                    continue;
                }
                // for +1 see the length field description
                int userDataLength = packet.getLength() + 1;
                if (packet.isSecondaryHeader() && data.length >= index + SECONDARY_HEADER_LENGTH) {
                    readSecondaryHeader(data, index, packet);
                    userDataLength -= SECONDARY_HEADER_LENGTH;
                    index += SECONDARY_HEADER_LENGTH;
                }
                byte[] userData;
                if (index + userDataLength > data.length) {
                    // packet continues into the next VCDU — keep it as partial
                    userData = new byte[data.length - index];
                    System.arraycopy(data, index, userData, 0, userData.length);
                    packet.setUserData(userData);
                    partial = packet;
                } else {
                    userData = new byte[userDataLength];
                    System.arraycopy(data, index, userData, 0, userData.length);
                    packet.setUserData(userData);
                    packets.add(packet);
                }
                index += userData.length;
            }
            // primary header doesn't fit
            // check for partial packet is a safecheck only. shouldnt happen
            if (data.length - index > 0 && partial == null) {
                byte[] userData = new byte[data.length - index];
                System.arraycopy(data, index, userData, 0, userData.length);
                partial = new Packet();
                partial.setUserData(userData);
            }
        } else {
            // no header starts here: the whole data zone extends the previous partial
            if (previousPartial != null) {
                // use VCDU_HEADER_LENGTH instead of the magic number 10 for consistency
                byte[] newUserData = new byte[previousPartial.getUserData().length + data.length - VCDU_HEADER_LENGTH];
                System.arraycopy(previousPartial.getUserData(), 0, newUserData, 0, previousPartial.getUserData().length);
                System.arraycopy(data, VCDU_HEADER_LENGTH, newUserData, previousPartial.getUserData().length, data.length - VCDU_HEADER_LENGTH);
                previousPartial.setUserData(newUserData);
                this.partial = previousPartial;
            }
        }
    }

    /** Parses the 6-byte CCSDS space packet primary header at {@code index}. */
    private static void readPrimaryHeader(byte[] data, int index, Packet packet) {
        // 000 (CCSDS packet Version number 1)
        packet.setVersion((byte) ((data[index] & 0xFF) >> 5));
        // This bit shall be always set to 1 to indicate the presence of a secondary header.
        packet.setSecondaryHeader(((data[index] & 0xFF) & (1 << 3)) > 0);
        // This field defines the data route between two users application endpoints
        packet.setApid(((data[index] & 0b0000_0111) << 8) | (data[index + 1] & 0xFF));
        // This flag is set to 11 indicating that the packet contains unsegmented User data.
        packet.setSequence((byte) ((data[index + 2] & 0xFF) >> 6));
        // This field is a modulo 16384 counter, which numbers the packets
        packet.setSequenceCount(((data[index + 2] & 0b0011_1111) << 8) | (data[index + 3] & 0xFF));
        // This field contains a sequential binary count "C" that expresses the length of the
        // Secondary Header and the User Data. The value of "C" is the length (in octets) minus 1.
        packet.setLength((data[index + 4] & 0xFF) << 8 | (data[index + 5] & 0xFF));
    }

    /** Parses the 8-byte secondary header (day/millisecond/microsecond timestamp). */
    private static void readSecondaryHeader(byte[] data, int index, Packet packet) {
        packet.setNumberOfDays((data[index] & 0xFF) << 8 | (data[index + 1] & 0xFF));
        packet.setMillisecondOfDay(((long) data[index + 2] & 0xFF) << 24 | (data[index + 3] & 0xFF) << 16 | (data[index + 4] & 0xFF) << 8 | (data[index + 5] & 0xFF));
        packet.setMicrosecondOfMillisecond((data[index + 6] & 0xFF) << 8 | (data[index + 7] & 0xFF));
    }

    public byte[] getData() {
        return data;
    }

    public int getVersion() {
        return version;
    }

    public VcduId getId() {
        return id;
    }

    public int getCounter() {
        return counter;
    }

    public byte getSignalling() {
        return signalling;
    }

    public InSdu getInsertZone() {
        return insertZone;
    }

    public Mpdu getmPdu() {
        return mPdu;
    }

    public List<Packet> getPackets() {
        return packets;
    }

    public Packet getPartial() {
        return partial;
    }
}
package com.sharparam.sharpmemory.models; import com.sharparam.sharpmemory.Difficulty; import com.sharparam.sharpmemory.SharpMemory; import com.sharparam.sharpmemory.State; import com.sharparam.sharpmemory.events.FieldEventListener; import com.sharparam.sharpmemory.events.FieldEventType; import com.sharparam.sharpmemory.helpers.BrickHelper; import com.sharparam.sharpmemory.helpers.RandomHelper; import javafx.scene.image.Image; import java.util.ArrayList; import java.util.HashMap; import java.util.Timer; import java.util.TimerTask; public class FieldModel { /** * Delay until a pair of flipped bricks is checked. */ private static final int TRY_DELAY = 1000; /** * Difficulty modifiers control the number of bricks placed on the field. */ private static final HashMap<Difficulty, Integer> DIFF_MODIFIERS = new HashMap<Difficulty, Integer>(3) { { put(Difficulty.EASY, 1); put(Difficulty.MEDIUM, 2); put(Difficulty.HARD, 3); } }; /** * Registered event listeners. */ private final ArrayList<FieldEventListener> fieldEventListeners = new ArrayList<FieldEventListener>(); /** * Timer for delaying the match check. */ private Timer tryTimer; /** * The bricks that are on this field. */ private BrickModel[] bricks; /** * True if the user has just flipped two bricks and the system is waiting for the * try delay to expire before checking the bricks, false otherwise. */ private boolean tryInProgress = false; /** * Initializes a new instance of this FieldModel with the given array of bricks. * @param bricks Array of bricks to initialize with. */ public FieldModel(BrickModel[] bricks) { this.bricks = bricks; } /** * Initializes a new instance of this FieldModel with the given list of bricks. * @param bricks List of bricks to initialize with. */ public FieldModel(ArrayList<BrickModel> bricks) { this.bricks = (BrickModel[]) bricks.toArray(); } /** * Initializes a new instance of this FieldModel with the default amount of bricks, * generated by the randomize methods. 
*/ public FieldModel() { this(5); } /** * Initializes a new instance of this FieldModel with the specified amount of bricks, * generated by the randomize methods. * @param brickCount Number of bricks to initialize. */ public FieldModel(int brickCount) { // brickCount is the number of UNIQUE bricks // this has to be doubled because each one has a dupe Difficulty diff = SharpMemory.getInstance().getDifficulty(); bricks = new BrickModel[brickCount * 2 * DIFF_MODIFIERS.get(diff)]; randomizeBricks(diff); } /** * Returns whether a try is currently in progress by the user. * @return True if a try is in progress, false otherwise. */ public boolean isTryInProgress() { return tryInProgress; } /** * Adds an event listener to this FieldModel. * @param listener Instance of a listener interface. */ public void addEventListener(FieldEventListener listener) { fieldEventListeners.add(listener); } /** * Sends the specified event to all event listeners. * @param type Type of event to send. */ private void sendEvent(FieldEventType type) { for (FieldEventListener listener : fieldEventListeners) listener.handle(type); } /** * Gets whether this FieldModel contains a brick at the specified index. * @param index Index to check. * @return True if a brick exists at the index, false otherwise. */ public boolean hasBrick(int index) { return index >= 0 && index < bricks.length; } /** * Gets the brick at the specified index. * @param index Index to get. * @return The BrickModel at the specified index. */ public BrickModel getBrick(int index) throws ArrayIndexOutOfBoundsException { return bricks[index]; } /** * Gets the number of bricks in this FieldModel. * @return Number of bricks. */ public int getBrickCount() { return bricks.length; } /** * Gets the number of bricks that are currently in play. * That is, not cleared. * @return Number of active bricks. 
*/ public int getActiveBrickCount() { int count = 0; for (BrickModel brick : bricks) if (!brick.isCleared()) count++; return count; } /** * Gets the state of the brick at the specified index. * @param index Index to check. * @return State of the brick. */ public State getBrickState(int index) { return hasBrick(index) ? getBrick(index).getState() : State.INVALID; } /** * Checks if two bricks match each other. * @param a Brick A. * @param b Brick B. * @return True if they match, false otherwise. * @deprecated Use the equals method on BrickModel. */ @Deprecated public boolean isMatch(BrickModel a, BrickModel b) { return a.getImage() == b.getImage(); } /** * This removes the specified BrickModel and it's dupe version. * @param brick The BrickModel object to remove. */ public void clearBrick(BrickModel brick) { for (BrickModel b : bricks) if (b.equals(brick)) b.clear(); } /** * Clears the pair of bricks if they are a match. * @param a Brick A. * @param b Brick B. */ public void clearIfMatch(BrickModel a, BrickModel b) { sendEvent(FieldEventType.TRY); if (!isMatch(a, b)) { sendEvent(FieldEventType.FAIL); return; } clearBrick(a); clearBrick(b); // Just to be on the safe side, but this is probably redundant. sendEvent(FieldEventType.CLEAR); } /** * Gets the number of bricks that are currently faced up. * @return Number of face-up bricks. */ public int getFacedUpCount() { int count = 0; for (BrickModel brick : bricks) if (brick.getState() == State.FACE_UP) count++; return count; } /** * Gets the number of bricks that have been cleared. * @return Number of cleared bricks. */ public int getClearedCount() { int count = 0; for (BrickModel brick : bricks) if (brick.isCleared()) count++; return count; } /** * Flips a brick, making it face up. * @param brick The brick to flip. 
*/ public void flipBrick(BrickModel brick) { if (brick.isCleared() || brick.getState() == State.FACE_UP || brick.getState() == State.INVALID) return; brick.flip(); checkBricks(); } /** * Flips the brick at the specified index, if it exists. * @param index Brick index to flip. */ public void flipBrick(int index) { if (hasBrick(index)) flipBrick(getBrick(index)); } /** * Runs a check on all bricks and applies necessary operations. * Makes sure that brick matching is run when == 2 bricks have been flipped. * Resets brick states if > 2 bricks are flipped or after a brick match run. */ public void checkBricks() { if (getFacedUpCount() < 2) return; ArrayList<BrickModel> facedUp = new ArrayList<BrickModel>(); for (BrickModel brick : bricks) if (brick.getState() == State.FACE_UP) facedUp.add(brick); if (facedUp.size() > 2) { resetBrickStates(); return; } if (tryInProgress) return; tryInProgress = true; final BrickModel a = facedUp.get(0); final BrickModel b = facedUp.get(1); // Workaround to make application close properly if (tryTimer == null) tryTimer = new Timer(); // We want to run the match check and reset only after a set amount of time // this way the user will be able to see the bricks before they are cleared or reset. tryTimer.schedule(new TimerTask() { @Override public void run() { clearIfMatch(a, b); resetBrickStates(); if (getClearedCount() == getBrickCount()) sendEvent(FieldEventType.ALL_BRICKS_CLEARED); tryTimer.cancel(); tryTimer = null; } }, TRY_DELAY); } /** * Resets all bricks to their default state. */ public void resetBrickStates() { for (BrickModel brick : bricks) if (!brick.isCleared()) brick.faceDown(); tryInProgress = false; } /** * Calls randomizeBricks with EASY as argument. * @see #randomizeBricks(com.sharparam.sharpmemory.Difficulty) */ private void randomizeBricks() { randomizeBricks(Difficulty.EASY); } /** * Places out random bricks on the board based on predefined lists. * @param diff Difficulty to use. 
*/ private void randomizeBricks(Difficulty diff) { String diffString = diff.toString().toLowerCase(); randomizeBricks(diffString); } /** * Randomizes bricks based on difficulty. * @param diff Name of difficulty, corresponds to directories in resources/images */ private void randomizeBricks(String diff) { Image[] images = new Image[getBrickCount()]; int halfCount = getBrickCount() / 2; for (int i = 0; i < halfCount; i++) { Image image = BrickHelper.getImage("/images/" + diff + "/" + i + ".png"); images[i] = image; images[i + halfCount] = image; } randomizeImages(images); for (int i = 0; i < bricks.length; i++) bricks[i] = new BrickModel(images[i]); } /** * Randomizes the given image array, shuffling the elements. * @param images Image array to randomize. */ private void randomizeImages(Image[] images) { int index; Image temp; for (int i = images.length - 1; i > 0; i { index = RandomHelper.RNG.nextInt(i + 1); temp = images[index]; images[index] = images[i]; images[i] = temp; } } }
package seedu.address.ui; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.layout.AnchorPane; import javafx.scene.web.WebView; import javafx.stage.Stage; import seedu.address.commons.util.FxViewUtil; import seedu.address.commons.core.LogsCenter; import java.util.logging.Logger; /** * Controller for a help page */ public class HelpWindow extends UiPart { private static final Logger logger = LogsCenter.getLogger(HelpWindow.class); private static final String ICON = "/images/help_icon.png"; private static final String FXML = "HelpWindow.fxml"; private static final String TITLE = "Help"; private static final String USERGUIDE_URL = "https://github.com/CS2103AUG2016-T15-C2/main/blob/master/docs/UserGuide.md#command-summary"; private AnchorPane mainPane; private Stage dialogStage; public static HelpWindow load(Stage primaryStage) { logger.fine("Showing help page about the application."); HelpWindow helpWindow = UiPartLoader.loadUiPart(primaryStage, new HelpWindow()); helpWindow.configure(); return helpWindow; } @Override public void setNode(Node node) { mainPane = (AnchorPane) node; } @Override public String getFxmlPath() { return FXML; } private void configure(){ Scene scene = new Scene(mainPane); //Null passed as the parent stage to make it non-modal. dialogStage = createDialogStage(TITLE, null, scene); dialogStage.setMaximized(true); //TODO: set a more appropriate initial size setIcon(dialogStage, ICON); WebView browser = new WebView(); browser.getEngine().load(USERGUIDE_URL); FxViewUtil.applyAnchorBoundaryParameters(browser, 0.0, 0.0, 0.0, 0.0); mainPane.getChildren().add(browser); } public void show() { dialogStage.showAndWait(); } }
package com.skelril.nitro.entity; import org.spongepowered.api.block.BlockTypes; import org.spongepowered.api.data.key.Keys; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.world.Location; import org.spongepowered.api.world.World; import java.util.Optional; public class SafeTeleportHelper { public static Optional<Location<World>> getSafeDest(Location<World> dest) { Location<World> startingDest = dest; while (dest.getY() > 0 && dest.getBlockType() == BlockTypes.AIR) { dest = dest.add(0, -1, 0); } dest.add(0, 1, 0); // Move one back up to account for air // If its not air, restart at the starting destination, we failed if (dest.getBlockType() != BlockTypes.AIR) { dest = startingDest; // Move up until we find air or run out of world space while (dest.getBlockType() != BlockTypes.AIR) { // There is no free area in this column, abort if (dest.getY() == dest.getExtent().getBlockMax().getY()) { return Optional.empty(); } dest = dest.add(0, 1, 0); } } return Optional.of(dest); } public static Optional<Location<World>> getSafeDest(Entity entity, Location<World> dest) { Optional<Boolean> optIsFlying = entity.get(Keys.IS_FLYING); if (!optIsFlying.isPresent() || !optIsFlying.get()) { dest = getSafeDest(dest).orElse(null); } return Optional.ofNullable(dest); } public static Optional<Location<World>> teleport(Entity entity, Location<World> dest) { Optional<Location<World>> optDest = getSafeDest(entity, dest); if (optDest.isPresent()) { entity.setLocation(optDest.get()); } return optDest; } }
package soaba.core.config; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import soaba.core.api.IDatapoint; import soaba.core.api.IDatapoint.ACCESSTYPE; import soaba.core.api.IDatapoint.DATATYPE; import soaba.core.api.IGatewayDriver; import soaba.core.gateways.drivers.KNXGatewayDriver; import soaba.core.models.Datapoint; import flexjson.JSON; import flexjson.JSONDeserializer; import flexjson.JSONSerializer; public class AppConfig { private static final String GATEWAY_NUCLEUS_14 = "172.20.70.241"; private static final String GATEWAY_LAB_158 = "172.20.70.209"; private static final String APP_CONFIG_FILE = "resources/soaba.config"; private static AppConfig instance; private static final Logger logger = LoggerFactory.getLogger(AppConfig.class); @JSON(include = true) private List<IGatewayDriver> gateways = new ArrayList<IGatewayDriver>(); @JSON(include = true) private List<IDatapoint> datapoints = new ArrayList<IDatapoint>(); private AppConfig() { /* singleton class */ } public static AppConfig getInstance() { if (instance != null) return instance; return instance = new AppConfig().init(); } public AppConfig init() { logger.info("calling AppConfig#init()"); String gwNucleus14 = null; String gwLab158 = null; /** * Gateways Registration */ gateways.add(new KNXGatewayDriver("KNX Gateway Lab 1.58", gwLab158 = GATEWAY_LAB_158)); gateways.add(new KNXGatewayDriver("KNX Gateway Nucleus 14", gwNucleus14 = GATEWAY_NUCLEUS_14)); /** * Datapoints Registration */ String prefix = null; /** MIT - LAB 1.58 **/ if (gwLab158 != null) { // lights prefix = "EnergyLab "; datapoints.add(new Datapoint(gwLab158, prefix + "All Lights", ACCESSTYPE.WRITE_ONLY, DATATYPE.PERCENTAGE, null, "0/1/8")); datapoints.add(new Datapoint(gwLab158, prefix + "Light Blackboard", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/7/1", "0/1/0")); 
datapoints.add(new Datapoint(gwLab158, prefix + "Light Middle1", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/7/21", "0/1/2")); datapoints.add(new Datapoint(gwLab158, prefix + "Light Middle2", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/7/41", "0/1/4")); datapoints.add(new Datapoint(gwLab158, prefix + "Light TV", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/7/61", "0/1/6")); // blinds datapoints.add(new Datapoint(gwLab158, prefix + "All Blinds", ACCESSTYPE.WRITE_ONLY, DATATYPE.BIT, null, "0/2/12")); datapoints.add(new Datapoint(gwLab158, prefix + "Blind1", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/2/0", "0/2/3")); datapoints.add(new Datapoint(gwLab158, prefix + "Blind2", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/2/13", "0/2/6")); datapoints.add(new Datapoint(gwLab158, prefix + "Blind3", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/2/14", "0/2/9")); // door datapoints.add(new Datapoint(gwLab158, prefix + "Door", ACCESSTYPE.WRITE_ONLY, DATATYPE.BIT, null, "0/3/0")); // meteo station sensors datapoints.add(new Datapoint(gwLab158, prefix + "CO2", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/4/0", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Humidity", ACCESSTYPE.READ_ONLY, DATATYPE.PERCENTAGE, "0/4/1", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Temperature", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/4/3", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Temperature Door", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/4/5", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Lux", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/4/4", null)); // hvac datapoints.add(new Datapoint(gwLab158, prefix + "HVAC ONOFF", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "1/0/8", "1/0/0")); datapoints.add(new Datapoint(gwLab158, prefix + "HVAC Mode", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "1/0/9", "1/0/1")); // meteo station (bus Q.E. 
floor 1) prefix = "Meteo Station BUS[Q.E] Floor1 - "; datapoints.add(new Datapoint(gwLab158, prefix + "Luminosity - East Sensor", "Campus East Luminosity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/5", null, "W")); datapoints.add(new Datapoint(gwLab158, prefix + "Luminosity - South Sensor", "Campus South Luminosity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/6", null, "W")); datapoints.add(new Datapoint(gwLab158, prefix + "Luminosity - West Sensor", "Campus West Luminosity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/7", null, "W")); datapoints.add(new Datapoint(gwLab158, prefix + "Luminosity - Crepuscular Sensor", "Crepuscular Luminosity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/8", null, "W")); datapoints.add(new Datapoint(gwLab158, prefix + "Wind Speed Warn Interval", ACCESSTYPE.READ_WRITE, DATATYPE.TINY_NUMBER, "0/6/9", "0/6/9")); datapoints.add(new Datapoint(gwLab158, prefix + "Wind Speed Sensor", "Wind Speed", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/10", null, "Km/h")); datapoints.add(new Datapoint(gwLab158, prefix + "Outside Temp. Sensor", "Outside Temperature", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/11", null, "Cº")); datapoints.add(new Datapoint(gwLab158, prefix + "Rain Sensor", ACCESSTYPE.READ_ONLY, DATATYPE.BIT, "0/6/13", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Outside Temp. Sensor Precision", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/16", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Max. Temp Reached Precision", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/19", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Min. Temp Reached Precision", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/20", null)); datapoints.add(new Datapoint(gwLab158, prefix + "Relative Hum. 
Sensor Precision", "Relative Humidity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/22", null, "%")); datapoints.add(new Datapoint(gwLab158, prefix + "Dew Point", "Dew Point", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/25", null, "Cº")); datapoints.add(new Datapoint(gwLab158, prefix + "Absolute Humidity", "Absolute Humidity", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/27", null, "Kg/m^3")); datapoints.add(new Datapoint(gwLab158, prefix + "Exterior Entalpia", "Exterior Entalpia", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/28", null, "J/Kg")); datapoints.add(new Datapoint(gwLab158, prefix + "Global Solar Radiation Sensor", "Global Solar Radiation", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/6/29", null, "W/m^2")); } /** MIT - NUCLEUS 14 **/ if (gwNucleus14 != null) { // lights datapoints.add(new Datapoint(gwNucleus14, "2-N14 - All Lights", ACCESSTYPE.WRITE_ONLY, DATATYPE.BIT, null, "0/0/1")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.02 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/2", "0/0/2")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.04 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/3", "0/0/3")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.06 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/4", "0/0/4")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.08 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/5", "0/0/5")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.10 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/6", "0/0/6")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.12 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/7", "0/0/7")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.14 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/8", "0/0/8")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.16 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/9", "0/0/9")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.18 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, 
"0/0/10", "0/0/10")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.20 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/11", "0/0/11")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.22 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/12", "0/0/12")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.24 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/13", "0/0/13")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.26 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/14", "0/0/14")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.28 - Lights", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/0/15", "0/0/15")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.1.1E/2E - Lights Circuit", ACCESSTYPE.WRITE_ONLY, DATATYPE.BIT, null, "0/0/16")); // energy and general purpose sensors prefix = "2-N14 - "; datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Meter - Circ. A - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/0", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Meter - Circ. B - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/1", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Meter - Circ. C - HVAC Supply", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/2", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Meter - Circ. D - HVAC Supply", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/3", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Time Counter - Circ. A - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/4", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Time Counter - Circ. B - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/5", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Time Counter - Circ. C - HVAC Supply", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/6", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Energy Time Counter - Circ. 
D - HVAC Supply", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/7", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Status - Circ. A - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.BIT, "0/2/12", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Status - Circ. B - Hall Lights", ACCESSTYPE.READ_ONLY, DATATYPE.BIT, "0/2/13", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Status - Circ. C - HVAC Supply", ACCESSTYPE.READ_ONLY, DATATYPE.BIT, "0/2/14", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Status - Circ. D - HVAC Supply", ACCESSTYPE.READ_WRITE, DATATYPE.BIT, "0/2/15", "0/2/15")); datapoints.add(new Datapoint(gwNucleus14, prefix + "Luminosity - Hall - North Sensor", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/16", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Luminosity - Hall - Middle Sensor", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/17", null)); datapoints.add(new Datapoint(gwNucleus14, prefix + "Luminosity - Hall - South Sensor", ACCESSTYPE.READ_ONLY, DATATYPE.TINY_NUMBER, "0/2/18", null)); // hvac hot H2O valves datapoints.add(new Datapoint(gwNucleus14, "2-N14.02 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/14", "0/1/14")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.04 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/15", "0/1/15")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.06 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/16", "0/1/16")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.08 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/17", "0/1/17")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.10 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/18", "0/1/18")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.12 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/19", "0/1/19")); datapoints.add(new 
Datapoint(gwNucleus14, "2-N14.14 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/20", "0/1/20")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.16 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/21", "0/1/21")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.18 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/22", "0/1/22")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.20 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/23", "0/1/23")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.24 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/24", "0/1/24")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.26 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/25", "0/1/25")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.28 - HVAC - Hot H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/26", "0/1/26")); // hvac cold H2O valves datapoints.add(new Datapoint(gwNucleus14, "2-N14.02 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/27", "0/1/27")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.04 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/28", "0/1/28")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.06 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/29", "0/1/29")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.08 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/30", "0/1/30")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.10 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/31", "0/1/31")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.12 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/32", "0/1/32")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.14 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, 
"0/1/33", "0/1/33")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.16 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/34", "0/1/34")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.18 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/35", "0/1/35")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.20 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/36", "0/1/36")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.24 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/37", "0/1/37")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.26 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/38", "0/1/38")); datapoints.add(new Datapoint(gwNucleus14, "2-N14.28 - HVAC - Cold H2O Valve", ACCESSTYPE.READ_WRITE, DATATYPE.PERCENTAGE, "0/1/39", "0/1/39")); //TODO: add more datapoints of N14 } // loads the config from disk if it exists File f = new File(APP_CONFIG_FILE); if (f.exists()){ logger.info("AppConfig#init() :: configuration found on disk, loading from file."); AppConfig config = AppConfig.load(); // DEVELOPER MODE: use this code if typically changes are made on the code itself. logger.info("AppConfig#init() :: updating disk configuration with code based configuration."); config.update(this); logger.info("AppConfig#init() :: file system configuration updated."); // PRODUCTION MODE: or use this code if typically changes are made on the running app config file. //logger.info("AppConfig#init() :: updating code based configuration with disk configuration."); //this.update(config); //logger.info("AppConfig#init() :: configuration updated."); logger.info( String.format("AppConfig#init() :: configuration summary%n - datapoints count: %d%n - gateways count: %d", config.datapoints != null ? config.datapoints.size() : 0, config.gateways != null ? 
config.gateways.size() : 0)); AppConfig.save(config); return config; } // persists the config file to disk logger.info("AppConfig#init() :: configuration not found, generating new file to disk."); AppConfig.save(this); return this; } public List<IGatewayDriver> getGateways() { return gateways; } public List<IDatapoint> getDatapoints() { return datapoints; } public void update(AppConfig newConfig){ for(IDatapoint newDP : newConfig.datapoints){ IDatapoint origin = findDatapointByName(newDP.getName()); if(origin != null){ logger.info(String.format("AppConfig#update() :: updating datapoint '%s'.", newDP.getName())); origin.setDescription(newDP.getDescription()); origin.setAccessType(newDP.getAccessType()); origin.setDataType(newDP.getDataType()); origin.setGatewayAddress(newDP.getGatewayAddress()); origin.setReadAddress(newDP.getReadAddress()); origin.setWriteAddress(newDP.getWriteAddress()); } else { logger.info(String.format("AppConfig#update() :: adding new datapoint '%s'.", newDP.getName())); this.datapoints.add(newDP); } } } /** * Stores the AppConfig to disk persistence. * * @param theConfig the configuration file to persist in disk */ private static void save(AppConfig theConfig) { JSONSerializer serializer = new JSONSerializer().prettyPrint(true); try { FileWriter writer = new FileWriter(APP_CONFIG_FILE); serializer.deepSerialize(theConfig, writer); writer.close(); } catch (IOException e) { System.err.println(e.getMessage()); logger.error("AppConfig#save(theConfig)", e); } } /** * Loads AppConfig from disk persistence. 
*/ private static AppConfig load() { JSONDeserializer<AppConfig> serializer = new JSONDeserializer<AppConfig>(); AppConfig result = null; try { result = serializer .use("datapoints", ArrayList.class) .use("datapoints.values", Datapoint.class) .use("gateways", ArrayList.class) .use("gateways.values", KNXGatewayDriver.class) .deserialize(new String(Files.readAllBytes(java.nio.file.Paths.get(APP_CONFIG_FILE)))); } catch (IOException e) { System.err.println(e.getMessage()); logger.error("AppConfig#load()", e); } return result; } /** * Searchs for a datapoint by their id, read address or write address, the first to match * returns the underlyining datapoint. * * @param dpointIdOrAddress, the id, read address or write address of the datapoint to be found * @return the datapoint found or null if none matches the query */ public IDatapoint findDatapoint(String dpointIdOrAddress) { for (IDatapoint dp : datapoints) if (dp.getId().equals(dpointIdOrAddress) || (dp.getReadAddress() != null && dp.getReadAddress().equals(dpointIdOrAddress)) || (dp.getWriteAddress() != null && dp.getWriteAddress().equals(dpointIdOrAddress))) return dp; return null; } public IDatapoint findDatapointByName(String name) { for (IDatapoint dp : datapoints) if (dp.getName().equalsIgnoreCase(name)) return dp; return null; } public IGatewayDriver findGateway(String gatewayAddress) { for (IGatewayDriver gateway : gateways) if (gateway.getAddress().equalsIgnoreCase(gatewayAddress)) return gateway; return null; } public void setGateways(List<IGatewayDriver> gateways) { this.gateways = gateways; } public void setDatapoints(List<IDatapoint> datapoints) { this.datapoints = datapoints; } }
package com.thanglequoc.aqicalculator;

import java.util.Optional;

/**
 * A calculator use to calculate AQI from pollutant concentration, support both
 * <b>regular AQI</b> calculation and <b>Nowcast AQI</b> calculation. This
 * object is intended to be a singleton object to avoid perfomance issue.
 * <p>
 * To use the <i>AQICalculator</i> object, get its instance by calling
 * <tt>getAQICalculatorInstance() </tt> method directly
 *
 * @author ThangLeQuoc
 *
 */
public class AQICalculator {

    private PollutantsBreakpointGenerator breakpointGenerator;
    private AQIMessageGenerator messageGenerator;
    private PollutantsBreakpoint pollutantsBreakpoint;
    private PollutantBreakpoint pollutantBreakpoint;
    private Optional<PollutantConcentration> targetPollutantConcentration;
    private NowcastCalculator nowcastCalculator;
    private PollutantConcentrationTruncator truncator;

    private static AQICalculator uniqueAQICalculatorInstance;

    /**
     * Gets the AQI calculator instance.
     * <p>
     * Synchronized so that concurrent first-time callers cannot race the lazy
     * initialization and observe a partially constructed singleton.
     *
     * @return the AQI calculator instance
     */
    public static synchronized AQICalculator getAQICalculatorInstance() {
        if (uniqueAQICalculatorInstance == null) {
            uniqueAQICalculatorInstance = new AQICalculator();
        }
        return uniqueAQICalculatorInstance;
    }

    /**
     * Instantiates a new AQI calculator.
     */
    private AQICalculator() {
        /*
         * AQI Calculator will generate the following thing
         * PollutantBreakpointGenerator: Generator to get the breakpoints table
         * from JSON File PollutantsBreakpoint: Store a list of pollutant
         * breakpoint Nowcast Calculator: calculator to get avgAQI at present
         * for PM10, PM2.5, Ozone
         */
        // TODO: Handle this exception properly — if construction of the generators
        // fails, breakpointGenerator stays null and the line below throws NPE.
        try {
            this.breakpointGenerator = new PollutantsBreakpointGenerator();
            this.messageGenerator = new AQIMessageGenerator();
        } catch (Exception e) {
            e.printStackTrace();
        }
        this.pollutantsBreakpoint = breakpointGenerator.getPollutantsBreakpoint();
        this.nowcastCalculator = new NowcastCalculator();
        this.truncator = new PollutantConcentrationTruncator();
    }

    /**
     * Computes the AQI for the given pollutant and average concentration by
     * truncating the concentration to the pollutant's precision, locating its
     * breakpoint range and applying the EPA linear interpolation formula.
     *
     * @param pollutantCode    the pollutant code
     * @param avgConcentration the average concentration
     * @return the rounded AQI, or -1 if the concentration is negative or out of
     *         every known breakpoint range
     */
    private int calculateAQI(String pollutantCode, double avgConcentration) {
        pollutantBreakpoint = this.pollutantsBreakpoint.getPollutantBreakpointByCode(pollutantCode);
        if (avgConcentration < 0) {
            return -1;
        } else {
            double truncatedConcentration = this.truncator
                    .getTruncatedPollutantConcentrationOnPollutantCode(pollutantCode, avgConcentration);
            // find the target Concentration with it corresponding Index level
            targetPollutantConcentration = pollutantBreakpoint
                    .getConcentrationRangeWithAvgConcentration(truncatedConcentration);
            if (targetPollutantConcentration.isPresent()) {
                int i_high = targetPollutantConcentration.get().getIndex().getMaxIndex();
                int i_low = targetPollutantConcentration.get().getIndex().getMinIndex();
                double c_low = targetPollutantConcentration.get().getMinConcentration();
                double c_high = targetPollutantConcentration.get().getMaxConcentration();
                // perform the calculation formula
                double result = (i_high - i_low) / (c_high - c_low) * (avgConcentration - c_low) + i_low;
                // round it to the nearest integer, and return
                return (int) Math.round(result);
            } else
                return -1;
        }
    }

    /**
     * Gets the AQI result (index plus category/message texts) for the given
     * pollutant and average concentration.
     *
     * @param pollutantCode    the pollutant code
     * @param avgConcentration the avg concentration
     * @return the AQI result; carries -1 and "invalid" messages when the
     *         concentration does not fall into any known breakpoint range
     */
    public AQIResult getAQI(String pollutantCode, double avgConcentration) {
        pollutantBreakpoint = this.pollutantsBreakpoint.getPollutantBreakpointByCode(pollutantCode);
        double truncatedConcentration = this.truncator.getTruncatedPollutantConcentrationOnPollutantCode(pollutantCode,
                avgConcentration);
        // find the target Concentration with it corresponding Index level
        targetPollutantConcentration = pollutantBreakpoint
                .getConcentrationRangeWithAvgConcentration(truncatedConcentration);

        // Defaults for the "no matching breakpoint range" case.
        int aqi = -1;
        String category = InvalidMessage.INVALID_CATEGORY.getLiteral();
        String generalAQIMessage = InvalidMessage.INVALID_GENERAL_MESSAGE.getLiteral();
        String healthEffectsStatement = InvalidMessage.INVALID_HEALTH_EFFECTS_STATEMENTS_MESSAGE.getLiteral();
        String guidanceStatement = InvalidMessage.INVALID_GUIDANCE_MESSAGE.getLiteral();

        if (targetPollutantConcentration.isPresent()) {
            // calculateAQI re-derives the breakpoint range and applies the
            // interpolation formula itself; the duplicated inline computation
            // that used to live here was dead code and has been removed.
            aqi = calculateAQI(pollutantCode, avgConcentration);
            GeneralAQIMessage generalMessage = messageGenerator.getGeneralAQIMessageObjectOnAQILevel(aqi);
            SpecificAQILevelMessage specificAQILevelMessage = messageGenerator
                    .getSpecifcAQILevelMessageOnAQILevelOfPollutant(pollutantCode, aqi);
            category = generalMessage.getCategory();
            generalAQIMessage = generalMessage.getMessage();
            healthEffectsStatement = specificAQILevelMessage.getHealthEffectsStatements();
            guidanceStatement = specificAQILevelMessage.getGuidance();
        }
        return new AQIResult(aqi, category, generalAQIMessage, healthEffectsStatement, guidanceStatement);
    }

    /**
     * Gets the nowcast AQIResult object.
     *
     * @param pollutantCode the pollutant code
     * @param data          the hourly concentration data
     * @return the nowcast AQI result; carries -1 and "invalid" messages when the
     *         nowcast concentration is invalid or out of range
     */
    public AQIResult getNowcastAQI(String pollutantCode, double[] data) {
        pollutantBreakpoint = this.pollutantsBreakpoint.getPollutantBreakpointByCode(pollutantCode);
        double nowcastConcentration = nowcastCalculator.getNowcastConcentration(pollutantCode, data);

        int aqi = -1;
        String category = InvalidMessage.INVALID_CATEGORY.getLiteral();
        String generalAQIMessage = InvalidMessage.INVALID_GENERAL_MESSAGE.getLiteral();
        String healthEffectsStatement = InvalidMessage.INVALID_HEALTH_EFFECTS_STATEMENTS_MESSAGE.getLiteral();
        String guidanceStatement = InvalidMessage.INVALID_GUIDANCE_MESSAGE.getLiteral();

        // check if the nowcast has a valid data , if not, return aqi = -1
        if (nowcastConcentration < 0) {
            return new AQIResult(aqi, category, generalAQIMessage, healthEffectsStatement, guidanceStatement);
        } else {
            // find the target Concentration with it corresponding Index level
            targetPollutantConcentration = pollutantBreakpoint
                    .getConcentrationRangeWithAvgConcentration(nowcastConcentration);
            if (targetPollutantConcentration.isPresent()) {
                int i_high = targetPollutantConcentration.get().getIndex().getMaxIndex();
                int i_low = targetPollutantConcentration.get().getIndex().getMinIndex();
                double c_low = targetPollutantConcentration.get().getMinConcentration();
                double c_high = targetPollutantConcentration.get().getMaxConcentration();
                double result = (i_high - i_low) / (c_high - c_low) * (nowcastConcentration - c_low) + i_low;
                aqi = (int) Math.round(result);
                GeneralAQIMessage generalMessage = messageGenerator.getGeneralAQIMessageObjectOnAQILevel(aqi);
                SpecificAQILevelMessage specificAQILevelMessage = messageGenerator
                        .getSpecifcAQILevelMessageOnAQILevelOfPollutant(pollutantCode, aqi);
                category = generalMessage.getCategory();
                generalAQIMessage = generalMessage.getMessage();
                healthEffectsStatement = specificAQILevelMessage.getHealthEffectsStatements();
                guidanceStatement = specificAQILevelMessage.getGuidance();
            }
            return new AQIResult(aqi, category, generalAQIMessage, healthEffectsStatement, guidanceStatement);
        }
    }
}
package de.retest.recheck.report;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

import de.retest.recheck.ignore.Filter;
import de.retest.recheck.report.action.ActionReplayData;
import de.retest.recheck.ui.actions.ExceptionWrapper;
import de.retest.recheck.ui.actions.TargetNotFoundException;
import de.retest.recheck.ui.descriptors.Element;
import de.retest.recheck.ui.descriptors.SutState;
import de.retest.recheck.ui.diff.AttributesDifference;
import de.retest.recheck.ui.diff.ElementDifference;
import de.retest.recheck.ui.diff.IdentifyingAttributesDifference;
import de.retest.recheck.ui.diff.LeafDifference;
import de.retest.recheck.ui.diff.RootElementDifference;
import de.retest.recheck.ui.diff.StateDifference;

/**
 * Recursively applies a {@link Filter} to a {@link TestReport}, producing a new report
 * tree in which all differences matched by the filter are removed. The input report is
 * never mutated; every level (suite, test, action, state, element, attribute) is copied.
 */
public class TestReportFilter {

	private TestReportFilter() {
	}

	/** Filters every suite of the given report, returning a new filtered report. */
	public static TestReport filter( final TestReport report, final Filter filter ) {
		final TestReport newTestReport = new TestReport();
		for ( final SuiteReplayResult suiteReplayResult : report.getSuiteReplayResults() ) {
			newTestReport.addSuite( TestReportFilter.filter( suiteReplayResult, filter ) );
		}
		return newTestReport;
	}

	/** Filters every test of the given suite, returning a new filtered suite result. */
	static SuiteReplayResult filter( final SuiteReplayResult suiteReplayResult, final Filter filter ) {
		final SuiteReplayResult newSuiteReplayResult = new SuiteReplayResult( suiteReplayResult.getSuite(),
				suiteReplayResult.getSuiteNr(), suiteReplayResult.getGroundState() );
		for ( final TestReplayResult testReplayResult : suiteReplayResult.getTestReplayResults() ) {
			newSuiteReplayResult.addTest( TestReportFilter.filter( testReplayResult, filter ) );
		}
		return newSuiteReplayResult;
	}

	/** Filters every action of the given test, returning a new filtered test result. */
	static TestReplayResult filter( final TestReplayResult testReplayResult, final Filter filter ) {
		final TestReplayResult newTestReplayResult =
				new TestReplayResult( testReplayResult.getName(), testReplayResult.getTestNr() );
		for ( final ActionReplayResult actionReplayResult : testReplayResult.getActionReplayResults() ) {
			newTestReplayResult.addAction( TestReportFilter.filter( actionReplayResult, filter ) );
		}
		return newTestReplayResult;
	}

	/**
	 * Rebuilds the given action result with its state difference filtered. Error and
	 * target-not-found information as well as duration and actual state are carried over.
	 */
	static ActionReplayResult filter( final ActionReplayResult actionReplayResult, final Filter filter ) {
		final ActionReplayData data = ActionReplayData.withTarget( actionReplayResult.getDescription(),
				actionReplayResult.getTargetComponent(), actionReplayResult.getGoldenMasterPath() );
		final ExceptionWrapper error = actionReplayResult.getThrowableWrapper();
		final TargetNotFoundException targetNotFound =
				(TargetNotFoundException) actionReplayResult.getTargetNotFoundException();
		final StateDifference newStateDifference = filter( actionReplayResult.getStateDifference(), filter );
		final long actualDuration = actionReplayResult.getDuration();
		final SutState actualState = new SutState( actionReplayResult.getWindows() );
		final ActionReplayResult newActionReplayResult = ActionReplayResult.createActionReplayResult( data, error,
				targetNotFound, newStateDifference, actualDuration, actualState );
		return newActionReplayResult;
	}

	/**
	 * Filters the root element differences of the given state difference.
	 * Bug fix: action results without differences (e.g. error results) carry a null or
	 * empty state difference — previously this method dereferenced it unconditionally
	 * and threw a {@link NullPointerException}; such values are now passed through.
	 */
	static StateDifference filter( final StateDifference stateDifference, final Filter filter ) {
		if ( stateDifference == null || stateDifference.getRootElementDifferences().isEmpty() ) {
			return stateDifference;
		}
		final List<RootElementDifference> newRootElementDifferences =
				filter( stateDifference.getRootElementDifferences(), filter );
		final StateDifference newStateDifference =
				new StateDifference( newRootElementDifferences, stateDifference.getDurationDifference() );
		return newStateDifference;
	}

	/** Filters each root element difference in the given list. */
	static List<RootElementDifference> filter( final List<RootElementDifference> rootElementDifferences,
			final Filter filter ) {
		final List<RootElementDifference> newRootElementDifferences = new ArrayList<>();
		for ( final RootElementDifference rootElementDifference : rootElementDifferences ) {
			newRootElementDifferences.add( filter( rootElementDifference, filter ) );
		}
		return newRootElementDifferences;
	}

	/** Rebuilds the given root element difference with its element difference filtered. */
	static RootElementDifference filter( final RootElementDifference rootElementDifference, final Filter filter ) {
		final ElementDifference newElementDifference = filter( rootElementDifference.getElementDifference(), filter );
		final RootElementDifference newRootElementDifference = new RootElementDifference( newElementDifference,
				rootElementDifference.getExpectedDescriptor(), rootElementDifference.getActualDescriptor() );
		return newRootElementDifference;
	}

	/**
	 * Filters attribute, identifying-attribute and child differences of the given
	 * element difference, returning a rebuilt element difference.
	 */
	static ElementDifference filter( final ElementDifference elementDiff, final Filter filter ) {
		AttributesDifference attributesDifference = elementDiff.getAttributesDifference();
		LeafDifference identifyingAttributesDifference = elementDiff.getIdentifyingAttributesDifference();
		Collection<ElementDifference> childDifferences = elementDiff.getChildDifferences();
		if ( elementDiff.hasAttributesDifferences() ) {
			attributesDifference = filter( elementDiff.getElement(), elementDiff.getAttributesDifference(), filter );
		}
		if ( elementDiff.hasIdentAttributesDifferences() ) {
			identifyingAttributesDifference = filter( elementDiff.getElement(),
					(IdentifyingAttributesDifference) elementDiff.getIdentifyingAttributesDifference(), filter );
		}
		if ( !elementDiff.getChildDifferences().isEmpty() ) {
			childDifferences = filter( elementDiff.getChildDifferences(), filter );
		}
		final ElementDifference newElementDiff = new ElementDifference( elementDiff.getElement(), attributesDifference,
				identifyingAttributesDifference, elementDiff.getExpectedScreenshot(), elementDiff.getActualScreenshot(),
				childDifferences );
		return newElementDiff;
	}

	/** Filters each element difference in the given collection. */
	static Collection<ElementDifference> filter( final Collection<ElementDifference> elementDifferences,
			final Filter filter ) {
		final List<ElementDifference> newElementDifferences = new ArrayList<>();
		for ( final ElementDifference elementDifference : elementDifferences ) {
			newElementDifferences.add( filter( elementDifference, filter ) );
		}
		return newElementDifferences;
	}

	/** Drops all identifying-attribute differences of the element that the filter matches. */
	static IdentifyingAttributesDifference filter( final Element element,
			final IdentifyingAttributesDifference identAttributesDiff, final Filter filter ) {
		return identAttributesDiff.getAttributeDifferences().stream() //
				.filter( diff -> !filter.matches( element, diff ) ) //
				.collect( Collectors.collectingAndThen( Collectors.toList(),
						diffs -> new IdentifyingAttributesDifference( element.getIdentifyingAttributes(), diffs ) ) );
	}

	/** Drops all attribute differences of the element that the filter matches. */
	public static AttributesDifference filter( final Element element, final AttributesDifference attributesDiff,
			final Filter filter ) {
		return attributesDiff.getDifferences().stream() //
				.filter( diff -> !filter.matches( element, diff ) ) //
				.collect( Collectors.collectingAndThen( Collectors.toList(), AttributesDifference::new ) );
	}
}
package water;

import hex.ConfusionMatrix;
import hex.VariableImportance;

import java.util.Arrays;
import java.util.HashMap;

import javassist.*;
import water.api.DocGen;
import water.api.Request.API;
import water.fvec.*;
import water.util.Log.Tag.Sys;
import water.util.Log;
import water.util.Utils;

/**
 * A Model models reality (hopefully).
 * A model can be used to 'score' a row, or a collection of rows on any
 * compatible dataset - meaning the row has all the columns with the same names
 * as used to build the model.
 */
public abstract class Model extends Iced {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  /** Key associated with this Model, if any.  */
  @API(help="Key associated with Model") public final Key _selfKey;

  /** Dataset key used to *build* the model, for models for which this makes
   *  sense, or null otherwise.  Not all models are built from a dataset (eg
   *  artificial models), or are built from a single dataset (various ensemble
   *  models), so this key has no *mathematical* significance in the model but
   *  is handy during common model-building and for the historical record. */
  @API(help="Datakey used to *build* the model") public final Key _dataKey;

  /** Columns used in the model and are used to match up with scoring data
   *  columns.  The last name is the response column name. */
  @API(help="Column names used to build the model") public final String _names[];

  /** Categorical/factor/enum mappings, per column.  Null for non-enum cols.
   *  The last column holds the response col enums.  */
  @API(help="Column names used to build the model") public final String _domains[][];

  /** Full constructor from frame: Strips out the Vecs to just the names needed
   *  to match columns later for future datasets.
   */
  public Model( Key selfKey, Key dataKey, Frame fr ) {
    this(selfKey,dataKey,fr.names(),fr.domains());
  }

  /** Full constructor */
  public Model( Key selfKey, Key dataKey, String names[], String domains[][] ) {
    // NOTE(review): the null branch allocates names.length+1 entries, which the
    // assert directly below then rejects — confirm which invariant is intended.
    if( domains == null ) domains=new String[names.length+1][];
    assert domains.length==names.length;
    assert names.length > 1;
    assert names[names.length-1] != null; // Have a valid response-column name?
    _selfKey = selfKey;
    _dataKey = dataKey;
    _names = names;
    _domains = domains;
  }

  /** Simple shallow copy constructor to a new Key */
  public Model( Key selfKey, Model m ) { this(selfKey,m._dataKey,m._names,m._domains); }

  /** Called when deleting this model, to cleanup any internal keys */
  public void delete() { UKV.remove(_selfKey); }

  /** Name of the response column (always the last model column). */
  public String responseName() { return _names[ _names.length-1]; }
  /** Response enum levels, or null for regression models. */
  public String[] classNames() { return _domains[_domains.length-1]; }
  /** A model is a classifier iff its response column has enum levels. */
  public boolean isClassifier() { return classNames() != null ; }
  /** Number of response classes; 1 for regression. */
  public int nclasses() {
    String cns[] = classNames();
    return cns==null ? 1 : cns.length;
  }

  /** For classifiers, confusion matrix on validation set. */
  public ConfusionMatrix cm() { return null; }
  /** Variable importance of individual variables measured by this model. */
  public VariableImportance varimp() { return null; }

  /** Bulk score the frame 'fr', producing a Frame result; the 1st Vec is the
   *  predicted class, the remaining Vecs are the probability distributions.
   *  For Regression (single-class) models, the 1st and only Vec is the
   *  prediction value.  Also passed in a flag describing how hard we try to
   *  adapt the frame.  */
  public Frame score( Frame fr, boolean exact ) {
    // Adapt the Frame layout - returns adapted frame and frame containing only
    // newly created vectors
    Frame[] adaptFrms = adapt(fr,exact);
    // Adapted frame containing all columns - mix of original vectors from fr
    // and newly created vectors serving as adaptors
    Frame adaptFrm = adaptFrms[0];
    // Contains only newly created vectors. The frame eases deletion of these vectors.
    Frame onlyAdaptFrm = adaptFrms[1];
    Vec v = adaptFrm.anyVec().makeZero();
    // If the model produces a classification/enum, copy the domain into the
    // result vector.
    v._domain = _domains[_domains.length-1];
    adaptFrm.add("predict",v);
    // Classifiers additionally get one zero-filled probability column per class.
    if( nclasses() > 1 )
      for( int c=0; c<nclasses(); c++ )
        adaptFrm.add(classNames()[c],adaptFrm.anyVec().makeZero());
    // Distributed scoring: each map() scores its chunk rows via score0 and
    // writes the prediction (arg-max for classifiers) plus per-class outputs.
    new MRTask2() {
      @Override public void map( Chunk chks[] ) {
        double tmp[] = new double[_names.length];
        float preds[] = new float[nclasses()];
        Chunk p = chks[_names.length-1];
        for( int i=0; i<p._len; i++ ) {
          float[] out = score0(chks,i,tmp,preds);
          if( nclasses() > 1 ) {
            if( Float.isNaN(out[0]) ) p.setNA0(i);
            else p.set0(i, Utils.maxIndex(out));
            for( int c=0; c<nclasses(); c++ )
              chks[_names.length+c].set0(i,out[c]);
          } else {
            p.set0(i,out[0]);
          }
        }
      }
    }.doAll(adaptFrm);
    // Return just the output columns
    int x=_names.length-1, y=adaptFrm.numCols();
    Frame output = adaptFrm.extractFrame(x, y);
    // Delete manually only vectors which i created :-/
    onlyAdaptFrm.remove();
    return output;
  }

  /** Single row scoring, on a compatible Frame.  */
  public final float[] score( Frame fr, boolean exact, int row ) {
    double tmp[] = new double[fr.numCols()];
    for( int i=0; i<tmp.length; i++ )
      tmp[i] = fr.vecs()[i].at(row);
    return score(fr.names(),fr.domains(),exact,tmp);
  }

  /** Single row scoring, on a compatible set of data.  Fairly expensive to adapt.
   */
  public final float[] score( String names[], String domains[][], boolean exact, double row[] ) {
    return score(adapt(names,domains,exact),row,new float[nclasses()]);
  }

  /** Single row scoring, on a compatible set of data, given an adaption vector */
  public final float[] score( int map[][], double row[], float[] preds ) {
    int[] colMap = map[map.length-1]; // Column mapping is the final array
    assert colMap.length == _names.length-1 : " "+Arrays.toString(colMap)+" "+Arrays.toString(_names);
    double tmp[] = new double[colMap.length]; // The adapted data
    for( int i=0; i<colMap.length; i++ ) {
      // Column mapping, or NaN for missing columns
      double d = colMap[i]==-1 ? Double.NaN : row[colMap[i]];
      if( map[i] != null ) { // Enum mapping
        int e = (int)d;
        if( e < 0 || e >= map[i].length ) d = Double.NaN; // User data is out of adapt range
        else {
          e = map[i][e];
          d = e==-1 ? Double.NaN : (double)e;
        }
      }
      tmp[i] = d;
    }
    return score0(tmp,preds); // The results.
  }

  /** Build an adaption array.  The length is equal to the Model's vector
   *  length minus the response plus a column mapping.  Each inner array is a
   *  domain map from data domains to model domains - or null for non-enum
   *  columns, or null for identity mappings.  The extra final int[] is the
   *  column mapping itself, mapping from model columns to data columns. or -1
   *  if missing.
   *  If 'exact' is true, will throw if there are:
   *    any columns in the model but not in the input set;
   *    any enums in the data that the model does not understand
   *    any enums returned by the model that the data does not have a mapping for.
   *  If 'exact' is false, these situations will use or return NA's instead.
   */
  private int[][] adapt( String names[], String domains[][], boolean exact, boolean dropResponse ) {
    int maplen = dropResponse ? _names.length : _names.length+1;
    int map[][] = new int[maplen][];
    // Build the column mapping: cmap[model_col] == user_col, or -1 if missing.
    int cmap[] = map[maplen-1] = new int[maplen-1];
    HashMap<String,Integer> m = new HashMap<String, Integer>();
    for( int d = 0; d < names.length ; ++d) m.put(names[d], d);
    for( int c = 0; c < maplen-1; ++c) {
      Integer I = m.get(_names[c]);
      cmap[c] = I==null ? -1 : I; // Check for data missing model column
    }
    // Make sure all are compatible
    for( int c=0; c<cmap.length; c++ ) {
      int d = cmap[c]; // Matching data column
      if( d == -1 ) { // Column was missing from data
        if( exact ) throw new IllegalArgumentException("Model requires a column called "+_names[c]);
        continue; // Cannot check domains of missing columns
      }
      // Now do domain mapping
      String ms[] = _domains[c]; // Model enum
      String ds[] = domains[d]; // Data enum
      if( ms == ds ) { // Domains trivially equal?
      } else if( ms == null && ds != null ) {
        throw new IllegalArgumentException("Incompatible column: '" + _names[c] + "', expected (trained on) numeric, was passed a categorical");
      } else if( ms != null && ds == null ) {
        if( exact ) throw new IllegalArgumentException("Incompatible column: '" + _names[c] + "', expected (trained on) categorical, was passed a numeric");
        throw H2O.unimpl(); // Attempt an asEnum?
      } else if( !Arrays.deepEquals(ms, ds) ) {
        map[c] = getDomainMapping(_names[c], ms, ds, exact);
      } else {
        // null mapping is equal to identity mapping
      }
    }
    return map;
  }

  /** Convenience overload: drop the response column. */
  private int[][] adapt( String names[], String domains[][], boolean exact ) {
    return adapt(names, domains, exact, true);
  }

  /** Build an adapted Frame from the given Frame.  Useful for efficient bulk
   *  scoring of a new dataset to an existing model.  Same adaption as above,
   *  but expressed as a Frame instead of as an int[][].  The returned Frame
   *  does not have a response column.
   *  It returns a <b>two element array</b> containing an adapted frame and a
   *  frame which contains only vectors which where adapted (the purpose of the
   *  second frame is to delete all adapted vectors with deletion of the
   *  frame).
   */
  public Frame[] adapt( Frame fr, boolean exact, boolean dropResponse ) {
    String frnames[] = fr.names();
    Vec frvecs[] = fr.vecs();
    int map[][] = adapt(frnames,fr.domains(),exact,dropResponse);
    int cmap[] = dropResponse ? map[_names.length-1] : map[_names.length];
    Vec vecs[] = dropResponse ? new Vec[_names.length-1] : new Vec[_names.length];
    // First pass: count how many columns need a domain-transforming vector.
    int avCnt = 0;
    for( int c=0; c<cmap.length; c++ )
      if (map[c] != null) avCnt++;
    Vec[] avecs = new Vec[avCnt]; // list of adapted vectors
    String[] anames = new String[avCnt]; // names of adapted vectors
    avCnt = 0;
    for( int c=0; c<cmap.length; c++ ) { // iterate over columns
      int d = cmap[c]; // Data index
      if( d == -1 ) throw H2O.unimpl(); // Swap in a new all-NA Vec
      else if( map[c] == null ) { // No or identity domain map?
        vecs[c] = frvecs[d]; // Just use the Vec as-is
      } else { // Domain mapping - creates a new vector
        vecs[c] = avecs[avCnt] = frvecs[d].makeTransf(map[c]);
        anames[avCnt] = frnames[d];
        avCnt++;
      }
    }
    String[] vnames = dropResponse ? Arrays.copyOf(_names,_names.length-1) : _names.clone();
    return new Frame[] { new Frame(vnames,vecs), new Frame(anames, avecs) };
  }

  /** Convenience overload: drop the response column. */
  public Frame[] adapt( Frame fr, boolean exact) {
    return adapt(fr, exact, true);
  }

  /** Returns a mapping between values domains for a given column.  */
  public static int[] getDomainMapping(String colName, String[] modelDom, String[] dom, boolean exact) {
    int emap[] = new int[dom.length];
    HashMap<String,Integer> md = new HashMap<String, Integer>();
    for( int i = 0; i < modelDom.length; i++) md.put(modelDom[i], i);
    for( int i = 0; i < dom.length; i++) {
      Integer I = md.get(dom[i]);
      if( I==null && exact )
        Log.warn(Sys.SCORM, "Column "+colName+" was not trained with factor '"+dom[i]+"' which appears in the data");
      emap[i] = I==null ? -1 : I;
    }
    // Sanity: every mapped level must round-trip to an equal model level.
    for( int i = 0; i < dom.length; i++)
      assert emap[i]==-1 || modelDom[emap[i]].equals(dom[i]);
    return emap;
  }

  /** Bulk scoring API for one row.  Chunks are all compatible with the model,
   *  and expect the last Chunks are for the final distribution & prediction.
   *  Default method is to just load the data into the tmp array, then call
   *  subclass scoring logic. */
  protected float[] score0( Chunk chks[], int row_in_chunk, double[] tmp, float[] preds ) {
    assert chks.length>=_names.length; // Last chunk is for the response
    for( int i=0; i<_names.length; i++ )
      tmp[i] = chks[i].at0(row_in_chunk);
    return score0(tmp,preds);
  }

  /** Subclasses implement the scoring logic.  The data is pre-loaded into a
   *  re-used temp array, in the order the model expects.  The predictions are
   *  loaded into the re-used temp array, which is also returned.  */
  protected abstract float[] score0(double data[/*ncols*/], float preds[/*nclasses*/]);
  // Version where the user has just ponied-up an array of data to be scored.
  // Data must be in proper order.  Handy for JUnit tests.
  public double score(double [] data){ return Utils.maxIndex(score0(data,new float[nclasses()])); }

  /** Return a String which is a valid Java program representing a class that
   *  implements the Model.  The Java is of the form:
   *  <pre>
   *    class UUIDxxxxModel {
   *      public static final String NAMES[] = { ....column names... }
   *      public static final String DOMAINS[][] = { ....domain names... }
   *      // Pass in data in a double[], pre-aligned to the Model's requirements.
   *      // Jam predictions into the preds[] array; preds[0] is reserved for the
   *      // main prediction (class for classifiers or value for regression),
   *      // and remaining columns hold a probability distribution for classifiers.
   *      float[] predict( double data[], float preds[] );
   *      double[] map( HashMap &lt; String,Double &gt; row, double data[] );
   *      // Does the mapping lookup for every row, no allocation
   *      float[] predict( HashMap &lt; String,Double &gt; row, double data[], float preds[] );
   *      // Allocates a double[] for every row
   *      float[] predict( HashMap &lt; String,Double &gt; row, float preds[] );
   *      // Allocates a double[] and a float[] for every row
   *      float[] predict( HashMap &lt; String,Double &gt; row );
   *    }
   *  </pre>
   */
  public String toJava() {
    SB sb = new SB();
    sb.p("\n");
    sb.p("class ").p(_selfKey.toString()).p(" {\n");
    toJavaNAMES(sb);
    toJavaNCLASSES(sb);
    toJavaInit(sb);
    sb.p("\n");
    toJavaPredict(sb);
    sb.p(TOJAVA_MAP);
    sb.p(TOJAVA_PREDICT_MAP);
    sb.p(TOJAVA_PREDICT_MAP_ALLOC1);
    sb.p(TOJAVA_PREDICT_MAP_ALLOC2);
    sb.p("}\n");
    return sb.toString();
  }

  // Same thing as toJava, but as a Javassist CtClass
  private CtClass makeCtClass() throws CannotCompileException {
    CtClass clz = ClassPool.getDefault().makeClass(_selfKey.toString());
    clz.addField(CtField.make(toJavaNAMES (new SB()).toString(),clz));
    clz.addField(CtField.make(toJavaNCLASSES(new SB()).toString(),clz));
    toJavaInit(clz); // Model-specific top-level goodness
    clz.addMethod(CtMethod.make(toJavaPredict(new SB()).toString(),clz));
    clz.addMethod(CtMethod.make(TOJAVA_MAP,clz));
    clz.addMethod(CtMethod.make(TOJAVA_PREDICT_MAP,clz));
    clz.addMethod(CtMethod.make(TOJAVA_PREDICT_MAP_ALLOC1,clz));
    clz.addMethod(CtMethod.make(TOJAVA_PREDICT_MAP_ALLOC2,clz));
    return clz;
  }

  // Emits the generated class's NAMES field declaration.
  private SB toJavaNAMES( SB sb ) { return sb.p(" public static final String []NAMES = new String[] ").p(_names).p(";\n"); }
  // Emits the generated class's NCLASSES constant declaration.
  private SB toJavaNCLASSES( SB sb ) { return sb.p(" public static final int NCLASSES = ").p(nclasses()).p(";\n"); }
  // Override in subclasses to provide some top-level model-specific goodness
  protected void toJavaInit(SB sb) { };
  protected void toJavaInit(CtClass ct) { };
  // Override in subclasses to provide some inside 'predict' call goodness
  protected void toJavaPredictBody(SB sb) {
    throw new IllegalArgumentException("This model type does not support conversion to Java");
  }
  // Wrapper around the main predict call, including the signature and return value
  private SB toJavaPredict(SB sb) {
    sb.p(" // Pass in data in a double[], pre-aligned to the Model's requirements.\n");
    sb.p(" // Jam predictions into the preds[] array; preds[0] is reserved for the\n");
    sb.p(" // main prediction (class for classifiers or value for regression),\n");
    sb.p(" // and remaining columns hold a probability distribution for classifiers.\n");
    sb.p(" float[] predict( double data[], float preds[] ) {\n");
    toJavaPredictBody(sb);
    sb.p(" return preds;\n");
    sb.p(" }\n");
    return sb;
  }

  private static final String TOJAVA_MAP =
    " // Takes a HashMap mapping column names to doubles. Looks up the column\n"+
    " // names needed by the model, and places the doubles into the data array in\n"+
    " // the order needed by the model. Missing columns use NaN.\n"+
    " double[] map( java.util.HashMap row, double data[] ) {\n"+
    " for( int i=0; i<NAMES.length-1; i++ ) {\n"+
    " Double d = (Double)row.get(NAMES[i]);\n"+
    " data[i] = d==null ? Double.NaN : d;\n"+
    " }\n"+
    " return data;\n"+
    " }\n";

  private static final String TOJAVA_PREDICT_MAP =
    " // Does the mapping lookup for every row, no allocation\n"+
    " float[] predict( java.util.HashMap row, double data[], float preds[] ) {\n"+
    " return predict(map(row,data),preds);\n"+
    " }\n";

  private static final String TOJAVA_PREDICT_MAP_ALLOC1 =
    " // Allocates a double[] for every row\n"+
    " float[] predict( java.util.HashMap row, float preds[] ) {\n"+
    " return predict(map(row,new double[NAMES.length]),preds);\n"+
    " }\n";

  private static final String TOJAVA_PREDICT_MAP_ALLOC2 =
    " // Allocates a double[] and a float[] for every row\n"+
    " float[] predict( java.util.HashMap row ) {\n"+
    " return predict(map(row,new double[NAMES.length]),new float[NCLASSES+1]);\n"+
    " }\n";

  // Can't believe this wasn't done long long ago
  // Tiny StringBuilder wrapper with a fluent append API used by the codegen above.
  protected static class SB {
    public final StringBuilder _sb = new StringBuilder();
    public SB p( String s ) { _sb.append(s); return this; }
    public SB p( float s ) { _sb.append(s); return this; }
    public SB p( char s ) { _sb.append(s); return this; }
    public SB p( int s ) { _sb.append(s); return this; }
    public SB indent( int d ) { for( int i=0; i<d; i++ ) p(" "); return this; }
    // Convert a String[] into a valid Java String initializer
    SB p( String[] ss ) {
      p('{');
      for( int i=0; i<ss.length-1; i++ ) p('"').p(ss[i]).p("\",");
      if( ss.length > 0 ) p('"').p(ss[ss.length-1]).p('"');
      return p('}');
    }
    @Override public String toString() { return _sb.toString(); }
  }

  // Convenience method for testing: build Java, convert it to a class &
  // execute it: compare the results of the new class's (JIT'd) scoring with
  // the built-in (interpreted) scoring on this dataset.  Throws if there
  // is any error (typically an AssertionError).
  public void testJavaScoring( Frame fr ) {
    try {
      //System.out.println(toJava());
      Class clz = ClassPool.getDefault().toClass(makeCtClass());
      // NOTE(review): 'modelo' is never used and 'fr' is never scored — the
      // comparison described above is not actually performed yet; this only
      // verifies that the generated class compiles and instantiates.
      Object modelo = clz.newInstance();
    }
    catch( CannotCompileException cce ) { throw new Error(cce); }
    catch( InstantiationException cce ) { throw new Error(cce); }
    catch( IllegalAccessException cce ) { throw new Error(cce); }
  }
}
package swf.app;

import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

import swf.accel.model.AccelerationData;
import swf.model.TimeSeries;
import swf.nnc.NearestNeighbourClassificator;
import swf.transformer.SubTransformer;

/**
 * Evaluator that renders, for each record, which library gesture the
 * nearest-neighbour classificator matches every probe gesture to, as an
 * ASCII table.
 */
public class GestureDistanceInfo implements Evaluator {

  private NearestNeighbourClassificator<TimeSeries<AccelerationData>> nearestNeighbourClassificator;

  public GestureDistanceInfo(
      NearestNeighbourClassificator<TimeSeries<AccelerationData>> nearestNeighbourClassificator
  ) {
    this.nearestNeighbourClassificator = nearestNeighbourClassificator;
  }

  /**
   * Checks if the library gestures match the given gestures.
   *
   * @param timeSeriesList recorded time series, one per record
   * @return a banner followed by one table row per record; each of the 8
   *         columns holds the library index the probe gesture matched
   */
  public String evaluate(List<TimeSeries<AccelerationData>> timeSeriesList) {
    Iterator<TimeSeries<AccelerationData>> iterator = timeSeriesList.iterator();
    // Banner reproduced verbatim from the original source.
    StringBuilder output = new StringBuilder(
        " ____ _______ __\n"
        + " / __ \\/_ __/ | / /\n"
        + " / / / / / / | | /| / / \n"
        + " / /_/ / / / | |/ |/ / \n"
        + "/_____/ /_/ |__/|__/ \n"
        + " \n");
    String format = "| Record %d | %d | %d | %d | %d | %d | %d | %d | %d |\n";
    int recordIndex = 1;
    // FIX(review): this border literal was unterminated/garbled in the
    // original file (it did not compile). Reconstructed a 9-column border to
    // match the row format above — TODO confirm exact original text.
    String top = "+-----------+-----+-----+-----+-----+-----+-----+-----+-----+\n";
    output.append(top);
    // FIX: build via StringBuilder instead of repeated String '+=' in a loop.
    while (iterator.hasNext()) {
      int[] result = this.evaluateTimeSeries(iterator.next());
      output.append(String.format(
          format,
          recordIndex,
          result[0], result[1], result[2], result[3],
          result[4], result[5], result[6], result[7]
      ));
      recordIndex++;
    }
    return output.append(top).toString();
  }

  /**
   * Evaluates one record: markers "START 1".."END 8" delimit the library
   * gestures, "START 9".."END 16" the probe gestures. For each probe, returns
   * the index of the library gesture its nearest neighbour resolves to.
   */
  private int[] evaluateTimeSeries(TimeSeries<AccelerationData> timeSeries) {
    LinkedList<TimeSeries<AccelerationData>> library = new LinkedList<TimeSeries<AccelerationData>>();
    LinkedList<TimeSeries<AccelerationData>> gestures = new LinkedList<TimeSeries<AccelerationData>>();
    for (int i = 1; i < 9; i++) {
      SubTransformer<AccelerationData> subTransformer =
          new SubTransformer<AccelerationData>("START " + i, "END " + i);
      library.add(subTransformer.transform(timeSeries));
      subTransformer = new SubTransformer<AccelerationData>("START " + (i + 8), "END " + (i + 8));
      gestures.add(subTransformer.transform(timeSeries));
    }
    int[] result = new int[8];
    for (int i = 0; i < 8; i++) {
      TimeSeries<AccelerationData> gesture = gestures.get(i);
      result[i] = library.indexOf(
          this.nearestNeighbourClassificator.searchNearestNeighbour(gesture, library)
      );
    }
    return result;
  }
}
// of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // all copies or substantial portions of the Software. // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package com.uber.jenkins.phabricator; import com.uber.jenkins.phabricator.coverage.CodeCoverageMetrics; import com.uber.jenkins.phabricator.utils.CommonUtils; import com.uber.jenkins.phabricator.utils.Logger; import hudson.model.Result; class CommentBuilder { private static final String UBERALLS_TAG = "uberalls"; private final Logger logger; private final CodeCoverageMetrics currentCoverage; private final StringBuilder comment; private final String buildURL; private final Result result; private final boolean preserveFormatting; public CommentBuilder(Logger logger, Result result, CodeCoverageMetrics currentCoverage, String buildURL, boolean preserveFormatting) { this.logger = logger; this.result = result; this.currentCoverage = currentCoverage; this.buildURL = buildURL; this.preserveFormatting = preserveFormatting; this.comment = new StringBuilder(); } /** * Get the final comment to post to Phabricator * @return */ public String getComment() { return comment.toString(); } /** * Determine whether to attempt to process coverage * @return */ public boolean hasCoverageAvailable() { return currentCoverage != null && currentCoverage.getLineCoveragePercent() > 0.0f; } /** * Query uberalls for parent coverage and add appropriate comment * @param parentCoverage the parent coverage returned from uberalls * 
@param baseCommit * @param branchName the name of the current branch */ public void processParentCoverage(CodeCoverageMetrics parentCoverage, String baseCommit, String branchName) { if (parentCoverage == null) { logger.info(UBERALLS_TAG, "unable to find coverage for parent commit"); return; } Float lineCoveragePercent = currentCoverage.getLineCoveragePercent(); logger.info(UBERALLS_TAG, "line coverage: " + lineCoveragePercent); logger.info(UBERALLS_TAG, "found parent coverage as " + parentCoverage.getLineCoveragePercent()); float coverageDelta = lineCoveragePercent - parentCoverage.getLineCoveragePercent(); String coverageDeltaDisplay = String.format("%.3f", coverageDelta); String lineCoverageDisplay = String.format("%.3f", lineCoveragePercent); if (coverageDelta > 0) { comment.append("Coverage increased (+" + coverageDeltaDisplay + "%) to " + lineCoverageDisplay + "%"); } else if (coverageDelta < 0) { comment.append("Coverage decreased (" + coverageDeltaDisplay + "%) to " + lineCoverageDisplay + "%"); } else { comment.append("Coverage remained the same (" + lineCoverageDisplay + "%)"); } comment.append(" when pulling **" + branchName + "** into "); comment.append(baseCommit.substring(0, 7)); comment.append("."); } public void processBuildResult(boolean commentOnSuccess, boolean commentWithConsoleLinkOnFailure, boolean runHarbormaster) { if (result == Result.SUCCESS) { if (comment.length() == 0 && (commentOnSuccess || !runHarbormaster)) { comment.append("Build is green"); } } else if (result == Result.UNSTABLE) { comment.append("Build is unstable"); } else if (result == Result.FAILURE) { if (!runHarbormaster || commentWithConsoleLinkOnFailure) { comment.append("Build has FAILED"); } } else if (result == Result.ABORTED) { comment.append("Build was aborted"); } else { logger.info(UBERALLS_TAG, "Unknown build status " + result.toString()); } } /** * Add user-defined content via a .phabricator-comment file * @param customComment the contents of the file */ public void 
addUserComment(String customComment) { if (CommonUtils.isBlank(customComment)) { return; } // Ensure we separate previous parts of the comment with newlines if (hasComment()) { comment.append("\n\n"); } if (preserveFormatting) { comment.append(String.format("%s\n", customComment)); } else { comment.append(String.format("```\n%s\n```\n\n", customComment)); } } /** * Determine if there exists a comment already * @return */ public boolean hasComment() { return comment.length() > 0; } /** * Add a build link to the comment */ public void addBuildLink() { comment.append(String.format(" %s for more details.", buildURL)); } /** * Add a build failure message to the comment */ public void addBuildFailureMessage() { comment.append(String.format("\n\nLink to build: %s", buildURL)); comment.append(String.format("\nSee console output for more information: %sconsole", buildURL)); } }
package tigase.cert; import tigase.util.Algorithms; import tigase.util.Base64; import java.io.*; import java.security.*; import java.security.cert.*; import java.security.cert.Certificate; import java.security.interfaces.RSAPrivateKey; import java.security.spec.InvalidKeySpecException; import java.security.spec.PKCS8EncodedKeySpec; import java.util.*; import java.util.logging.Logger; import javax.crypto.Cipher; import javax.security.auth.x500.X500Principal; import sun.security.x509.*; /** * Created: Sep 22, 2010 3:09:01 PM * * @author <a href="mailto:artur.hefczyc@tigase.org">Artur Hefczyc</a> * @version $Rev$ */ public abstract class CertificateUtil { private static final String BEGIN_CERT = " private static final String BEGIN_KEY = " private static final String BEGIN_RSA_KEY = " private static final String ENCRIPT_TEST = "--encript-test"; private static final String ENCRIPT_TEST_SHORT = "-et"; private static final String END_CERT = " private static final String END_KEY = " private static final String END_RSA_KEY = " protected static final byte[] ID_ON_XMPPADDR = new byte[] { 0x06, 0x08, 0x2B, 0x06, 0x01, 0x05, 0x05, 0x07, 0x08, 0x05 }; private static final String KEY_PAIR = "--key-pair"; private static final String KEY_PAIR_SHORT = "-kp"; private static final String LOAD_CERT = "--load-cert"; private static final String LOAD_CERT_SHORT = "-lc"; private static final String LOAD_DER_PRIVATE_KEY = "--load-der-priv-key"; private static final String LOAD_DER_PRIVATE_KEY_SHORT = "-ldpk"; private static final Logger log = Logger.getLogger(CertificateUtil.class.getName()); private static final String PRINT_PROVIDERS = "--print-providers"; private static final String PRINT_PROVIDERS_SHORT = "-pp"; private static final String PRINT_SERVICES = "--print-services"; private static final String PRINT_SERVICES_SHORT = "-ps"; private static final String SELF_SIGNED_CERT = "--self-signed-cert"; private static final String SELF_SIGNED_CERT_SHORT = "-ssc"; private static final 
String STORE_CERT = "--store-cert"; private static final String STORE_CERT_SHORT = "-sc"; private static void appendName(StringBuilder sb, String prefix, String value) { if (value != null) { if (sb.length() > 0) { sb.append(", "); } sb.append(prefix).append('=').append(value); } } private static int calculateLength(byte[] buffer, int start) throws ArrayIndexOutOfBoundsException { int offset = start + 1; int b = (buffer[offset] & 0xff); if (b < 0x80) return (b); int result = 0; offset++; int len = b - 0x80; for (int i = 0; i < len; i++) { b = (buffer[(i + offset)] & 0xff); result = (result << 8) + b; } return result; } private final static int calculateOffset(byte[] buffer, int offset) throws ArrayIndexOutOfBoundsException { int b = (buffer[(offset + 1)] & 0xff); if (b < 0x80) return (offset + 2); int len = b - 0x80; return (offset + len + 2); } /** * Method description * * * * @param size * @param password * * @return * * @throws NoSuchAlgorithmException */ public static KeyPair createKeyPair(int size, String password) throws NoSuchAlgorithmException { KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); keyPairGenerator.initialize(size); KeyPair keyPair = keyPairGenerator.genKeyPair(); return keyPair; } /** * Method description * * * * @param email * @param domain * @param organizationUnit * @param organization * @param city * @param state * @param country * @param keyPair * @return * * @throws CertificateException * @throws IOException * @throws NoSuchAlgorithmException * @throws InvalidKeyException * @throws NoSuchProviderException * @throws SignatureException */ public static X509Certificate createSelfSignedCertificate(String email, String domain, String organizationUnit, String organization, String city, String state, String country, KeyPair keyPair) throws CertificateException, IOException, NoSuchAlgorithmException, InvalidKeyException, NoSuchProviderException, SignatureException { X509CertInfo certInfo = new X509CertInfo(); 
CertificateVersion certVersion = new CertificateVersion(); certInfo.set(X509CertInfo.VERSION, certVersion); Date firstDate = new Date(); Date lastDate = new Date(firstDate.getTime() + 365 * 24 * 60 * 60 * 1000L); CertificateValidity interval = new CertificateValidity(firstDate, lastDate); certInfo.set(X509CertInfo.VALIDITY, interval); certInfo.set(X509CertInfo.SERIAL_NUMBER, new CertificateSerialNumber((int) (firstDate.getTime() / 1000))); StringBuilder subject = new StringBuilder(1024); appendName(subject, "CN", domain); appendName(subject, "CN", "*." + domain); appendName(subject, "EMAILADDRESS", email); appendName(subject, "OU", organizationUnit); appendName(subject, "O", organization); appendName(subject, "L", city); appendName(subject, "ST", state); appendName(subject, "C", country); X500Name issuerName = new X500Name(subject.toString()); CertificateIssuerName certIssuer = new CertificateIssuerName(issuerName); CertificateSubjectName certSubject = new CertificateSubjectName(issuerName); certInfo.set(X509CertInfo.ISSUER, certIssuer); certInfo.set(X509CertInfo.SUBJECT, certSubject); // certInfo.set(X509CertInfo.ISSUER + "." + // CertificateSubjectName.DN_NAME, issuerName); AlgorithmId algorithm = new AlgorithmId(AlgorithmId.sha1WithRSAEncryption_oid); CertificateAlgorithmId certAlgorithm = new CertificateAlgorithmId(algorithm); certInfo.set(X509CertInfo.ALGORITHM_ID, certAlgorithm); CertificateX509Key certPublicKey = new CertificateX509Key(keyPair.getPublic()); certInfo.set(X509CertInfo.KEY, certPublicKey); // certInfo.set(X509CertInfo.ALGORITHM_ID + "." + // CertificateAlgorithmId.ALGORITHM, algorithm); X509CertImpl newCert = new X509CertImpl(certInfo); newCert.sign(keyPair.getPrivate(), "SHA1WithRSA"); return newCert; } private static void encriptTest() throws Exception { // KeyPair test: // 1. 
Generating key pair: System.out.print("Generating key pair..."); System.out.flush(); KeyPair keyPair = createKeyPair(1024, "secret"); System.out.println(" done."); // Encryption/decription test byte[] inputText = "Encription test...".getBytes(); Cipher cipher = Cipher.getInstance("RSA"); System.out.println("Encripting text: " + new String(inputText)); cipher.init(Cipher.ENCRYPT_MODE, keyPair.getPublic()); byte[] cipherText = cipher.doFinal(inputText); System.out.println("Encripted text: " + Algorithms.bytesToHex(cipherText)); cipher.init(Cipher.DECRYPT_MODE, keyPair.getPrivate()); byte[] plainText = cipher.doFinal(cipherText); System.out.println("Decripted text: " + new String(plainText)); } /** * Method description * * * @param entry * * @return * * @throws CertificateEncodingException */ public static String exportToPemFormat(CertificateEntry entry) throws CertificateEncodingException { StringBuilder sb = new StringBuilder(4096); if ((entry.getCertChain() != null) && (entry.getCertChain().length > 0)) { byte[] bytes = entry.getCertChain()[0].getEncoded(); String b64 = Base64.encode(bytes); sb.append(BEGIN_CERT).append('\n').append(b64).append('\n').append(END_CERT).append('\n'); } if (entry.getPrivateKey() != null) { byte[] bytes = entry.getPrivateKey().getEncoded(); String b64 = Base64.encode(bytes); sb.append(BEGIN_KEY).append('\n').append(b64).append('\n').append(END_KEY).append('\n'); } if ((entry.getCertChain() != null) && (entry.getCertChain().length > 1)) { for (int i = 1; i < entry.getCertChain().length; i++) { byte[] bytes = entry.getCertChain()[i].getEncoded(); String b64 = Base64.encode(bytes); sb.append(BEGIN_CERT).append('\n').append(b64).append('\n').append(END_CERT).append('\n'); } } return sb.toString(); } private static String extractValue(byte[] buffer, byte[] id) { try { if (buffer[0] != 0x30) return null; int len = calculateLength(buffer, 0); int offset = calculateOffset(buffer, 0); for (int i = 0; i < id.length; i++) { int j = offset + i; if 
(j >= len) return null; if (id[i] != buffer[j]) return null; } int valStart = offset + id.length; int pos = calculateOffset(buffer, valStart); while (pos < buffer.length) { byte d = buffer[pos]; int cmp = calculateOffset(buffer, pos); int l = calculateLength(buffer, pos); if (d == 0x0c || d == 0x16) { return new String(buffer, cmp, l); } pos = cmp; } return null; } catch (ArrayIndexOutOfBoundsException e) { return null; } } public static List<String> extractXmppAddrs(final X509Certificate x509Certificate) { final ArrayList<String> result = new ArrayList<String>(); try { Collection<List<?>> altNames = x509Certificate.getSubjectAlternativeNames(); if (altNames == null) return result; for (List<?> item : altNames) { Integer type = (Integer) item.get(0); if (type == 0) { byte[] buffer = (byte[]) item.get(1); String jid = extractValue(buffer, ID_ON_XMPPADDR); if (jid != null) { result.add(jid); } } } return result; } catch (Exception e) { return result; } } /** * Method description * * * @param cert * * @return */ public static ArrayList<String> getCertAltCName( X509Certificate cert ) { try { ArrayList<String> result = new ArrayList<>(); Collection<List<?>> subjectAlternativeNames = cert.getSubjectAlternativeNames(); for ( List list : subjectAlternativeNames ) { // we are only interested in dNSName if ( list.get( 0 ).equals( 2 ) ){ result.add( list.get( 1 ).toString() ); } } return result; } catch ( CertificateParsingException e ) { return null; } } /** * Method description * * * @param cert * * @return */ public static String getCertCName(X509Certificate cert) { X500Principal princ = cert.getSubjectX500Principal(); String name = princ.getName(); String[] all = name.split(","); for (String n : all) { String[] ns = n.trim().split("="); if (ns[0].equals("CN")) { return ns[1]; } } return null; } /** * Method description * * * @param cert * * @return */ public static boolean isExpired(X509Certificate cert) { try { cert.checkValidity(); return false; } catch (Exception e) { 
return true; } } /** * Method description * * * @param cert * * @return */ public static boolean isSelfSigned(X509Certificate cert) { return cert.getIssuerDN().equals(cert.getSubjectDN()); } private static void keyPairTest() throws Exception { // KeyPair test: // 1. Generating key pair: System.out.print("Generating key pair..."); System.out.flush(); KeyPair keyPair = createKeyPair(1024, "secret"); System.out.println(" done, private key: " + keyPair.getPrivate() + ", public key: " + keyPair.getPublic()); } /** * Method description * * * @param file * * @return * * @throws CertificateException * @throws FileNotFoundException * @throws IOException * @throws InvalidKeySpecException * @throws NoSuchAlgorithmException */ public static CertificateEntry loadCertificate(File file) throws FileNotFoundException, IOException, CertificateException, NoSuchAlgorithmException, InvalidKeySpecException { return parseCertificate(new FileReader(file)); } /** * Method description * * * @param file * * @return * * * @throws CertificateException * @throws FileNotFoundException * @throws IOException * @throws InvalidKeySpecException * @throws NoSuchAlgorithmException */ public static CertificateEntry loadCertificate(String file) throws FileNotFoundException, IOException, CertificateException, NoSuchAlgorithmException, InvalidKeySpecException { return loadCertificate(new File(file)); } /** * Method description * * * @param file * * @return * * @throws FileNotFoundException * @throws IOException * @throws InvalidKeySpecException * @throws NoSuchAlgorithmException */ public static PrivateKey loadPrivateKeyFromDER(File file) throws FileNotFoundException, IOException, NoSuchAlgorithmException, InvalidKeySpecException { DataInputStream dis = new DataInputStream(new FileInputStream(file)); byte[] privKeyBytes = new byte[(int) file.length()]; dis.read(privKeyBytes); dis.close(); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); PKCS8EncodedKeySpec privSpec = new 
PKCS8EncodedKeySpec(privKeyBytes); RSAPrivateKey privKey = (RSAPrivateKey) keyFactory.generatePrivate(privSpec); return privKey; } /** * Method description * * * @param args * * @throws Exception */ public static void main(String[] args) throws Exception { if ((args != null) && (args.length > 0)) { if (args[0].equals(PRINT_PROVIDERS) || args[0].equals(PRINT_PROVIDERS_SHORT)) { printProviders(false); } if (args[0].equals(PRINT_SERVICES) || args[0].equals(PRINT_SERVICES_SHORT)) { printProviders(true); } if (args[0].equals(KEY_PAIR) || args[0].equals(KEY_PAIR_SHORT)) { keyPairTest(); } if (args[0].equals(ENCRIPT_TEST) || args[0].equals(ENCRIPT_TEST_SHORT)) { encriptTest(); } if (args[0].equals(SELF_SIGNED_CERT) || args[0].equals(SELF_SIGNED_CERT_SHORT)) { selfSignedCertTest(); } if (args[0].equals(LOAD_CERT) || args[0].equals(LOAD_CERT_SHORT)) { String file = args[1]; CertificateEntry ce = loadCertificate(file); System.out.println(ce.toString()); } if (args[0].equals(STORE_CERT) || args[0].equals(STORE_CERT_SHORT)) { String file = args[1]; // Certificate String email = "artur.hefczyc@tigase.org"; String domain = "tigase.org"; String ou = "XMPP Service"; String o = "Tigase.org"; String l = "Cambourne"; String st = "Cambridgeshire"; String c = "UK"; KeyPair keyPair = createKeyPair(1024, "secret"); X509Certificate cert = createSelfSignedCertificate(email, domain, ou, o, l, st, c, keyPair); CertificateEntry entry = new CertificateEntry(); entry.setPrivateKey(keyPair.getPrivate()); entry.setCertChain(new Certificate[] { cert }); storeCertificate(file, entry); } if (args[0].equals(LOAD_DER_PRIVATE_KEY) || args[0].equals(LOAD_DER_PRIVATE_KEY_SHORT)) { String file = args[1]; PrivateKey key = loadPrivateKeyFromDER(new File(file)); System.out.println(key.toString()); } } else { printHelp(); } } /** * Method description * * * @param data * * @return * * @throws CertificateException * @throws IOException * @throws InvalidKeySpecException * @throws NoSuchAlgorithmException */ 
public static CertificateEntry parseCertificate(Reader data) throws IOException, CertificateException, NoSuchAlgorithmException, InvalidKeySpecException { BufferedReader br = new BufferedReader(data); StringBuilder sb = new StringBuilder(4096); List<X509Certificate> certs = new ArrayList<X509Certificate>(); PrivateKey privateKey = null; String line; boolean addToBuffer = false; while ((line = br.readLine()) != null) { if (line.contains(BEGIN_CERT) || line.contains(BEGIN_KEY) || line.contains(BEGIN_RSA_KEY)) { addToBuffer = true; } else if (line.contains(END_CERT)) { addToBuffer = false; byte[] bytes = Base64.decode(sb.toString()); ByteArrayInputStream bais = new ByteArrayInputStream(bytes); CertificateFactory cf = CertificateFactory.getInstance("X.509"); while (bais.available() > 0) { Certificate cert = cf.generateCertificate(bais); certs.add((X509Certificate) cert); } sb = new StringBuilder(4096); } else if (line.contains(END_KEY)) { addToBuffer = false; byte[] bytes = Base64.decode(sb.toString()); PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); privateKey = keyFactory.generatePrivate(keySpec); sb = new StringBuilder(4096); } else if (line.contains(END_RSA_KEY)) { addToBuffer = false; byte[] bytes = Base64.decode(sb.toString()); RSAPrivateKeyDecoder decoder = new RSAPrivateKeyDecoder(bytes); privateKey = decoder.getPrivateKey(); sb = new StringBuilder(4096); } else if (addToBuffer) sb.append(line); } CertificateEntry entry = new CertificateEntry(); entry.setCertChain(certs.toArray(new Certificate[certs.size()])); entry.setPrivateKey(privateKey); return entry; } public static Certificate[] sort(Certificate[] chain) { List<Certificate> res = sort(new ArrayList<Certificate>(Arrays.asList(chain))); return res.toArray(new Certificate[res.size()]); } public static List<Certificate> sort(List<Certificate> certs) { Certificate rt = null; for (Certificate x509Certificate : certs) { Principal i = 
((X509Certificate) x509Certificate).getIssuerDN(); Principal s = ((X509Certificate) x509Certificate).getSubjectDN(); if (i.equals(s)) rt = x509Certificate; } if (rt == null) throw new RuntimeException("Can't find root certificate in chain!"); ArrayList<Certificate> res = new ArrayList<Certificate>(); certs.remove(rt); res.add(rt); while (!certs.isEmpty()) { boolean found = false; for (Certificate x509Certificate : certs) { Principal i = ((X509Certificate) x509Certificate).getIssuerDN(); if (i.equals(((X509Certificate) rt).getSubjectDN())) { rt = x509Certificate; found = true; break; } } if (found) { certs.remove(rt); res.add(0, rt); } else { throw new RuntimeException("Can't find certificate " + ((X509Certificate) rt).getSubjectDN() + " in chain. Verify that all entries are correct and match against each other!"); } } return res; } private static void printHelp() { System.out.println(CertificateUtil.class.getName() + " test code."); System.out.println("You can run following tests:"); System.out.println(" " + PRINT_PROVIDERS + " | " + PRINT_PROVIDERS_SHORT + " - prints all supported providers"); System.out.println(" " + PRINT_SERVICES + " | " + PRINT_SERVICES_SHORT + " - print all supported services"); System.out.println(" " + KEY_PAIR + " | " + KEY_PAIR_SHORT + " - generate a key pair and print the result"); System.out.println(" " + ENCRIPT_TEST + " | " + ENCRIPT_TEST_SHORT + " - encript simple text with public key, decript with private"); System.out.println(" " + SELF_SIGNED_CERT + " | " + SELF_SIGNED_CERT_SHORT + " - generate self signed certificate"); System.out.println(" " + LOAD_CERT + " file.pem | " + LOAD_CERT_SHORT + " file.pem - load certificate from file"); System.out.println(" " + STORE_CERT + " file.pem | " + STORE_CERT_SHORT + " file.pem - generate self-signed certificate and save it to the given pem file"); System.out.println(" " + LOAD_DER_PRIVATE_KEY + " | " + LOAD_DER_PRIVATE_KEY_SHORT + " file.der - load private key from DER file."); } /** * 
Method description * * * @param includeServices */ private static void printProviders(boolean includeServices) { // Initialization, basic information Provider[] providers = Security.getProviders(); if ((providers != null) && (providers.length > 0)) { for (Provider provider : providers) { System.out.println(provider.getName() + "\t" + provider.getInfo()); if (includeServices) { for (Provider.Service service : provider.getServices()) { System.out.println("\t" + service.getAlgorithm()); } } } } else { System.out.println("No security providers found!"); } } private static void selfSignedCertTest() throws Exception { KeyPair keyPair = createKeyPair(1024, "secret"); // Certificate String email = "artur.hefczyc@tigase.org"; String domain = "tigase.org"; String ou = "XMPP Service"; String o = "Tigase.org"; String l = "Cambourne"; String st = "Cambridgeshire"; String c = "UK"; System.out.println("Creating self-signed certificate for issuer: " + domain); X509Certificate cert = createSelfSignedCertificate(email, domain, ou, o, l, st, c, keyPair); System.out.print("Checking certificate validity today..."); System.out.flush(); cert.checkValidity(); System.out.println(" done."); System.out.print("Checking certificate validity yesterday..."); System.out.flush(); try { cert.checkValidity(new Date(System.currentTimeMillis() - (1000 * 3600 * 24))); System.out.println(" error."); } catch (CertificateNotYetValidException e) { System.out.println(" not valid!"); } System.out.print("Verifying certificate with public key..."); System.out.flush(); cert.verify(keyPair.getPublic()); System.out.println(" done."); System.out.println(cert.toString()); } /** * Method description * * * @param file * @param entry * * @throws CertificateEncodingException * @throws IOException */ public static void storeCertificate(String file, CertificateEntry entry) throws CertificateEncodingException, IOException { String pemFormat = exportToPemFormat(entry); File f = new File(file); if (f.exists()) 
f.renameTo(new File(file + ".bak")); FileWriter fw = new FileWriter(f, false); fw.write(pemFormat); fw.close(); } /** * Method description * * * * @param chain * @param revocationEnabled * * @param trustKeystore * @return * * * @throws CertificateException * @throws NoSuchAlgorithmException * @throws KeyStoreException * @throws InvalidAlgorithmParameterException */ public static CertCheckResult validateCertificate(Certificate[] chain, KeyStore trustKeystore, boolean revocationEnabled) throws NoSuchAlgorithmException, KeyStoreException, InvalidAlgorithmParameterException, CertificateException { CertPathValidator certPathValidator = CertPathValidator.getInstance(CertPathValidator.getDefaultType()); X509CertSelector selector = new X509CertSelector(); PKIXBuilderParameters params = new PKIXBuilderParameters(trustKeystore, selector); params.setRevocationEnabled(false); List<Certificate> certList = Arrays.asList(chain); CertPath certPath = CertificateFactory.getInstance("X.509").generateCertPath(certList); try { certPathValidator.validate(certPath, params); return CertCheckResult.trusted; } catch (CertPathValidatorException ex) { if (isExpired((X509Certificate) chain[0])) { return CertCheckResult.expired; } if ((chain.length == 1) && isSelfSigned((X509Certificate) chain[0])) { return CertCheckResult.self_signed; } else { return CertCheckResult.untrusted; } } } } // ~ Formatted in Sun Code Convention
// Triple Play - utilities for use in PlayN-based games

package tripleplay.game;

import java.util.ArrayList;
import java.util.List;

import playn.core.PlayN;
import playn.core.Game;

/**
 * Manages a stack of screens. The stack supports useful manipulations: pushing a new screen onto
 * the stack, replacing the screen at the top of the stack with a new screen, popping a screen from
 * the stack.
 *
 * <p> Care is taken to preserve stack invariants even in the face of errors thrown by screens when
 * being added, removed, shown or hidden. Users can override {@link #handleError} and either simply
 * log the error, or rethrow it if they would prefer that a screen failure render their entire
 * screen stack unusable. </p>
 */
public abstract class ScreenStack {
    /** Pushes {@code screen} onto the stack, hiding the previously visible screen (if any). */
    public void push (Screen screen) {
        if (_screens.contains(screen)) {
            throw new IllegalArgumentException("Cannot add screen to stack twice.");
        }
        if (!_screens.isEmpty()) hide(top());
        add(screen);
    }

    /** Replaces the top of the stack (if any) with {@code screen}. */
    public void replace (Screen screen) {
        if (_screens.contains(screen)) {
            throw new IllegalArgumentException("Cannot add screen to stack twice.");
        }
        if (!_screens.isEmpty()) removeTop();
        add(screen);
    }

    /**
     * Removes the specified screen from the stack. If it is the currently visible screen, it will
     * first be hidden, and the next screen below in the stack will be made visible.
     */
    public boolean remove (Screen screen) {
        // FIX: guard the top() calls — previously an empty stack threw
        // IndexOutOfBoundsException here, and removing the last screen threw
        // from the unconditional show(top()) after removeTop().
        if (!_screens.isEmpty() && top() == screen) {
            removeTop();
            if (!_screens.isEmpty()) show(top());
            return true;

        } else {
            boolean removed = _screens.remove(screen);
            if (removed) {
                try { screen.wasRemoved(); } catch (RuntimeException e) { handleError(e); }
            }
            return removed;
        }
    }

    /**
     * Updates the currently visible screen. A screen stack client should call this method from
     * {@link Game#update}.
     */
    public void update (float delta) {
        if (!_screens.isEmpty()) top().update(delta);
    }

    /**
     * Paints the currently visible screen. A screen stack client should call this method from
     * {@link Game#paint}.
     */
    public void paint (float alpha) {
        if (!_screens.isEmpty()) top().paint(alpha);
    }

    /** Returns the top (visible) screen; assumes a non-empty stack. */
    protected Screen top () {
        return _screens.get(0);
    }

    /** Adds {@code screen} to the top of the stack and shows it. */
    protected void add (Screen screen) {
        _screens.add(0, screen);
        try { screen.wasAdded(); } catch (RuntimeException e) { handleError(e); }
        show(screen);
    }

    /** Attaches the screen's layer and notifies it that it became visible. */
    protected void show (Screen screen) {
        PlayN.graphics().rootLayer().add(screen.layer);
        try { screen.wasShown(); } catch (RuntimeException e) { handleError(e); }
    }

    /** Detaches the screen's layer and notifies it that it was hidden. */
    protected void hide (Screen screen) {
        PlayN.graphics().rootLayer().remove(screen.layer);
        try { screen.wasHidden(); } catch (RuntimeException e) { handleError(e); }
    }

    /** Hides and removes the top screen, notifying it of its removal. */
    protected void removeTop () {
        hide(top());
        Screen screen = _screens.remove(0);
        try { screen.wasRemoved(); } catch (RuntimeException e) { handleError(e); }
    }

    /** Called if any exceptions are thrown by the screen callback functions. */
    protected abstract void handleError (RuntimeException error);

    /** Containts the stacked screens from top-most, to bottom-most. */
    protected final List<Screen> _screens = new ArrayList<Screen>();
}
package com.zuehlke.carrera.javapilot.Utils; import com.zuehlke.carrera.relayapi.messages.PenaltyMessage; import org.apache.commons.math3.stat.regression.SimpleRegression; public class Segment{ TurnStateRecognizer.TurnState turnState; private static final int init_throttle_time=300; private static final int max_penalty_speed=300; private static final int min_penalty_speed=200; private static final int MAX_POWER_STRAIGHT=180; private static final int MAX_POWER_CURVE=150; private int max_power = 150; private int velocity_step=20; private int min_penalized_time = Integer.MAX_VALUE; private boolean penalized=false; private int number_of_penalties=0; private double top_speed; private double last_speed; private double current_penalty_speed; private double sharpness; private boolean stopped; private int throttle_time; private double target_speed; private long initDuration; private int initClocks; SimpleRegression stats; public Segment(TurnStateRecognizer.TurnState turnState){ this.turnState = turnState; top_speed = 0.9 * min_penalty_speed; target_speed = top_speed + velocity_step; last_speed = 0; current_penalty_speed = max_penalty_speed; sharpness = 0; recalcMaxPower(); stopped=true; throttle_time = init_throttle_time; stats = new SimpleRegression(); } private void recalcMaxPower() { //max_power = (int)(power_limit * (0.5 + 0.5 * sharpness)); max_power=150; } public TurnStateRecognizer.TurnState getTurnState() { return turnState; } public double getTopSpeed() { return top_speed; } public int getStep(){ return velocity_step; } public double getTargetSpeed(){ return target_speed; } public double getSharpness() { return sharpness; } public long getInitDuration() { return initDuration; } public void setInitDuration(long initDuration) { this.initDuration = initDuration; } public void setInitClocks(int initClocks) { this.initClocks = initClocks; } public int getInitClocks() { return initClocks; } public void setSharpness(double sharpness) { this.sharpness = sharpness; 
recalcMaxPower(); } public void recordNewData(int throttleTime, double velocityD, boolean stopped){ if(!penalized){ if(turnState== TurnStateRecognizer.TurnState.Straight){ throttleTime+=60; } else { throttle_time+=40; } }else { if(throttle_time<0.75*min_penalized_time){ throttle_time+=Math.min(25,(min_penalized_time-throttle_time)/2); } } top_speed = Math.min(target_speed,max_penalty_speed); target_speed=Math.min(top_speed+velocity_step,max_penalty_speed); if(!stopped) { stats.addData(throttleTime, velocityD); } } public int getThrottleTime(double last_recorded_velocity) { //double targetDelta = target_speed - last_recorded_velocity; //double slope = stats.getSlope(); //double intercept = stats.getIntercept(); //int timeByStat = 0; // if (stats.getN() >= 2) // timeByStat = (int) ((targetDelta - intercept) / slope); // throttle_time = (stopped || stats.getN() < 2) ? Math.max(timeByStat, throttle_time) : timeByStat; return throttle_time; } public int get_max_power(){ if(turnState == TurnStateRecognizer.TurnState.Straight) return MAX_POWER_STRAIGHT; else return MAX_POWER_CURVE; } public boolean isPenalized(){ return penalized; } public void penalize(PenaltyMessage msg) { penalized=true; number_of_penalties++; velocity_step = 0; target_speed = 0.95 * msg.getSpeedLimit(); top_speed = target_speed; min_penalized_time=throttle_time; if(turnState == TurnStateRecognizer.TurnState.Straight) throttle_time=(int)(0.7*throttle_time); else throttle_time=(int)(0.7*throttle_time); } @Override public String toString(){ // return "\n" + turnState.toString() + ":mp=" + this.max_power + ":shrp=" + this.sharpness; // return turnState.toString(); return turnState.toString() + ":" + this.getInitClocks() + ""; } }
package de.braintags.vertx.util.security;

import java.util.List;

import io.vertx.core.json.JsonObject;

/**
 * Fluent configuration holder for JWT generation, convertible to the native
 * {@code io.vertx.ext.jwt.JWTOptions} via {@link #asNativeOptions()}.
 *
 * <p>{@code expiresInMinutes} and {@code expiresInSeconds} are mutually exclusive; setting
 * one while the other is non-zero throws {@link IllegalArgumentException}.</p>
 */
public class JWTOptions {

  // FIX: previously a single pre-constructed IllegalArgumentException was cached in a
  // static field and rethrown — a cached exception carries the stack trace of class
  // initialization, not of the offending call. Keep the message, throw fresh instances.
  private static final String INCOMPATIBLE_CONFIG_MESSAGE = "Can only set expiresIn to seconds or minutes";

  private JsonObject header;
  private String algorithm = "HS256";
  private boolean noTimestamp = false;
  private int expiresInMinutes;
  private int expiresInSeconds;
  private List<String> audience;
  private String issuer;
  private String subject;

  public JsonObject getHeader() {
    return header;
  }

  public JWTOptions setHeader(final JsonObject header) {
    this.header = header;
    return this;
  }

  public String getAlgorithm() {
    return algorithm;
  }

  public JWTOptions setAlgorithm(final String algorithm) {
    this.algorithm = algorithm;
    return this;
  }

  public boolean isNoTimestamp() {
    return noTimestamp;
  }

  public JWTOptions setNoTimestamp(final boolean noTimestamp) {
    this.noTimestamp = noTimestamp;
    return this;
  }

  public long getExpiresInMinutes() {
    return expiresInMinutes;
  }

  /**
   * Sets the expiration in minutes.
   *
   * @throws IllegalArgumentException if an expiration in seconds was already set
   */
  public JWTOptions setExpiresInMinutes(final int expiresInMinutes) {
    if (expiresInSeconds != 0) {
      throw new IllegalArgumentException(INCOMPATIBLE_CONFIG_MESSAGE);
    }
    this.expiresInMinutes = expiresInMinutes;
    return this;
  }

  public long getExpiresInSeconds() {
    return expiresInSeconds;
  }

  /**
   * Sets the expiration in seconds.
   *
   * @throws IllegalArgumentException if an expiration in minutes was already set
   */
  public JWTOptions setExpiresInSeconds(final int expiresInSeconds) {
    if (expiresInMinutes != 0) {
      throw new IllegalArgumentException(INCOMPATIBLE_CONFIG_MESSAGE);
    }
    this.expiresInSeconds = expiresInSeconds;
    return this;
  }

  public List<String> getAudience() {
    return audience;
  }

  public JWTOptions setAudience(final List<String> audience) {
    this.audience = audience;
    return this;
  }

  public String getIssuer() {
    return issuer;
  }

  public JWTOptions setIssuer(final String issuer) {
    this.issuer = issuer;
    return this;
  }

  public String getSubject() {
    return subject;
  }

  public JWTOptions setSubject(final String subject) {
    this.subject = subject;
    return this;
  }

  /**
   * Copies all configured values onto a freshly constructed native options instance.
   * Nullable fields are only transferred when set; zero expirations are skipped.
   */
  public io.vertx.ext.jwt.JWTOptions asNativeOptions() {
    io.vertx.ext.jwt.JWTOptions options = new io.vertx.ext.jwt.JWTOptions();
    options.setAlgorithm(algorithm);
    options.setNoTimestamp(noTimestamp);
    if (header != null) {
      options.setHeader(header);
    }
    if (audience != null) {
      options.setAudience(audience);
    }
    if (issuer != null) {
      options.setIssuer(issuer);
    }
    if (subject != null) {
      options.setSubject(subject);
    }
    if (expiresInMinutes != 0) {
      options.setExpiresInMinutes(expiresInMinutes);
    }
    if (expiresInSeconds != 0) {
      options.setExpiresInSeconds(expiresInSeconds);
    }
    return options;
  }
}
package gov.nasa.jpl.mbee.mdk.emf; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.nomagic.magicdraw.core.Project; import com.nomagic.magicdraw.openapi.uml.ReadOnlyElementException; import com.nomagic.magicdraw.openapi.uml.SessionManager; import com.nomagic.magicdraw.uml.transaction.RepositoryModelValidator; import com.nomagic.task.ProgressStatus; import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Element; import com.nomagic.uml2.transaction.ModelValidationResult; import gov.nasa.jpl.mbee.mdk.api.incubating.MDKConstants; import gov.nasa.jpl.mbee.mdk.api.incubating.annotations.SessionManaged; import gov.nasa.jpl.mbee.mdk.api.incubating.convert.Converters; import gov.nasa.jpl.mbee.mdk.api.incubating.convert.JsonToElementFunction; import gov.nasa.jpl.mbee.mdk.ems.ImportException; import gov.nasa.jpl.mbee.mdk.ems.json.JsonEquivalencePredicate; import gov.nasa.jpl.mbee.mdk.lib.Changelog; import gov.nasa.jpl.mbee.mdk.lib.MDUtils; import gov.nasa.jpl.mbee.mdk.lib.Pair; import java.text.NumberFormat; import java.util.*; import java.util.function.BiFunction; // TODO What about locks? 
// NOTE(review): "@donbot" is not a standard Java annotation — it is either a
// project-defined marker annotation or an extraction artifact; confirm it resolves
// before building.
@donbot
public class EMFBulkImporter implements BulkImportFunction {
    private final String sessionName;
    // Number of SessionManager sessions created by this importer; appended to session names.
    private int sessionCount;
    // Result of the last apply() run: sysml id -> (imported element, source JSON).
    private Changelog<String, Pair<Element, ObjectNode>> changelog;
    // Elements that failed import or post-import validation, mapped to the causing exception.
    private Map<Pair<Element, ObjectNode>, Exception> failedElementMap;
    // Imported elements whose re-serialized JSON was not equivalent to the source JSON
    // (treated as a warning, not an error — see the equivalence check in apply()).
    private Map<Element, ObjectNode> nonEquivalentElements;
    // sysml id -> element created during the current bulk pass; lets the converter resolve
    // ids for elements that exist only in the still-open session.
    private Map<String, Element> elementCache;

    // Id resolver used throughout a bulk pass: tries the project-level converter first,
    // then falls back to the per-pass cache of freshly created elements.
    private final BiFunction<String, Project, Element> bulkIdToElementConverter = (id, project) -> {
        Element element = Converters.getIdToElementConverter().apply(id, project);
        System.out.println("[NO CACHE] " + id + " -> " + element);
        if (element == null && elementCache != null) {
            element = elementCache.get(id);
            System.out.println("[CACHE] " + id + " -> " + element);
        }
        return element;
    };

    public EMFBulkImporter(String sessionName) {
        this.sessionName = sessionName;
    }

    /**
     * Bulk-imports the given JSON object nodes into the project.
     *
     * <p>Runs up to three passes inside one editing session: (1) create/update each element
     * without relationships, deferring failures for a retry; (1.5) retry the deferrals;
     * (2) re-apply each element with relationships; then validates every changed element.
     * Any hard failure aborts the session, removes the offending node, and restarts the
     * whole import (the labeled {@code continue bulkImport}) with the remaining nodes.</p>
     *
     * @param objectNodes    source JSON for the elements to import (copied defensively)
     * @param project        target MagicDraw project
     * @param progressStatus optional progress reporter; saved state is restored in the
     *                       {@code finally} block
     * @return the changelog of the final successful pass (may reflect a partial set if
     *         nodes were dropped as failed)
     */
    @SessionManaged
    @Override
    public Changelog<String, Pair<Element, ObjectNode>> apply(Collection<ObjectNode> objectNodes, Project project, ProgressStatus progressStatus) {
        // Snapshot the progress UI state so it can be restored when we're done.
        String initialProgressStatusDescription = null;
        long initialProgressStatusCurrent = 0;
        boolean initialProgressStatusIndeterminate = false;
        RepositoryModelValidator validator = new RepositoryModelValidator(project);
        if (progressStatus != null) {
            initialProgressStatusDescription = progressStatus.getDescription();
            initialProgressStatusCurrent = progressStatus.getCurrent();
            initialProgressStatusIndeterminate = progressStatus.isIndeterminate();
            // Three units of work per node: attempt 1, attempt 2, validation.
            progressStatus.setMax(objectNodes.size() * 3);
            progressStatus.setCurrent(0);
        }
        try {
            // Defensive copy — nodes are removed from this list as they fail.
            objectNodes = new ArrayList<>(objectNodes);
            failedElementMap = new LinkedHashMap<>(objectNodes.size());
            nonEquivalentElements = new LinkedHashMap<>();
            Map<Element, Changelog.ChangeType> changeTypeMap = new HashMap<>(objectNodes.size());
            // Importer specialized for bulk use: owner resolution and element creation are
            // rerouted through bulkIdToElementConverter so not-yet-committed elements resolve.
            JsonToElementFunction jsonToElementFunction = new EMFImporter() {
                @Override
                protected List<EStructuralFeatureOverride> getEStructuralFeatureOverrides() {
                    if (eStructuralFeatureOverrides == null) {
                        eStructuralFeatureOverrides = new ArrayList<>(super.getEStructuralFeatureOverrides());
                        eStructuralFeatureOverrides.remove(EStructuralFeatureOverride.OWNER);
                        eStructuralFeatureOverrides.add(EStructuralFeatureOverride.getOwnerEStructuralFeatureOverride(bulkIdToElementConverter));
                    }
                    return eStructuralFeatureOverrides;
                }

                @Override
                protected List<PreProcessor> getPreProcessors() {
                    if (preProcessors == null) {
                        preProcessors = new ArrayList<>(super.getPreProcessors());
                        preProcessors.remove(PreProcessor.CREATE);
                        preProcessors.add(0, PreProcessor.getCreatePreProcessor(bulkIdToElementConverter));
                    }
                    return preProcessors;
                }

                @Override
                protected BiFunction<String, Project, Element> getIdToElementConverter() {
                    return bulkIdToElementConverter;
                }
            };
            // Each iteration is one complete import attempt in a fresh session; a failure
            // anywhere inside jumps back here via `continue bulkImport` with the bad node removed.
            bulkImport:
            while (/*failedElementMap.isEmpty() && */!objectNodes.isEmpty()) {
                changelog = new Changelog<>();
                elementCache = new HashMap<>();
                List<ObjectNode> retryObjectNodes = new ArrayList<>();
                // Discard any session left over from a previous (failed) pass.
                if (SessionManager.getInstance().isSessionCreated()) {
                    SessionManager.getInstance().cancelSession();
                }
                SessionManager.getInstance().createSession(project, sessionName + " x" + objectNodes.size() + " #" + ++sessionCount);
                if (progressStatus != null) {
                    progressStatus.setDescription(sessionName + " - " + NumberFormat.getInstance().format(objectNodes.size()) + " elements" + (!failedElementMap.isEmpty() ? " - " + NumberFormat.getInstance().format(failedElementMap.size()) + " failed" : ""));
                    progressStatus.setCurrent(progressStatus.getMax() - objectNodes.size() * 3);
                }
                // --- Pass 1: create/update each element WITHOUT relationships. ---
                Iterator<ObjectNode> iterator = objectNodes.iterator();
                while (iterator.hasNext()) {
                    ObjectNode objectNode = iterator.next();
                    JsonNode sysmlIdJsonNode = objectNode.get(MDKConstants.SYSML_ID_KEY);
                    String sysmlId = sysmlIdJsonNode != null && sysmlIdJsonNode.isTextual() ? sysmlIdJsonNode.asText() : null;
                    if (MDUtils.isDeveloperMode()) {
                        System.out.println("[ATTEMPT 1] Attempting " + sysmlId);
                    }
                    Changelog.Change<Element> change = null;
                    try {
                        change = jsonToElementFunction.apply(objectNode, project, false);
                    } catch (ImportException | ReadOnlyElementException ignored) {
                        // Intentionally ignored: a pass-1 failure is retried in pass 1.5 below.
                    }
                    if (change == null || change.getChanged() == null) {
                        if (MDUtils.isDeveloperMode()) {
                            System.err.println("[FAILED 1] Could not create " + sysmlId);
                        }
                        // Element may fail to create on first pass, ex: Diagram (because owner doesn't exist yet + custom creation), so we need to retry after everything else.
                        retryObjectNodes.add(objectNode);
                        //failedElementMap.put(new Pair<>(Converters.getIdToElementConverter().apply(objectNode.get(MDKConstants.SYSML_ID_KEY).asText(), project), objectNode), importException);
                        //iterator.remove();
                        //continue bulkImport;
                    }
                    else {
                        if (MDUtils.isDeveloperMode()) {
                            System.out.println("[SUCCESS 1] Imported " + sysmlId);
                        }
                        if (sysmlId != null) {
                            elementCache.put(sysmlId, change.getChanged());
                        }
                        // Remember the original change type (Created vs Updated) — pass 2 would
                        // otherwise report everything as an update.
                        changeTypeMap.put(change.getChanged(), change.getType());
                    }
                    //changelog.addChange(Converters.getElementToIdConverter().apply(change.getChanged()), new Pair<>(change.getChanged(), objectNode), change.getType());
                    if (progressStatus != null) {
                        progressStatus.increase();
                    }
                }
                // --- Pass 1.5: retry the pass-1 failures; a second failure is fatal for the node. ---
                for (ObjectNode objectNode : retryObjectNodes) {
                    JsonNode sysmlIdJsonNode = objectNode.get(MDKConstants.SYSML_ID_KEY);
                    String sysmlId = sysmlIdJsonNode != null && sysmlIdJsonNode.isTextual() ? sysmlIdJsonNode.asText() : null;
                    if (MDUtils.isDeveloperMode()) {
                        System.out.println("[ATTEMPT 1.5] Attempting " + sysmlId);
                    }
                    Changelog.Change<Element> change = null;
                    Exception exception = new ImportException(null, objectNode, "Failed to create/update element.");
                    try {
                        change = jsonToElementFunction.apply(objectNode, project, false);
                    } catch (ImportException | ReadOnlyElementException e) {
                        exception = e;
                    }
                    if (change == null || change.getChanged() == null) {
                        if (MDUtils.isDeveloperMode()) {
                            System.err.println("[FAILED 1.5] Could not create " + sysmlId);
                        }
                        failedElementMap.put(new Pair<>(Converters.getIdToElementConverter().apply(objectNode.get(MDKConstants.SYSML_ID_KEY).asText(), project), objectNode), exception);
                        // Drop the node and restart the whole import without it.
                        objectNodes.remove(objectNode);
                        continue bulkImport;
                    }
                    else {
                        if (MDUtils.isDeveloperMode()) {
                            System.out.println("[SUCCESS 1.5] Imported " + sysmlId);
                        }
                        if (sysmlId != null) {
                            elementCache.put(sysmlId, change.getChanged());
                        }
                        changeTypeMap.put(change.getChanged(), change.getType());
                    }
                }
                // --- Pass 2: re-apply each element WITH relationships (owners now exist). ---
                iterator = objectNodes.iterator();
                while (iterator.hasNext()) {
                    ObjectNode objectNode = iterator.next();
                    JsonNode sysmlIdJsonNode = objectNode.get(MDKConstants.SYSML_ID_KEY);
                    String sysmlId = sysmlIdJsonNode != null && sysmlIdJsonNode.isTextual() ? sysmlIdJsonNode.asText() : "<>";
                    if (MDUtils.isDeveloperMode()) {
                        System.out.println("[ATTEMPT 2] Attempting " + sysmlId);
                    }
                    Changelog.Change<Element> change = null;
                    Exception exception = new ImportException(null, objectNode, "Failed to create/update element with relationships.");
                    try {
                        change = jsonToElementFunction.apply(objectNode, project, true);
                    } catch (ImportException | ReadOnlyElementException e) {
                        exception = e;
                    }
                    if (change == null || change.getChanged() == null) {
                        if (MDUtils.isDeveloperMode()) {
                            System.err.println("[FAILED 2] Could not import " + sysmlId);
                        }
                        failedElementMap.put(new Pair<>(Converters.getIdToElementConverter().apply(objectNode.get(MDKConstants.SYSML_ID_KEY).asText(), project), objectNode), exception);
                        iterator.remove();
                        continue bulkImport;
                    }
                    else {
                        if (MDUtils.isDeveloperMode()) {
                            System.out.println("[SUCCESS 2] Imported " + sysmlId);
                        }
                        if (sysmlId != null) {
                            elementCache.put(sysmlId, change.getChanged());
                        }
                        // Prefer the change type recorded in pass 1 (e.g. Created) over the
                        // pass-2 type, which would always read as an update.
                        Changelog.ChangeType changeType = changeTypeMap.get(change.getChanged());
                        changelog.addChange(Converters.getElementToIdConverter().apply(change.getChanged()), new Pair<>(change.getChanged(), objectNode), changeType != null ? changeType : change.getType());
                    }
                    if (progressStatus != null) {
                        progressStatus.increase();
                    }
                }
                // --- Pass 3: validate every changed element before committing the session. ---
                for (Changelog.ChangeType changeType : Changelog.ChangeType.values()) {
                    for (Map.Entry<String, Pair<Element, ObjectNode>> entry : changelog.get(changeType).entrySet()) {
                        Element element = entry.getValue().getFirst();
                        ObjectNode objectNode = entry.getValue().getSecond();
                        Collection<ModelValidationResult> results = validator.validateChanges(Collections.singleton(element));
                        if (results != null && !results.isEmpty()) {
                            ModelValidationResult result = results.iterator().next();
                            if (MDUtils.isDeveloperMode()) {
                                System.err.println("[FAILED 3] " + result.toString());
                            }
                            failedElementMap.put(new Pair<>(element, objectNode), new ImportException(element, objectNode, "Element failed validation after importing. Reason: " + result.getReason()));
                            objectNodes.remove(objectNode);
                            continue bulkImport;
                        }
                        if (element.isInvalid()) {
                            if (MDUtils.isDeveloperMode()) {
                                JsonNode sysmlIdJsonNode = objectNode.get(MDKConstants.SYSML_ID_KEY);
                                String sysmlId = sysmlIdJsonNode != null && sysmlIdJsonNode.isTextual() ? sysmlIdJsonNode.asText() : "<>";
                                System.err.println("[FAILED 4] Could not create " + sysmlId);
                            }
                            failedElementMap.put(new Pair<>(element, objectNode), new ImportException(element, objectNode, "Element was found to be invalid after importing."));
                            objectNodes.remove(objectNode);
                            continue bulkImport;
                        }
                        // Round-trip check: re-serialize the element and compare with the source JSON.
                        ObjectNode sourceObjectNode = Converters.getElementToJsonConverter().apply(element, project);
                        if (!JsonEquivalencePredicate.getInstance().test(sourceObjectNode, objectNode)) {
                            // currently handled as a warning instead of an error
                            nonEquivalentElements.put(element, objectNode);
                        }
                        if (progressStatus != null) {
                            progressStatus.increase();
                        }
                    }
                }
                // All passes succeeded: commit the session and leave the retry loop.
                if (SessionManager.getInstance().isSessionCreated()) {
                    SessionManager.getInstance().closeSession();
                }
                break;
            }
        } finally {
            // A session still open here means the import did not commit — roll it back,
            // then restore the progress UI to its pre-call state.
            if (SessionManager.getInstance().isSessionCreated()) {
                SessionManager.getInstance().cancelSession();
            }
            if (progressStatus != null) {
                progressStatus.setDescription(initialProgressStatusDescription);
                progressStatus.setCurrent(initialProgressStatusCurrent);
                progressStatus.setIndeterminate(initialProgressStatusIndeterminate);
            }
        }
        return changelog;
    }

    public String getSessionName() {
        return sessionName;
    }

    public int getSessionCount() {
        return sessionCount;
    }

    /** Changelog of the last apply() run; null before the first run. */
    public Changelog<String, Pair<Element, ObjectNode>> getChangelog() {
        return changelog;
    }

    /** Failures from the last apply() run; null before the first run. */
    public Map<Pair<Element, ObjectNode>, Exception> getFailedElementMap() {
        return failedElementMap;
    }

    /** Non-equivalent (warning-level) elements from the last apply() run; null before the first run. */
    public Map<Element, ObjectNode> getNonEquivalentElements() {
        return nonEquivalentElements;
    }
}
package de.nrw.hbz.regal.api.helper;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.Date;
import java.util.List;

import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.util.URIUtil;
import org.apache.commons.io.IOUtils;
import org.openrdf.model.Literal;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFWriter;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.BasicWriterSettings;
import org.openrdf.rio.helpers.JSONLDMode;
import org.openrdf.rio.helpers.JSONLDSettings;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.stringtemplate.v4.ST;

import de.nrw.hbz.regal.api.CreateObjectBean;
import de.nrw.hbz.regal.datatypes.Node;
import de.nrw.hbz.regal.fedora.FedoraInterface;

/**
 * Builds external representations (OAI-ORE resource maps, HTML views, JSON)
 * of Fedora objects for the regal API.
 */
class Representations {

    final static Logger logger = LoggerFactory.getLogger(Representations.class);

    FedoraInterface fedora = null;
    String server = null;
    // Base URI for resource links, e.g. "<server>/resource/".
    String uriPrefix = null;

    public Representations(FedoraInterface fedora, String server) {
        this.fedora = fedora;
        this.server = server;
        uriPrefix = server + "/" + "resource" + "/";
    }

    /**
     * @param pid
     *            the pid
     * @param format
     *            application/rdf+xml text/plain application/json
     * @param metadata
     *            where to read metadata from
     * @param lastModified
     *            when the object was last modified
     * @param fedoraHost
     *            the fedoraHost for externals
     * @param parents
     *            all parents of the pid
     * @param children
     *            all children of the pid
     * @return a oai_ore resource map
     */
    public String getReM(String pid, String format, URL metadata,
            Date lastModified, String fedoraHost, List<String> parents,
            List<String> children) {
        String result = null;
        Node node = fedora.readNode(pid);
        @SuppressWarnings("unused")
        String dcNamespace = "http://purl.org/dc/elements/1.1/";
        String dctermsNamespace = "http://purl.org/dc/terms/";
        @SuppressWarnings("unused")
        String foafNamespace = "http://xmlns.com/foaf/0.1/";
        // TODO(review): the four namespace literals below were truncated in the
        // source this was recovered from. They have been reconstructed from the
        // standard OAI-ORE/RDF/RDFS vocabularies and from the
        // "http://hbz-nrw.de/regal#contentType" URI compared against in
        // MyTriple.toString() — confirm against project history.
        String oreNamespace = "http://www.openarchives.org/ore/terms/";
        @SuppressWarnings("unused")
        String rdfNamespace = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
        @SuppressWarnings("unused")
        String rdfsNamespace = "http://www.w3.org/2000/01/rdf-schema#";
        String regalNamespace = "http://hbz-nrw.de/regal#";
        InputStream in = null;
        RepositoryConnection con = null;
        try {
            SailRepository myRepository = new SailRepository(new MemoryStore());
            myRepository.initialize();
            con = myRepository.getConnection();
            String baseURI = "";
            try {
                in = metadata.openStream();
                con.add(in, baseURI, RDFFormat.N3);
            } catch (Exception e) {
                // Best-effort: a missing/unreadable metadata stream only means the
                // resource map lacks the harvested metadata statements.
                logger.warn(e.getMessage());
            }
            ValueFactory f = myRepository.getValueFactory();
            // Links
            View view = getExternalLinks(pid);
            // Things
            URI aggregation = f.createURI(/* uriPrefix + */pid);
            URI rem = f.createURI(/* uriPrefix + */pid + ".rdf");
            URI regal = f.createURI("https://github.com/edoweb/regal/");
            URI data = f.createURI(aggregation.stringValue() + "/data");
            URI fulltext = f.createURI(aggregation.stringValue() + "/fulltext");
            Literal cType = f.createLiteral(node.getContentType());
            Literal lastTimeModified = f.createLiteral(lastModified);
            String mime = node.getMimeType();
            // Predicates
            // ore
            URI describes = f.createURI(oreNamespace, "describes");
            URI isDescribedBy = f.createURI(oreNamespace, "isDescribedBy");
            URI aggregates = f.createURI(oreNamespace, "aggregates");
            URI isAggregatedBy = f.createURI(oreNamespace, "isAggregatedBy");
            URI similarTo = f.createURI(oreNamespace, "similarTo");
            URI isPartOf = f.createURI(dctermsNamespace, "isPartOf");
            URI hasPart = f.createURI(dctermsNamespace, "hasPart");
            URI modified = f.createURI(dctermsNamespace, "modified");
            URI creator = f.createURI(dctermsNamespace, "creator");
            URI dcFormat = f.createURI(dctermsNamespace, "format");
            URI dcHasFormat = f.createURI(dctermsNamespace, "hasFormat");
            // regal
            URI contentType = f.createURI(regalNamespace, "contentType");
            // Statements
            if (mime != null && !mime.isEmpty()) {
                Literal dataMime = f.createLiteral(mime);
                con.add(data, dcFormat, dataMime);
                con.add(aggregation, aggregates, data);
                // PDFs additionally aggregate an extracted fulltext resource.
                if (dataMime.toString().compareTo("application/pdf") == 0) {
                    con.add(aggregation, aggregates, fulltext);
                    con.add(data, dcHasFormat, fulltext);
                }
            }
            String str = getOriginalUri(pid);
            if (str != null && !str.isEmpty()) {
                URI originalObject = f.createURI(str);
                con.add(aggregation, similarTo, originalObject);
            }
            str = view.getFirstLobidUrl();
            if (str != null && !str.isEmpty()) {
                URI lobidResource = f.createURI(str);
                con.add(aggregation, similarTo, lobidResource);
            }
            str = view.getFirstVerbundUrl();
            if (str != null && !str.isEmpty()) {
                URI catalogResource = f.createURI(str);
                con.add(aggregation, similarTo, catalogResource);
            }
            URI fedoraObject = f.createURI(fedoraHost + "/objects/" + pid);
            con.add(rem, describes, aggregation);
            con.add(rem, modified, lastTimeModified);
            con.add(rem, creator, regal);
            con.add(aggregation, isDescribedBy, rem);
            con.add(aggregation, similarTo, fedoraObject);
            con.add(aggregation, contentType, cType);
            for (String relPid : parents) {
                URI relUrl = f.createURI(/* uriPrefix + */relPid);
                con.add(aggregation, isAggregatedBy, relUrl);
                con.add(aggregation, isPartOf, relUrl);
            }
            for (String relPid : children) {
                URI relUrl = f.createURI(/* uriPrefix + */relPid);
                con.add(aggregation, aggregates, relUrl);
                con.add(aggregation, hasPart, relUrl);
            }
            StringWriter out = new StringWriter();
            RDFWriter writer = null;
            if (format.compareTo("application/rdf+xml") == 0) {
                writer = Rio.createWriter(RDFFormat.RDFXML, out);
            } else if (format.compareTo("text/plain") == 0) {
                writer = Rio.createWriter(RDFFormat.NTRIPLES, out);
            } else if (format.compareTo("application/json") == 0) {
                writer = Rio.createWriter(RDFFormat.JSONLD, out);
                writer.getWriterConfig().set(JSONLDSettings.JSONLD_MODE,
                        JSONLDMode.EXPAND);
                writer.getWriterConfig().set(BasicWriterSettings.PRETTY_PRINT,
                        true);
            } else if (format.compareTo("text/html") == 0) {
                // TODO: This will work one day
                // writer = Rio.createWriter(RDFFormat.RDFA, out);
                writer = Rio.createWriter(RDFFormat.NTRIPLES, out);
                try {
                    writer.startRDF();
                    RepositoryResult<Statement> statements = con.getStatements(
                            null, null, null, false);
                    while (statements.hasNext()) {
                        Statement statement = statements.next();
                        writer.handleStatement(statement);
                    }
                    writer.endRDF();
                    result = out.toString();
                } catch (RDFHandlerException e) {
                    logger.error(e.getMessage());
                }
                return getHtml(result, mime, pid);
            } else if (format.compareTo("application/json+elasticsearch") == 0) {
                writer = Rio.createWriter(RDFFormat.JSONLD, out);
                writer.getWriterConfig().set(JSONLDSettings.JSONLD_MODE,
                        JSONLDMode.EXPAND);
                writer.getWriterConfig().set(BasicWriterSettings.PRETTY_PRINT,
                        true);
                try {
                    writer.startRDF();
                    RepositoryResult<Statement> statements = con.getStatements(
                            null, null, null, false);
                    while (statements.hasNext()) {
                        Statement statement = statements.next();
                        // Elasticsearch only gets statements about the pid itself.
                        if (statement.getSubject().stringValue().endsWith(pid))
                            writer.handleStatement(statement);
                    }
                    writer.endRDF();
                    result = out.toString();
                    // Strip the enclosing JSON-LD array brackets.
                    result = result.substring(1, result.length() - 1);
                    return result;
                } catch (RDFHandlerException e) {
                    logger.error(e.getMessage());
                }
            } else {
                throw new HttpArchiveException(406, format
                        + " is not supported");
            }
            try {
                writer.startRDF();
                RepositoryResult<Statement> statements = con.getStatements(
                        null, null, null, false);
                while (statements.hasNext()) {
                    Statement statement = statements.next();
                    writer.handleStatement(statement);
                }
                writer.endRDF();
                result = out.toString();
            } catch (RDFHandlerException e) {
                logger.error(e.getMessage());
            }
        } catch (RepositoryException e) {
            logger.error(e.getMessage());
        } finally {
            // FIX: the metadata stream was previously never closed (resource leak).
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    logger.warn(e.getMessage());
                }
            }
            if (con != null) {
                try {
                    con.close();
                } catch (RepositoryException e) {
                    logger.error(e.getMessage());
                    e.printStackTrace();
                }
            }
        }
        return result;
    }

    /**
     * Collects external identifier links (doi, urn, aleph) from the node's
     * identifier list into a fresh View.
     */
    private View getExternalLinks(String pid) {
        View view = new View();
        Node node = fedora.readNode(pid);
        for (String id : node.getBean().getIdentifier()) {
            if (id.startsWith("doi")) {
                view.addDoi(id);
            } else if (id.startsWith("urn")) {
                view.addUrn(id);
                break;
            } else if (id.startsWith("HT")) {
                view.addAlephId(id);
                break;
            } else if (id.startsWith("TT")) {
                view.addAlephId(id);
                break;
            } else {
                view.addIdentifier(id);
            }
        }
        return view;
    }

    /**
     * Maps a pid to the URL of the original object in its source repository,
     * based on the namespace embedded in the pid; null if no mapping applies.
     */
    private String getOriginalUri(String pid) {
        String pidWithoutNamespace = pid.substring(pid.indexOf(':') + 1);
        String originalUri = null;
        if (pid.contains("edoweb") || pid.contains("ellinet")) {
            if (pid.length() <= 17) {
                originalUri = "http://klio.hbz-nrw.de:1801/webclient/MetadataManager?pid="
                        + pidWithoutNamespace;
            }
        }
        if (pid.contains("dipp")) {
            originalUri = "http://193.30.112.23:9280/fedora/get/" + pid
                    + "/QDC";
        }
        if (pid.contains("ubm")) {
            originalUri = "http://ubm.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de";
        }
        if (pid.contains("fhdd")) {
            originalUri = "http://fhdd.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de";
        }
        if (pid.contains("kola")) {
            originalUri = "http://kola.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de";
        }
        return originalUri;
    }

    /**
     * Renders N3 metadata as an HTML page via the html.html StringTemplate.
     *
     * @param rdf  N3 serialization of the object's statements
     * @param mime the object's data mime type (selects the data icon), may be null
     * @param pid  the object's pid
     */
    private String getHtml(String rdf, String mime, String pid) {
        String result = "";
        RepositoryConnection con = null;
        try {
            java.net.URL fileLocation = Thread.currentThread()
                    .getContextClassLoader().getResource("html.html");
            StringWriter writer = new StringWriter();
            IOUtils.copy(fileLocation.openStream(), writer);
            String data = writer.toString();
            ST st = new ST(data, '$', '$');
            st.add("serverRoot", server);
            if (mime != null) {
                String dataLink = uriPrefix + pid + "/data";
                String logoLink = "";
                if (mime.compareTo("application/pdf") == 0) {
                    logoLink = "/pdflogo.svg";
                } else if (mime.compareTo("application/zip") == 0) {
                    logoLink = "/zip.png";
                } else {
                    logoLink = "/data.png";
                }
                st.add("data", "<tr><td class=\"textlink\"><a href=\""
                        + dataLink + "\"><img src=\"" + logoLink
                        + "\" width=\"100\" /></a></td></tr>");
            } else {
                st.add("data", "");
            }
            SailRepository myRepository = new SailRepository(new MemoryStore());
            myRepository.initialize();
            con = myRepository.getConnection();
            String baseURI = "";
            try {
                con.add(new StringReader(rdf), baseURI, RDFFormat.N3);
                RepositoryResult<Statement> statements = con.getStatements(
                        null, null, null, false);
                while (statements.hasNext()) {
                    Statement statement = statements.next();
                    String subject = statement.getSubject().stringValue();
                    String predicate = statement.getPredicate().stringValue();
                    String object = statement.getObject().stringValue();
                    MyTriple triple = new MyTriple(subject, predicate, object,
                            pid);
                    // TODO(review): the three ORE predicate URIs below were truncated
                    // ("http:) in the recovered source; they have been reconstructed
                    // from the OAI-ORE vocabulary and must be confirmed against
                    // project history.
                    if (predicate.compareTo("http://purl.org/dc/terms/hasPart") == 0
                            || predicate
                                    .compareTo("http://purl.org/dc/terms/isPartOf") == 0) {
                        st.add("relations", triple);
                    } else if (predicate
                            .compareTo("http://www.openarchives.org/ore/terms/aggregates") == 0
                            || predicate
                                    .compareTo("http://www.openarchives.org/ore/terms/isAggregatedBy") == 0) {
                        // do nothing!;
                    } else if (predicate
                            .compareTo("http://www.openarchives.org/ore/terms/similarTo") == 0) {
                        st.add("links", triple);
                    } else {
                        st.add("statements", triple);
                    }
                }
                result = st.render();
            } catch (Exception e) {
                logger.warn(e.getMessage());
            }
        } catch (RepositoryException e) {
            logger.error(e.getMessage());
        } catch (IOException e) {
            logger.error(e.getMessage());
        } finally {
            if (con != null) {
                try {
                    con.close();
                } catch (RepositoryException e) {
                    logger.error(e.getMessage());
                    e.printStackTrace();
                }
            }
        }
        return result;
    }

    /**
     * @param pid
     *            The pid of an existing object.
     * @param oaidc
     *            the oaidc data of the object
     * @param fedoraExtern
     *            the fedora entpoint url for external users
     * @return the view of the object
     */
    public View getView(String pid, String oaidc, String fedoraExtern) {
        Node node = fedora.readNode(pid);
        fedora.readDcToNode(node, new ByteArrayInputStream(oaidc.getBytes()),
                "oai_dc");
        return getView(node, fedoraExtern);
    }

    /**
     * @param node
     *            An object as node
     * @param fedoraExtern
     *            the fedora endpoint url for external users
     * @return the view of the object.
     */
    View getView(Node node, String fedoraExtern) {
        String pid = node.getPID();
        String uri = pid;
        String apiUrl = server + "/resource/" + pid;
        View view = new View();
        view.setLastModified(node.getLastModified());
        view.setCreator(node.getBean().getCreator());
        view.setTitle(node.getBean().getTitle());
        view.setLanguage(node.getBean().getLanguage());
        view.setSubject(node.getBean().getSubject());
        view.setType(node.getBean().getType());
        view.setLocation(node.getBean().getSource());
        view.setPublisher(node.getBean().getPublisher());
        view.setDescription(node.getBean().getDescription());
        view.setContributer(node.getBean().getContributer());
        String label = node.getLabel();
        if (label != null && !label.isEmpty())
            view.addDescription(label);
        view.setUri(uri);
        view.setApiUrl(apiUrl);
        view.setContentType(node.getContentType());
        String pidWithoutNamespace = pid.substring(pid.indexOf(':') + 1);
        view.addFedoraUrl(fedoraExtern + "/objects/" + pid);
        // TODO You know what to do!
        if (pid.contains("edoweb") || pid.contains("ellinet")) {
            if (pid.length() <= 17) {
                view.addOriginalObjectUrl("http://klio.hbz-nrw.de:1801/webclient/MetadataManager?pid="
                        + pidWithoutNamespace);
                // TODO only if synced Resource
                view.addCacheUrl(server + "/" + node.getNamespace() + "base/"
                        + pidWithoutNamespace);
            }
        }
        if (pid.contains("dipp")) {
            // TODO only if synced Resource
            view.addOriginalObjectUrl("http://193.30.112.23:9280/fedora/get/"
                    + pid + "/QDC");
            try {
                // Double-encoded because the cache layer decodes once.
                view.addCacheUrl(server
                        + "/"
                        + node.getNamespace()
                        + "base/"
                        + URLEncoder.encode(URLEncoder.encode(pid, "utf-8"),
                                "utf-8"));
            } catch (UnsupportedEncodingException e) {
                // utf-8 is guaranteed by the JVM; unreachable in practice.
                e.printStackTrace();
            }
        }
        if (pid.contains("ubm")) {
            // TODO only if synced Resource
            view.addOriginalObjectUrl("http://ubm.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de");
            view.addCacheUrl(server + "/" + node.getNamespace() + "base/"
                    + pidWithoutNamespace);
        }
        if (pid.contains("fhdd")) {
            // TODO only if synced Resource
            view.addOriginalObjectUrl("http://fhdd.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de");
            view.addCacheUrl(server + "/" + node.getNamespace() + "base/"
                    + pidWithoutNamespace);
        }
        if (pid.contains("kola")) {
            // TODO only if synced Resource
            view.addOriginalObjectUrl("http://kola.opus.hbz-nrw.de/frontdoor.php?source_opus="
                    + pidWithoutNamespace + "&la=de");
            view.addCacheUrl(server + "/" + node.getNamespace() + "base/"
                    + pidWithoutNamespace);
        }
        String query = "<info:fedora/" + pid + "> * *";
        try {
            view.addRisearchUrl(fedoraExtern
                    + "/risearch?type=triples&lang=spo&format=RDF/XML&query="
                    + URIUtil.encodeQuery(query));
        } catch (URIException e) {
            // Best-effort link; an unencodable query just omits the risearch URL.
        }
        String mime = node.getMimeType();
        if (mime != null && !mime.isEmpty()) {
            if (mime.compareTo("application/pdf") == 0) {
                view.addPdfUrl(apiUrl + "/data");
            }
            if (mime.compareTo("application/zip") == 0) {
                view.addZipUrl(apiUrl + "/data");
            }
        }
        for (String date : node.getBean().getDate()) {
            // NOTE(review): assumes dates are at least 4 characters (year prefix) —
            // confirm upstream guarantees this.
            view.addYear(date.substring(0, 4));
        }
        for (String ddc : node.getBean().getSubject()) {
            if (ddc.startsWith("ddc")) {
                view.addDdc(ddc);
                break;
            }
        }
        for (String id : node.getBean().getIdentifier()) {
            if (id.startsWith("doi")) {
                view.addDoi(id);
            } else if (id.startsWith("urn")) {
                view.addUrn(id);
                break;
            } else if (id.startsWith("HT")) {
                view.addAlephId(id);
                break;
            } else if (id.startsWith("TT")) {
                view.addAlephId(id);
                break;
            } else {
                view.addIdentifier(id);
            }
        }
        return view;
    }

    /**
     * @param list
     *            a list with pids
     * @return all objects in a html list
     */
    public String getAllAsHtml(List<String> list) {
        String result = "";
        try {
            java.net.URL fileLocation = Thread.currentThread()
                    .getContextClassLoader().getResource("list.html");
            StringWriter writer = new StringWriter();
            IOUtils.copy(fileLocation.openStream(), writer);
            String data = writer.toString();
            ST st = new ST(data, '$', '$');
            st.add("type", "resource");
            for (String item : list) {
                st.add("items", "<li><a href=\"" + uriPrefix + item + "\">"
                        + item + "</a></li>");
            }
            result = st.render();
        } catch (IOException e) {
            throw new HttpArchiveException(500, e);
        }
        return result;
    }

    /**
     * @param list
     *            a list with pids
     * @param type
     *            the type to be displaye
     * @return html listing of all objects
     */
    public String getAllOfTypeAsHtml(List<String> list, String type) {
        String result = "";
        try {
            java.net.URL fileLocation = Thread.currentThread()
                    .getContextClassLoader().getResource("list.html");
            StringWriter writer = new StringWriter();
            IOUtils.copy(fileLocation.openStream(), writer);
            String data = writer.toString();
            ST st = new ST(data, '$', '$');
            st.add("type", type);
            for (String item : list) {
                st.add("items", "<li><a href=\"" + uriPrefix + item + "\">"
                        + item + "</a></li>");
            }
            result = st.render();
        } catch (IOException e) {
            throw new HttpArchiveException(500, e);
        }
        return result;
    }

    /**
     * @param pid
     *            the pid to read from
     * @return the parentPid and contentType as json
     */
    public CreateObjectBean getRegalJson(String pid) {
        Node node = fedora.readNode(pid);
        CreateObjectBean result = new CreateObjectBean();
        String parentPid = null;
        String type = node.getContentType();
        parentPid = fedora.getNodeParent(node);
        result.setParentPid(parentPid);
        result.setType(type);
        return result;
    }

    /** HTML table-row rendering for a single RDF statement. */
    private class MyTriple {
        String subject;
        String predicate;
        String object;
        String pid;

        public MyTriple(String subject, String predicate, String object,
                String pid) {
            this.subject = subject;
            this.predicate = predicate;
            this.object = object;
            this.pid = pid;
        }

        public String toString() {
            String subjectLink = null;
            String objectLink = null;
            String namespace = pid.substring(0, pid.indexOf(":"));
            if (subject.startsWith(pid)) {
                subjectLink = uriPrefix + subject;
            } else {
                subjectLink = subject;
            }
            if (object.startsWith(namespace)) {
                objectLink = uriPrefix + object;
            } else if (object.startsWith("http")) {
                objectLink = object;
            }
            if (predicate.compareTo("http://hbz-nrw.de/regal#contentType") == 0) {
                objectLink = "/" + object + "/";
            }
            if (objectLink != null) {
                return "<tr><td><a href=\"" + subjectLink + "\">" + subject
                        + "</a></td><td><a href=\"" + predicate + "\">"
                        + predicate + "</a></td><td about=\"" + subject
                        + "\"><a property=\"" + predicate + "\" href=\""
                        + objectLink + "\">" + object + "</a></td></tr>";
            } else {
                return "<tr><td><a href=\"" + subjectLink + "\">" + subject
                        + "</a></td><td><a href=\"" + predicate + "\">"
                        + predicate + "</a></td><td about=\"" + subject
                        + "\">" + object + "</td></tr>";
            }
        }
    }
}
package de.retest.recheck.auth;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.security.PublicKey;
import java.security.interfaces.RSAPublicKey;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;

import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.client.utils.URLEncodedUtils;
import org.keycloak.OAuth2Constants;
import org.keycloak.adapters.ServerRequest.HttpFailure;

import com.auth0.jwk.JwkException;
import com.auth0.jwk.UrlJwkProvider;
import com.auth0.jwt.JWT;
import com.auth0.jwt.algorithms.Algorithm;
import com.auth0.jwt.exceptions.JWTVerificationException;
import com.auth0.jwt.interfaces.DecodedJWT;
import com.auth0.jwt.interfaces.JWTVerifier;

import kong.unirest.HttpResponse;
import kong.unirest.JsonNode;
import kong.unirest.Unirest;
import kong.unirest.json.JSONObject;
import lombok.Cleanup;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;

/**
 * OAuth2/OIDC login against the retest Keycloak realm.
 * <p>
 * Performs the authorization-code flow via a local loopback listener, stores
 * the offline (refresh) token through the supplied {@link AuthenticationHandler}
 * and verifies access tokens locally against the realm's pinned JWK.
 */
@Slf4j
public class RetestAuthentication {

	private static final String REALM = "customer";
	private static final String URL = "https://sso.prod.cloud.retest.org/auth";
	private static final String BASE_URL = URL + "/realms/" + REALM + "/protocol/openid-connect";
	private static final String AUTH_URL = BASE_URL + "/auth";
	private static final String TOKEN_URL = BASE_URL + "/token";
	private static final String LOGOUT_URL = BASE_URL + "/logout";
	private static final String CERTS_URL = BASE_URL + "/certs";
	// Pinned key id of the realm's signing key; rotating the realm key requires
	// updating this constant.
	private static final String KID = "cXdlj_AlGVf-TbXyauXYM2XairgNUahzgOXHAuAxAmQ";

	private DecodedJWT accessToken;

	private final AuthenticationHandler handler;
	private final String client;
	private final JWTVerifier verifier;

	public RetestAuthentication( final AuthenticationHandler handler, final String client ) {
		this.handler = handler;
		this.client = client;
		verifier = getJwtVerifier();
	}

	/**
	 * Builds a verifier for RS256 tokens signed with the realm key identified
	 * by {@link #KID}.
	 */
	private JWTVerifier getJwtVerifier() {
		try {
			final UrlJwkProvider provider = new UrlJwkProvider( URI.create( CERTS_URL ).toURL() );
			final PublicKey publicKey = provider.get( KID ).getPublicKey();
			return JWT.require( Algorithm.RSA256( (RSAPublicKey) publicKey, null ) ).build();
		} catch ( final JwkException | MalformedURLException e ) {
			throw new RuntimeException( "Error accessing keycloak JWK information", e );
		}
	}

	/**
	 * Ensures the user is authenticated: tries to refresh with the stored
	 * offline token first and falls back to an interactive browser login.
	 */
	public void authenticate() {
		if ( handler.getOfflineToken() != null ) {
			try {
				refreshTokens();
			} catch ( IOException | HttpFailure e ) {
				log.info( "Token not recognized, initiating authentication" );
				login();
			}
		} else {
			log.info( "No active token found, initiating authentication" );
			login();
		}
	}

	/**
	 * Runs the interactive authorization-code flow: opens the browser on the
	 * auth endpoint, waits for the redirect on a local socket, then exchanges
	 * the code for tokens.
	 */
	private void login() {
		try {
			final CallbackListener callback = new CallbackListener();
			callback.start();

			final String redirectUri = "http://localhost:" + callback.server.getLocalPort();
			// Random state guards against CSRF on the redirect.
			final String state = UUID.randomUUID().toString();

			final URIBuilder builder = new URIBuilder( AUTH_URL );
			builder.addParameter( "response_type", "code" );
			builder.addParameter( "client_id", client );
			builder.addParameter( "redirect_uri", redirectUri );
			builder.addParameter( "state", state );
			builder.addParameter( "scope", "offline_access" );

			final URI loginUri = URI.create( builder.build().toString() );
			handler.showWebLoginUri( loginUri );
			callback.join();

			// Abort on any failed precondition instead of continuing with a
			// bogus code (the original fell through to the token exchange).
			if ( !state.equals( callback.result.getState() ) ) {
				handler.loginFailed( new RuntimeException( "State parameter mismatch" ) );
				return;
			}
			if ( callback.result.getError() != null ) {
				handler.loginFailed( new RuntimeException( callback.result.getError() ) );
				return;
			}
			if ( callback.result.getErrorException() != null ) {
				handler.loginFailed( callback.result.getErrorException() );
				return;
			}

			final TokenBundle bundle = accessCodeToToken( callback.result.getCode(), redirectUri );
			accessToken = verifier.verify( bundle.accessToken );
			handler.loginPerformed( bundle.refreshToken );
		} catch ( final InterruptedException e ) {
			log.error( "Error during authentication", e );
			// Restore the interrupt flag only when we were actually interrupted.
			Thread.currentThread().interrupt();
		} catch ( final IOException | URISyntaxException e ) {
			log.error( "Error during authentication", e );
		}
	}

	/**
	 * Exchanges an authorization code for access and refresh tokens.
	 */
	private TokenBundle accessCodeToToken( final String code, final String redirectUri ) {
		final TokenBundle bundle = new TokenBundle();

		final HttpResponse<JsonNode> response = Unirest.post( TOKEN_URL ) //
				.field( "grant_type", "authorization_code" ) //
				.field( "code", code ) //
				.field( "client_id", client ) //
				.field( "redirect_uri", redirectUri ) //
				.asJson();

		if ( response.isSuccess() ) {
			final JSONObject object = response.getBody().getObject();
			bundle.setAccessToken( object.getString( "access_token" ) );
			bundle.setRefreshToken( object.getString( "refresh_token" ) );
		}
		return bundle;
	}

	@Data
	private static class TokenBundle {
		private String accessToken;
		private String refreshToken;
	}

	/**
	 * Invalidates the stored offline token at the Keycloak logout endpoint.
	 */
	public void logout() {
		final String offlineToken = handler.getOfflineToken();
		if ( offlineToken != null ) {
			try {
				log.info( "Performing logout" );
				final HttpResponse<JsonNode> response = Unirest.post( LOGOUT_URL ) //
						.field( "client_id", client ) //
						.field( "refresh_token", offlineToken ) //
						.asJson();
				if ( !response.isSuccess() ) {
					throw new IOException( "Logout failed with HTTP status " + response.getStatus() );
				}
				handler.logoutPerformed();
			} catch ( final IOException e ) {
				log.error( "Error during logout", e );
				handler.logoutFailed( e );
			}
		} else {
			log.error( "No offline token provided" );
		}
	}

	/**
	 * @return the current (refreshed if necessary) raw access token, or
	 *         {@code null} if no valid token could be obtained
	 */
	public String getAccessToken() {
		try {
			refreshTokens();
		} catch ( IOException | HttpFailure e ) {
			log.error( "Error refreshing token(s)", e );
		}
		return accessToken != null ? accessToken.getToken() : null;
	}

	/**
	 * Refreshes the access token with the stored offline token if the current
	 * one is missing or no longer verifies.
	 *
	 * @throws IOException if the token endpoint rejects the refresh request
	 * @throws HttpFailure kept in the signature for API compatibility
	 */
	private void refreshTokens() throws IOException, HttpFailure {
		if ( !isTokenValid() ) {
			final HttpResponse<JsonNode> response = Unirest.post( TOKEN_URL ) //
					.field( "grant_type", "refresh_token" ) //
					.field( "refresh_token", handler.getOfflineToken() ) //
					.field( "client_id", client ) //
					.asJson();
			if ( !response.isSuccess() ) {
				throw new IOException( "Token refresh failed with HTTP status " + response.getStatus() );
			}
			accessToken = verifier.verify( response.getBody().getObject().getString( "access_token" ) );
		}
	}

	/**
	 * @return {@code true} if the cached access token still verifies
	 *         (signature and expiry) against the realm key
	 */
	private boolean isTokenValid() {
		if ( accessToken == null ) {
			return false;
		}
		try {
			verifier.verify( accessToken.getToken() );
			return true;
		} catch ( final JWTVerificationException e ) {
			log.info( "Current token is invalid, requesting new one" );
			return false;
		}
	}

	/**
	 * Parses the query parameters of the redirect request line into a
	 * {@link KeycloakResult}.
	 *
	 * @param request the raw HTTP request line, e.g. {@code GET /?code=... HTTP/1.1}
	 */
	static KeycloakResult getRequestParameters( final String request ) {
		final String url = "http://localhost/" + request.split( " " )[1];
		final Map<String, String> parameters =
				URLEncodedUtils.parse( URI.create( url ), StandardCharsets.UTF_8 ) //
						.stream() //
						.collect( Collectors.toMap( NameValuePair::getName, NameValuePair::getValue ) );

		return KeycloakResult.builder() //
				.code( parameters.get( OAuth2Constants.CODE ) ) //
				// NOTE(review): OAuth2 uses "error_description"; this reads
				// "error-description" — confirm against the actual redirect.
				.error( parameters.get( OAuth2Constants.ERROR ) ) //
				.errorDescription( parameters.get( "error-description" ) ) //
				.state( parameters.get( OAuth2Constants.STATE ) ) //
				.build();
	}

	/**
	 * One-shot loopback HTTP listener that captures the OAuth2 redirect and
	 * forwards the browser to the Keycloak "delegated" confirmation page.
	 */
	private class CallbackListener extends Thread {

		private final ServerSocket server;
		private KeycloakResult result;

		public CallbackListener() throws IOException {
			// Port 0 lets the OS pick a free ephemeral port.
			server = new ServerSocket( 0 );
		}

		@Override
		public void run() {
			try ( Socket socket = server.accept() ) {
				@Cleanup
				final BufferedReader br = new BufferedReader( new InputStreamReader( socket.getInputStream() ) );
				final String request = br.readLine();
				result = getRequestParameters( request );

				@Cleanup
				final OutputStreamWriter out = new OutputStreamWriter( socket.getOutputStream() );
				@Cleanup
				final PrintWriter writer = new PrintWriter( out );

				if ( result.getError() == null ) {
					writer.println( "HTTP/1.1 302 Found" );
					writer.println( "Location: " + TOKEN_URL.replace( "/token", "/delegated" ) );
				} else {
					writer.println( "HTTP/1.1 302 Found" );
					writer.println( "Location: " + TOKEN_URL.replace( "/token", "/delegated?error=true" ) );
				}
			} catch ( final IOException e ) {
				log.error( "Error during communication with sso.cloud.retest.org", e );
			}
		}
	}
}
package info.istamendil.bookcatalogue.models; import javax.persistence.*; import javax.validation.constraints.*; import org.hibernate.validator.constraints.*; @Entity @Table(name = "book") public class Book{ @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(name = "id", unique = true, nullable = false) private int id; @Length(max = 65535) private String description; @NotNull @Pattern(regexp = "[0-9]{10,14}") @Column(unique = true) private String isbn; @NotNull @Size(min = 1, max = 255) private String name; @NotNull @Range(min = 0L, max = 9999L) private Integer pages; @NotNull @ManyToOne(fetch = FetchType.EAGER) @JoinColumn(name = "publishing_house", nullable = false) private PublishingHouse publishingHouse; @URL private String url; @NotNull @Range(min = 0L, max = 9999L) private Integer year; public Book(){} public Book(int id, String name) { this.id = id; this.name = name; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getIsbn() { return isbn; } public void setIsbn(String isbn) { this.isbn = isbn; } public Integer getYear() { return year; } public void setYear(Integer year) { this.year = year; } public Integer getPages() { return pages; } public void setPages(Integer pages) { this.pages = pages; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public PublishingHouse getPublishingHouse() { return publishingHouse; } public void setPublishingHouse(PublishingHouse publishingHouse) { this.publishingHouse = publishingHouse; } }
package io.github.nucleuspowered.nucleus;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.github.nucleuspowered.nucleus.modules.core.config.CoreConfigAdapter;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.action.HoverAction;
import org.spongepowered.api.text.action.TextActions;
import org.spongepowered.api.text.format.TextColor;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.api.text.format.TextStyle;
import org.spongepowered.api.text.format.TextStyles;
import org.spongepowered.api.text.serializer.TextSerializers;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.Nullable;

/**
 * Utilities for turning chat templates into Sponge {@link Text}: token
 * ({{...}}) substitution, ampersand colour codes, and clickable URL/command
 * markup.
 */
public class ChatUtil {

    private final NucleusPlugin plugin;
    private final Pattern urlParser =
        Pattern.compile("(?<first>(^|\\s))(?<reset>&r)?(?<colour>(&[0-9a-flmnrok])+)?"
                + "(?<options>\\{[a-z]+?\\})?(?<url>(http(s)?://)?([A-Za-z0-9]+\\.)+[A-Za-z0-9]{2,}\\S*)",
            Pattern.CASE_INSENSITIVE);
    private final Pattern tokenParser = Pattern.compile("^\\{\\{(?<capture>[\\S]+)}}", Pattern.CASE_INSENSITIVE);
    private final Pattern tokenParserLookAhead = Pattern.compile("(?=\\{\\{(?<capture>[\\S]+)}})", Pattern.CASE_INSENSITIVE);
    private final Pattern enhancedUrlParser =
        Pattern.compile("(?<first>(^|\\s))(?<reset>&r)?(?<colour>(&[0-9a-flmnrok])+)?"
                + "((?<options>\\{[a-z]+?\\})?(?<url>(http(s)?://)?([A-Za-z0-9]+\\.)+[A-Za-z0-9]{2,}\\S*)|"
                + "(?<specialUrl>(\\[(?<msg>.+?)\\](?<optionssurl>\\{[a-z]+\\})?\\((?<sUrl>(http(s)?://)?([A-Za-z0-9]+\\.)+[A-Za-z0-9]{2,}[^\\s)]*)\\)))|"
                + "(?<specialCmd>(\\[(?<sMsg>.+?)\\](?<optionsscmd>\\{[a-z]+\\})?\\((?<sCmd>/.+?)\\))))",
            Pattern.CASE_INSENSITIVE);
    private CoreConfigAdapter cca = null;

    public static final StyleTuple EMPTY = new StyleTuple(TextColors.NONE, TextStyles.NONE);

    public ChatUtil(NucleusPlugin plugin) {
        this.plugin = plugin;
    }

    public final Text getMessageFromTemplate(String templates, CommandSource cs, final boolean trimTrailingSpace) {
        return getMessageFromTemplate(Lists.newArrayList(templates), cs, trimTrailingSpace, Maps.newHashMap(), Maps.newHashMap()).get(0);
    }

    public final Text getMessageFromTemplateWithVariables(String templates, CommandSource cs, final boolean trimTrailingSpace,
            Map<String, Object> variables) {
        return getMessageFromTemplate(Lists.newArrayList(templates), cs, trimTrailingSpace, Maps.newHashMap(), variables).get(0);
    }

    public final List<Text> getMessageFromTemplate(List<String> templates, CommandSource cs, final boolean trimTrailingSpace) {
        return getMessageFromTemplate(templates, cs, trimTrailingSpace, Maps.newHashMap(), Maps.newHashMap());
    }

    public final Text getMessageFromTemplate(String templates, CommandSource cs, final boolean trimTrailingSpace,
            Map<String, Function<CommandSource, Optional<Text>>> tokensArray, Map<String, Object> variables) {
        return getMessageFromTemplate(Lists.newArrayList(templates), cs, trimTrailingSpace, tokensArray, variables).get(0);
    }

    /**
     * Converts each template into a {@link Text}: {{token}} markers are
     * replaced via the supplied token map (falling back to the plugin token
     * handler), ampersand codes are deserialized, and colour/style state is
     * carried across elements.
     */
    public final List<Text> getMessageFromTemplate(List<String> templates, CommandSource cs, final boolean trimTrailingSpace,
            Map<String, Function<CommandSource, Optional<Text>>> tokensArray, Map<String, Object> variables) {

        List<Text> texts = Lists.newArrayList();
        templates.forEach(template -> {
            StyleTuple st = new StyleTuple(TextColors.WHITE, TextStyles.NONE);
            boolean trimNext = trimTrailingSpace;

            Text.Builder tb = Text.builder();
            String[] items = tokenParserLookAhead.split(template);
            Matcher tokenCheck = tokenParser.matcher("");
            for (String textElement : items) {
                if (tokenCheck.reset(textElement).find(0)) {
                    textElement = textElement.replace(tokenCheck.group(), "");
                    String tokenName = tokenCheck.group("capture");

                    // Token processing here.
                    Optional<Text> tokenResult;
                    if (tokensArray.containsKey(tokenName.toLowerCase())) {
                        tokenResult = tokensArray.get(tokenName.toLowerCase()).apply(cs);
                    } else {
                        tokenResult = plugin.getTokenHandler().getTextFromToken(tokenName, cs, variables);
                    }

                    if (tokenResult.isPresent()) {
                        tb.append(Text.builder().color(st.colour).style(st.style).append(tokenResult.get()).build());
                    } else {
                        tb.append(Text.EMPTY);
                    }

                    trimNext = false;
                }

                if (trimNext) {
                    textElement = textElement.replaceAll("^\\s+", "");
                }

                if (!textElement.isEmpty()) {
                    // Just convert the colour codes, but that's it.
                    Text r = TextSerializers.FORMATTING_CODE.deserialize(textElement);
                    tb.append(Text.of(st.colour, st.style, r));
                    st = getLastColourAndStyle(r, st);
                    trimNext = false;
                }
            }

            texts.add(tb.build());
        });

        return texts;
    }

    public Text addLinksToText(Text message, @Nullable Player player) {
        return addLinksToAmpersandFormattedString(TextSerializers.FORMATTING_CODE.serialize(message), player, enhancedUrlParser);
    }

    public Text addUrlsToAmpersandFormattedString(String message) {
        return addLinksToAmpersandFormattedString(message, null, urlParser);
    }

    /**
     * Scans an ampersand-formatted string for URLs, [msg](url) and
     * [msg](/command) markup and replaces them with clickable {@link Text}.
     */
    public Text addLinksToAmpersandFormattedString(String message, @Nullable Player player, Pattern parser) {
        Preconditions.checkNotNull(message, "message");
        if (message.isEmpty()) {
            return Text.EMPTY;
        }

        Matcher m = parser.matcher(message);
        if (!m.find()) {
            return TextSerializers.FORMATTING_CODE.deserialize(message);
        }

        List<Text> texts = Lists.newArrayList();
        String remaining = message;
        StyleTuple st = ChatUtil.EMPTY;
        do {
            // We found a URL. We split on the URL that we have.
            String[] textArray = remaining.split(parser.pattern(), 2);
            Text first = Text.builder().color(st.colour).style(st.style)
                    .append(TextSerializers.FORMATTING_CODE.deserialize(textArray[0])).build();

            // Add this text to the list regardless.
            texts.add(first);

            // If we have more to do, shove it into the "remaining" variable.
            if (textArray.length == 2) {
                remaining = textArray[1];
            } else {
                remaining = null;
            }

            // Get the last colour & styles
            String colourMatch = m.group("colour");
            if (colourMatch != null && !colourMatch.isEmpty()) {

                // If there is a reset, explicitly do it.
                TextStyle reset = TextStyles.NONE;
                if (m.group("reset") != null) {
                    reset = TextStyles.RESET;
                }

                first = Text.of(reset, TextSerializers.FORMATTING_CODE.deserialize(m.group("colour") + " "));
            }

            st = getLastColourAndStyle(first, st);

            // Build the URL
            String whiteSpace = m.group("first");
            if (m.group("url") != null) {
                String url = m.group("url");
                texts.add(getTextForUrl(url, url, whiteSpace, st, m.group("options")));
            } else if (m.group("specialUrl") != null) {
                String url = m.group("sUrl");
                String msg = m.group("msg");
                texts.add(getTextForUrl(url, msg, whiteSpace, st, m.group("optionssurl")));
            } else {
                // Must be commands.
                String cmd = m.group("sCmd");
                String msg = m.group("sMsg");
                String optionList = m.group("optionsscmd");

                if (player != null) {
                    cmd = cmd.replace("{{player}}", player.getName());
                }

                msg = String.join("", whiteSpace, msg);
                Text.Builder textBuilder = Text.builder(msg).color(st.colour).style(st.style).onClick(TextActions.runCommand(cmd))
                        .onHover(setupHoverOnCmd(cmd, optionList));
                if (optionList != null && optionList.contains("s")) {
                    textBuilder.onClick(TextActions.suggestCommand(cmd));
                }

                texts.add(textBuilder.build());
            }
        } while (remaining != null && m.find());

        // Add the last bit.
        if (remaining != null) {
            texts.add(Text.builder().color(st.colour).style(st.style)
                    .append(TextSerializers.FORMATTING_CODE.deserialize(remaining)).build());
        }

        // Join it all together.
        return Text.join(texts);
    }

    @Nullable
    private HoverAction<?> setupHoverOnCmd(String cmd, @Nullable String optionList) {
        if (optionList != null) {
            if (optionList.contains("h")) {
                return null;
            }

            if (optionList.contains("s")) {
                return TextActions.showText(plugin.getMessageProvider().getTextMessageWithFormat("chat.command.clicksuggest", cmd));
            }
        }

        return TextActions.showText(plugin.getMessageProvider().getTextMessageWithFormat("chat.command.click", cmd));
    }

    private Text getTextForUrl(String url, String msg, String whiteSpace, StyleTuple st, @Nullable String optionString) {
        String toParse = TextSerializers.FORMATTING_CODE.stripCodes(url);
        if (!whiteSpace.isEmpty()) {
            msg = String.join("", whiteSpace, msg);
        }

        try {
            URL urlObj;
            // Reconstructed from a truncated condition in the original source:
            // prepend a scheme only when none is present.
            if (!toParse.startsWith("http://") && !toParse.startsWith("https://")) {
                urlObj = new URL("http://" + toParse);
            } else {
                urlObj = new URL(toParse);
            }

            Text.Builder textBuilder = Text.builder(msg).color(st.colour).style(st.style).onClick(TextActions.openUrl(urlObj));
            if (optionString != null && optionString.contains("h")) {
                textBuilder.onHover(TextActions.showText(plugin.getMessageProvider().getTextMessageWithFormat("chat.url.click", url)));
            }

            return textBuilder.build();
        } catch (MalformedURLException e) {
            // URL parsing failed, just put the original text in here.
            initCoreConfigAdapter();

            plugin.getLogger().warn(plugin.getMessageProvider().getMessageWithFormat("chat.url.malformed", url));
            if (this.cca.getNodeOrDefault().isDebugmode()) {
                e.printStackTrace();
            }

            return Text.builder(url).color(st.colour).style(st.style).build();
        }
    }

    /**
     * Walks the flattened text from the end and returns the last effective
     * colour and style, falling back to {@code current} where unset.
     */
    public StyleTuple getLastColourAndStyle(Text text, @Nullable StyleTuple current) {
        List<Text> texts = flatten(text);
        TextColor tc = TextColors.NONE;
        TextStyle ts = TextStyles.NONE;
        for (int i = texts.size() - 1; i > -1; i--) {
            // If we have both a Text Colour and a Text Style, then break out.
            if (tc != TextColors.NONE && ts != TextStyles.NONE) {
                break;
            }

            if (tc == TextColors.NONE) {
                tc = texts.get(i).getColor();

                // If the text colour is reset, the style requires a reset too.
                if (tc == TextColors.RESET) {
                    ts = TextStyles.RESET;
                    break;
                }
            }

            if (ts == TextStyles.NONE) {
                ts = texts.get(i).getStyle();
            }
        }

        if (current == null) {
            return new StyleTuple(tc, ts);
        }

        return new StyleTuple(tc != TextColors.NONE ? tc : current.colour, ts != TextStyles.NONE ? ts : current.style);
    }

    private List<Text> flatten(Text text) {
        List<Text> texts = Lists.newArrayList(text);
        if (!text.getChildren().isEmpty()) {
            text.getChildren().forEach(x -> texts.addAll(flatten(x)));
        }

        return texts;
    }

    public Text addCommandToName(CommandSource p) {
        Text.Builder text = Text.builder(p.getName());
        if (p instanceof User) {
            return addCommandToNameInternal(text, (User) p);
        }

        return text.build();
    }

    public Text addCommandToDisplayName(CommandSource p) {
        Text name = getName(p);
        if (p instanceof User) {
            return addCommandToNameInternal(name, (User) p);
        }

        return name;
    }

    private Text addCommandToNameInternal(Text name, User user) {
        return addCommandToNameInternal(name.toBuilder(), user);
    }

    private Text addCommandToNameInternal(Text.Builder name, User user) {
        initCoreConfigAdapter();
        String cmd = cca.getNodeOrDefault().getCommandOnNameClick();
        if (cmd == null || cmd.isEmpty()) {
            return name.build();
        }

        if (!cmd.startsWith("/")) {
            cmd = "/" + cmd;
        }

        if (!cmd.endsWith(" ")) {
            cmd = cmd + " ";
        }

        final String commandToRun = cmd.replaceAll("\\{\\{player\\}\\}", user.getName());
        Optional<HoverAction<?>> ha = name.getHoverAction();
        Text.Builder hoverAction;
        if (ha.isPresent() && (ha.get() instanceof HoverAction.ShowText)) {
            HoverAction.ShowText h = (HoverAction.ShowText) ha.get();
            hoverAction = h.getResult().toBuilder();
            hoverAction.append(Text.NEW_LINE);
        } else {
            hoverAction = Text.builder();
        }

        hoverAction.append(Nucleus.getNucleus().getMessageProvider().getTextMessageWithFormat("name.hover.command", commandToRun));
        return name.onClick(TextActions.suggestCommand(commandToRun)).onHover(TextActions.showText(hoverAction.toText())).build();
    }

    private Text getName(CommandSource cs) {
        if (cs instanceof Player) {
            return plugin.getNameUtil().getName((Player) cs);
        }

        return Text.of(cs.getName());
    }

    private void initCoreConfigAdapter() {
        // Lazily fetched so construction does not depend on injector readiness.
        if (this.cca == null) {
            this.cca = plugin.getInjector().getInstance(CoreConfigAdapter.class);
        }
    }

    /** Immutable pair of a text colour and a text style. */
    public static final class StyleTuple {
        public final TextColor colour;
        public final TextStyle style;

        StyleTuple(TextColor colour, TextStyle style) {
            this.colour = colour;
            this.style = style;
        }
    }
}
package hudson.plugins.tasks;

import hudson.maven.AggregatableAction;
import hudson.maven.MavenAggregatedReport;
import hudson.maven.MavenBuild;
import hudson.maven.MavenModule;
import hudson.maven.MavenModuleSet;
import hudson.maven.MavenModuleSetBuild;
import hudson.model.Action;
import hudson.model.AbstractBuild;
import hudson.plugins.analysis.core.HealthDescriptor;
import hudson.plugins.analysis.core.ParserResult;
import hudson.plugins.tasks.parser.TasksParserResult;

import java.util.List;
import java.util.Map;

/**
 * A {@link TasksResultAction} for native maven jobs. This action
 * additionally provides result aggregation for sub-modules and for the main
 * project.
 *
 * @author Ulli Hafner
 * @deprecated not used anymore
 */
@Deprecated
public class MavenTasksResultAction extends TasksResultAction implements AggregatableAction, MavenAggregatedReport {

    /** Tag identifiers indicating high priority. */
    private String high;
    /** Tag identifiers indicating normal priority. */
    private String normal;
    /** Tag identifiers indicating low priority. */
    private String low;
    /** The default encoding to be used when reading and parsing files. */
    private String defaultEncoding;

    /**
     * Creates a new instance of {@link MavenTasksResultAction} with an
     * initial result.
     *
     * @param owner
     *            the associated build of this action
     * @param healthDescriptor
     *            health descriptor to use
     * @param defaultEncoding
     *            the default encoding to be used when reading and parsing files
     * @param high
     *            tag identifiers indicating high priority
     * @param normal
     *            tag identifiers indicating normal priority
     * @param low
     *            tag identifiers indicating low priority
     * @param result
     *            the result in this build
     */
    // CHECKSTYLE:OFF
    public MavenTasksResultAction(final AbstractBuild<?, ?> owner, final HealthDescriptor healthDescriptor,
            final String defaultEncoding, final String high, final String normal, final String low,
            final TasksResult result) {
        super(owner, healthDescriptor, result);
        initializeFields(defaultEncoding, high, normal, low);
    }
    // CHECKSTYLE:ON

    /**
     * Creates a new instance of {@link MavenTasksResultAction} without a
     * result.
     *
     * @param owner
     *            the associated build of this action
     * @param healthDescriptor
     *            health descriptor to use
     * @param defaultEncoding
     *            the default encoding to be used when reading and parsing files
     * @param high
     *            tag identifiers indicating high priority
     * @param normal
     *            tag identifiers indicating normal priority
     * @param low
     *            tag identifiers indicating low priority
     */
    public MavenTasksResultAction(final AbstractBuild<?, ?> owner, final HealthDescriptor healthDescriptor,
            final String defaultEncoding, final String high, final String normal, final String low) {
        super(owner, healthDescriptor);
        initializeFields(defaultEncoding, high, normal, low);
    }

    /**
     * Stores the tag identifiers and the encoding in this action's fields.
     *
     * @param defaultEncoding
     *            the default encoding to be used when reading and parsing files
     * @param high
     *            tag identifiers indicating high priority
     * @param normal
     *            tag identifiers indicating normal priority
     * @param low
     *            tag identifiers indicating low priority
     */
    // CHECKSTYLE:OFF
    @SuppressWarnings("hiding")
    private void initializeFields(final String defaultEncoding, final String high, final String normal, final String low) {
        this.high = high;
        this.normal = normal;
        this.low = low;
        this.defaultEncoding = defaultEncoding;
    }
    // CHECKSTYLE:ON

    @Override
    public MavenAggregatedReport createAggregatedAction(final MavenModuleSetBuild build,
            final Map<MavenModule, List<MavenBuild>> moduleBuilds) {
        return new MavenTasksResultAction(build, getHealthDescriptor(), defaultEncoding, high, normal, low);
    }

    @Override
    public Action getProjectAction(final MavenModuleSet moduleSet) {
        return new TasksProjectAction(moduleSet);
    }

    @Override
    public Class<? extends AggregatableAction> getIndividualActionType() {
        return getClass();
    }

    /**
     * Called whenever a new module build is completed, to update the aggregated
     * report. When multiple builds complete simultaneously, Jenkins serializes
     * the execution of this method, so this method needs not be
     * concurrency-safe.
     *
     * @param moduleBuilds
     *            Same as <tt>MavenModuleSet.getModuleBuilds()</tt> but provided
     *            for convenience and efficiency.
     * @param newBuild
     *            Newly completed build.
     */
    @Override
    public void update(final Map<MavenModule, List<MavenBuild>> moduleBuilds, final MavenBuild newBuild) {
        // not used anymore
    }

    @Override
    protected ParserResult createResult() {
        return new TasksParserResult();
    }

    /** Backward compatibility. @deprecated */
    @edu.umd.cs.findbugs.annotations.SuppressWarnings("UUF")
    @SuppressWarnings("unused")
    @Deprecated
    private transient String height;
}
package info.u_team.u_team_core.util;

import java.util.Map;
import java.util.concurrent.Callable;
import java.util.function.Supplier;

import com.google.common.collect.Maps;

import net.minecraft.client.renderer.tileentity.ItemStackTileEntityRenderer;
import net.minecraft.item.Item.Properties;
import net.minecraftforge.common.ToolType;
import net.minecraftforge.fml.common.ObfuscationReflectionHelper;

/**
 * {@link Properties} subclass that supports copy-construction.
 * <p>
 * Protected fields are copied directly; the private fields ("canRepair",
 * "toolClasses", "ister") are transferred via reflection because
 * {@link Properties} exposes no accessors for them.
 */
public class ItemProperties extends Properties {
	
	public ItemProperties() {
	}
	
	public ItemProperties(Properties properties) {
		// Accessible (protected) fields are assigned directly.
		maxStackSize = properties.maxStackSize;
		maxDamage = properties.maxDamage;
		containerItem = properties.containerItem;
		group = properties.group;
		rarity = properties.rarity;
		food = properties.food;
		
		// Private fields need reflective access.
		final boolean canRepair = getPrivateField(properties, "canRepair");
		final Map<ToolType, Integer> toolClasses = getPrivateField(properties, "toolClasses");
		final Supplier<Callable<ItemStackTileEntityRenderer>> ister = getPrivateField(properties, "ister");
		
		setPrivateField("canRepair", canRepair);
		// Defensive copy so both instances do not share the same map.
		setPrivateField("toolClasses", Maps.newHashMap(toolClasses));
		setPrivateField("ister", ister);
	}
	
	/** Reads a private field of the given source {@link Properties} instance. */
	private static <T> T getPrivateField(Properties source, String fieldName) {
		return ObfuscationReflectionHelper.getPrivateValue(Properties.class, source, fieldName);
	}
	
	/** Writes a private {@link Properties} field on this instance. */
	private void setPrivateField(String fieldName, Object value) {
		ObfuscationReflectionHelper.setPrivateValue(Properties.class, this, value, fieldName);
	}
}
package jenkins.plugins.telegram;

import hudson.EnvVars;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.listeners.ItemListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Notifier;
import hudson.tasks.Publisher;
import hudson.util.FormValidation;
import jenkins.model.Jenkins;
import jenkins.model.JenkinsLocationConfiguration;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.export.Exported;

import java.io.IOException;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Post-build publisher that sends build notifications to Telegram.
 *
 * <p>Job-level settings (token, chat id, server URL) fall back to the global
 * values held by {@link DescriptorImpl} when left empty.
 */
public class TelegramNotifier extends Notifier {

    private static final Logger logger = Logger.getLogger(TelegramNotifier.class.getName());

    // Bot API token; empty means "use the globally configured token"
    // (see newTelegramService).
    private String authToken;
    // Base URL of this Jenkins instance; empty means "use the Jenkins
    // location configuration" (see getBuildServerUrl).
    private String buildServerUrl;
    // Target Telegram chat; empty means "use the global default chat id".
    private String chatId;
    private String sendAs;
    // Per-event flags selecting which build states trigger a notification.
    private boolean startNotification;
    private boolean notifySuccess;
    private boolean notifyAborted;
    private boolean notifyNotBuilt;
    private boolean notifyUnstable;
    private boolean notifyFailure;
    private boolean notifyBackToNormal;
    private boolean notifyRepeatedFailure;
    private boolean includeTestSummary;
    // How much commit information to include in the message.
    private CommitInfoChoice commitInfoChoice;
    private boolean includeCustomMessage;
    private String customMessage;

    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }

    public String getChatId() {
        return chatId;
    }

    public String getAuthToken() {
        return authToken;
    }

    /**
     * Returns the configured build server URL, falling back to the global
     * Jenkins location configuration when none was set on this job.
     */
    public String getBuildServerUrl() {
        if (buildServerUrl == null || buildServerUrl.equals("")) {
            JenkinsLocationConfiguration jenkinsConfig = new JenkinsLocationConfiguration();
            return jenkinsConfig.getUrl();
        } else {
            return buildServerUrl;
        }
    }

    public String getSendAs() {
        return sendAs;
    }

    public boolean getStartNotification() {
        return startNotification;
    }

    public boolean getNotifySuccess() {
        return notifySuccess;
    }

    public CommitInfoChoice getCommitInfoChoice() {
        return commitInfoChoice;
    }

    public boolean getNotifyAborted() {
        return notifyAborted;
    }

    public boolean getNotifyFailure() {
        return notifyFailure;
    }

    public boolean getNotifyNotBuilt() {
        return notifyNotBuilt;
    }

    public boolean getNotifyUnstable() {
        return notifyUnstable;
    }

    public boolean getNotifyBackToNormal() {
        return notifyBackToNormal;
    }

    public boolean includeTestSummary() {
        return includeTestSummary;
    }

    public boolean getNotifyRepeatedFailure() {
        return notifyRepeatedFailure;
    }

    public boolean includeCustomMessage() {
        return includeCustomMessage;
    }

    public String getCustomMessage() {
        return customMessage;
    }

    /**
     * Data-bound constructor invoked by Jenkins when the job configuration is
     * saved; parameter names must match the config form's field names.
     */
    @DataBoundConstructor
    public TelegramNotifier(final String authToken, final String chatId, final String buildServerUrl,
                            final String sendAs, final boolean startNotification, final boolean notifyAborted,
                            final boolean notifyFailure, final boolean notifyNotBuilt, final boolean notifySuccess,
                            final boolean notifyUnstable, final boolean notifyBackToNormal,
                            final boolean notifyRepeatedFailure, final boolean includeTestSummary,
                            CommitInfoChoice commitInfoChoice, boolean includeCustomMessage, String customMessage) {
        super();
        this.authToken = authToken;
        this.buildServerUrl = buildServerUrl;
        this.chatId = chatId;
        this.sendAs = sendAs;
        this.startNotification = startNotification;
        this.notifyAborted = notifyAborted;
        this.notifyFailure = notifyFailure;
        this.notifyNotBuilt = notifyNotBuilt;
        this.notifySuccess = notifySuccess;
        this.notifyUnstable = notifyUnstable;
        this.notifyBackToNormal = notifyBackToNormal;
        this.notifyRepeatedFailure = notifyRepeatedFailure;
        this.includeTestSummary = includeTestSummary;
        this.commitInfoChoice = commitInfoChoice;
        this.includeCustomMessage = includeCustomMessage;
        this.customMessage = customMessage;
    }

    /** This publisher needs no build-step synchronization. */
    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.NONE;
    }

    /**
     * Builds a {@link TelegramService} for the given build: the job-level
     * token and chat id fall back to the global descriptor values when empty,
     * then environment variables are expanded in both.
     */
    public TelegramService newTelegramService(AbstractBuild r, BuildListener listener) {
        String authToken = this.authToken;
        if (StringUtils.isEmpty(authToken)) {
            authToken = getDescriptor().getToken();
        }
        String chatId = this.chatId;
        if (StringUtils.isEmpty(chatId)) {
            chatId = getDescriptor().getChatId();
        }
        EnvVars env = null;
        try {
            env = r.getEnvironment(listener);
        } catch (Exception e) {
            // Fall back to an empty environment so expansion below is a no-op
            // rather than an NPE.
            listener.getLogger().println("Error retrieving environment vars: " + e.getMessage());
            env = new EnvVars();
        }
        authToken = env.expand(authToken);
        chatId = env.expand(chatId);
        return new StandardTelegramService(authToken, chatId);
    }

    /** No-op at the post-build stage; always reports success. */
    @Override
    public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
            throws InterruptedException, IOException {
        return true;
    }

    /**
     * Triggers the "build started" notification before the build runs, when
     * {@code startNotification} is enabled for this job.
     */
    @Override
    public boolean prebuild(AbstractBuild<?, ?> build, BuildListener listener) {
        if (startNotification) {
            Map<Descriptor<Publisher>, Publisher> map = build.getProject().getPublishersList().toMap();
            for (Publisher publisher : map.values()) {
                if (publisher instanceof TelegramNotifier) {
                    logger.info("Invoking Started...");
                    new ActiveNotifier((TelegramNotifier) publisher, listener).started(build);
                }
            }
        }
        return super.prebuild(build, listener);
    }

    /**
     * Global (Manage Jenkins) configuration for the Telegram notifier:
     * default token, chat id, server URL and sender name.
     */
    @Extension
    public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {

        private String token;
        private String chatId;
        private String buildServerUrl;
        private String sendAs;

        public static final CommitInfoChoice[] COMMIT_INFO_CHOICES = CommitInfoChoice.values();

        public DescriptorImpl() {
            // Restore previously saved global configuration from disk.
            load();
        }

        public String getToken() {
            return token;
        }

        public String getChatId() {
            return chatId;
        }

        /**
         * Returns the globally configured build server URL, falling back to
         * the Jenkins location configuration when unset.
         */
        public String getBuildServerUrl() {
            if (buildServerUrl == null || buildServerUrl.equals("")) {
                JenkinsLocationConfiguration jenkinsConfig = new JenkinsLocationConfiguration();
                return jenkinsConfig.getUrl();
            } else {
                return buildServerUrl;
            }
        }

        public String getSendAs() {
            return sendAs;
        }

        /** This notifier can be added to any project type. */
        public boolean isApplicable(Class<? extends AbstractProject> aClass) {
            return true;
        }

        /**
         * Binds the per-job configuration form to a new notifier instance.
         * The request parameter names must match the job config form fields.
         */
        @Override
        public TelegramNotifier newInstance(StaplerRequest sr, JSONObject json) {
            String token = sr.getParameter("telegramToken");
            String chatId = sr.getParameter("telegramChatId");
            boolean startNotification = "true".equals(sr.getParameter("telegramStartNotification"));
            boolean notifySuccess = "true".equals(sr.getParameter("telegramNotifySuccess"));
            boolean notifyAborted = "true".equals(sr.getParameter("telegramNotifyAborted"));
            boolean notifyNotBuilt = "true".equals(sr.getParameter("telegramNotifyNotBuilt"));
            boolean notifyUnstable = "true".equals(sr.getParameter("telegramNotifyUnstable"));
            boolean notifyFailure = "true".equals(sr.getParameter("telegramNotifyFailure"));
            boolean notifyBackToNormal = "true".equals(sr.getParameter("telegramNotifyBackToNormal"));
            boolean notifyRepeatedFailure = "true".equals(sr.getParameter("telegramNotifyRepeatedFailure"));
            boolean includeTestSummary = "true".equals(sr.getParameter("includeTestSummary"));
            CommitInfoChoice commitInfoChoice = CommitInfoChoice.forDisplayName(sr.getParameter("telegramCommitInfoChoice"));
            // NOTE(review): this checkbox compares against "on" while the
            // others compare against "true" — presumably intentional for this
            // form control; confirm against the config.jelly form.
            boolean includeCustomMessage = "on".equals(sr.getParameter("includeCustomMessage"));
            String customMessage = sr.getParameter("customMessage");
            return new TelegramNotifier(token, chatId, buildServerUrl, sendAs, startNotification,
                    notifyAborted, notifyFailure, notifyNotBuilt, notifySuccess, notifyUnstable,
                    notifyBackToNormal, notifyRepeatedFailure, includeTestSummary, commitInfoChoice,
                    includeCustomMessage, customMessage);
        }

        /**
         * Persists the global configuration form; normalizes the build server
         * URL to end with a trailing slash.
         */
        @Override
        public boolean configure(StaplerRequest sr, JSONObject formData) throws FormException {
            token = sr.getParameter("telegramToken");
            chatId = sr.getParameter("telegramChatId");
            buildServerUrl = sr.getParameter("telegramBuildServerUrl");
            sendAs = sr.getParameter("telegramSendAs");
            if (buildServerUrl == null || buildServerUrl.equals("")) {
                JenkinsLocationConfiguration jenkinsConfig = new JenkinsLocationConfiguration();
                buildServerUrl = jenkinsConfig.getUrl();
            }
            if (buildServerUrl != null && !buildServerUrl.endsWith("/")) {
                buildServerUrl = buildServerUrl + "/";
            }
            save();
            return super.configure(sr, formData);
        }

        // Package-private factory; overridden in tests to stub the service.
        // (Assumption based on visibility — TODO confirm against test sources.)
        TelegramService getTelegramService(final String authToken, final String chatId) {
            return new StandardTelegramService(authToken, chatId);
        }

        @Override
        public String getDisplayName() {
            return "Telegram Notifications";
        }

        /**
         * "Test Connection" form handler: publishes a probe message using the
         * supplied values, falling back to the stored global values when a
         * field is left empty.
         */
        public FormValidation doTestConnection(@QueryParameter("telegramToken") final String authToken,
                                               @QueryParameter("telegramChatId") final String chatId,
                                               @QueryParameter("telegramBuildServerUrl") final String buildServerUrl)
                throws FormException {
            try {
                String targetToken = authToken;
                if (StringUtils.isEmpty(targetToken)) {
                    targetToken = this.token;
                }
                String targetChatId = chatId;
                if (StringUtils.isEmpty(targetChatId)) {
                    targetChatId = this.chatId;
                }
                String targetBuildServerUrl = buildServerUrl;
                if (StringUtils.isEmpty(targetBuildServerUrl)) {
                    targetBuildServerUrl = this.buildServerUrl;
                }
                TelegramService testTelegramService = getTelegramService(targetToken, targetChatId);
                String message = "Telegram/Jenkins plugin: you're all set on " + targetBuildServerUrl;
                // "good" is presumably a status/color hint for the message —
                // confirm against TelegramService.publish.
                boolean success = testTelegramService.publish(message, "good");
                return success ? FormValidation.ok("Success") : FormValidation.error("Failure");
            } catch (Exception e) {
                return FormValidation.error("Client error : " + e.getMessage());
            }
        }
    }
}
package mcjty.immcraft.worldgen; import mcjty.immcraft.blocks.ModBlocks; import mcjty.immcraft.blocks.foliage.SticksTE; import mcjty.immcraft.blocks.generic.GenericBlock; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.init.Blocks; import net.minecraft.util.BlockPos; import net.minecraft.util.EnumFacing; import net.minecraft.world.World; import net.minecraft.world.chunk.IChunkProvider; import net.minecraftforge.fml.common.IWorldGenerator; import java.util.Random; public class ImmCraftGenerator implements IWorldGenerator { public static ImmCraftGenerator instance = new ImmCraftGenerator(); @Override public void generate(Random random, int chunkX, int chunkZ, World world, IChunkProvider chunkGenerator, IChunkProvider chunkProvider) { generateWorld(random, chunkX, chunkZ, world); } public void generateWorld(Random random, int chunkX, int chunkZ, World world) { // addOreSpawn(ModBlocks.resonatingOreBlock, (byte) 0, Blocks.stone, world, random, chunkX * 16, chunkZ * 16, // WorldGenConfiguration.minVeinSize, WorldGenConfiguration.maxVeinSize, WorldGenConfiguration.chancesToSpawn, WorldGenConfiguration.minY, WorldGenConfiguration.maxY); spawnRubble(random, chunkX, chunkZ, world); } public static final int ROCK_TRIES = 10; public static final int STICK_TRIES = 30; private void spawnRubble(Random random, int chunkX, int chunkZ, World world) { // Spawn above ground for (int i = 0 ; i < random.nextInt(ROCK_TRIES) ; i++) { int x = chunkX * 16 + random.nextInt(16); int z = chunkZ * 16 + random.nextInt(16); BlockPos pos = world.getTopSolidOrLiquidBlock(new BlockPos(x, 0, z)).down(); Block block = world.getBlockState(pos).getBlock(); if (isRockSpawnable(block)) { if (world.isAirBlock(pos.up())) { world.setBlockState(pos.up(), ModBlocks.rockBlock.getDefaultState().withProperty(GenericBlock.FACING_HORIZ, EnumFacing.getHorizontal(random.nextInt(4))), 3); } } } for (int i = 0 ; i < random.nextInt(STICK_TRIES) ; i++) { int x = 
chunkX * 16 + random.nextInt(16); int z = chunkZ * 16 + random.nextInt(16); BlockPos pos = world.getTopSolidOrLiquidBlock(new BlockPos(x, 0, z)).down(); Block block = world.getBlockState(pos).getBlock(); if (isStickSpawnable(block)) { trySpawnSticks(world, pos, random); } } // Spawn in caves for (int i = 0 ; i < random.nextInt(ROCK_TRIES) ; i++) { int x = chunkX * 16 + random.nextInt(16); int z = chunkZ * 16 + random.nextInt(16); BlockPos pos = world.getTopSolidOrLiquidBlock(new BlockPos(x, 0, z)).down(); int y = findCaveSpot(world, pos); pos = new BlockPos(x, y, z); Block block = world.getBlockState(pos).getBlock(); if (y != -1 && isRockSpawnable(block)) { if (world.isAirBlock(pos.up())) { world.setBlockState(pos.up(), ModBlocks.rockBlock.getDefaultState().withProperty(GenericBlock.FACING_HORIZ, EnumFacing.getHorizontal(random.nextInt(4))), 3); } } } } private void trySpawnSticks(World world, BlockPos pos, Random random) { int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); y++; if (!world.isAirBlock(new BlockPos(x, y, z))) { return; } y++; for (int i = 0 ; i < 15 ; i++) { if (!world.isAirBlock(new BlockPos(x, y, z))) { if (isLeafBlock(world.getBlockState(new BlockPos(x, y, z)).getBlock())) { world.setBlockState(pos.up(), ModBlocks.sticksBlock.getDefaultState().withProperty(GenericBlock.FACING_HORIZ, EnumFacing.getHorizontal(random.nextInt(4))), 3); SticksTE sticksTE = (SticksTE) world.getTileEntity(pos.up()); sticksTE.setSticks(random.nextInt(6)+1); } return; } y++; } } private int findCaveSpot(World world, BlockPos pos) { boolean air = false; int x = pos.getX(); int y = pos.getY(); int z = pos.getZ(); while (y > 1 && !air) { if (world.isAirBlock(new BlockPos(x, y, z))) { air = true; } y } if (air) { while (y > 1 && air) { if (!world.isAirBlock(new BlockPos(x, y, z))) { air = false; } else { y } } if (!air) { return y; } } return -1; } private boolean isStickSpawnable(Block block) { return block == Blocks.dirt || block == Blocks.grass; } private boolean 
isLeafBlock(Block block) { return block.getMaterial() == Material.leaves; } private boolean isRockSpawnable(Block block) { return block == Blocks.dirt || block == Blocks.grass || block == Blocks.stone; } // public void addOreSpawn(Block block, byte blockMeta, Block targetBlock, // World world, Random random, int blockXPos, int blockZPos, int minVeinSize, int maxVeinSize, int chancesToSpawn, int minY, int maxY) { // WorldGenMinable minable = new WorldGenMinable(block.getStateFromMeta(blockMeta), (minVeinSize - random.nextInt(maxVeinSize - minVeinSize)), targetBlock); // for (int i = 0 ; i < chancesToSpawn ; i++) { // int posX = blockXPos + random.nextInt(16); // int posY = minY + random.nextInt(maxY - minY); // int posZ = blockZPos + random.nextInt(16); // minable.generate(world, random, posX, posY, posZ); }
package net.bootsfaces.issues.issue280;

import java.io.Serializable;

import javax.faces.bean.ManagedBean;
import javax.faces.view.ViewScoped;

/**
 * View-scoped backing bean for the issue-280 demo page. Holds a simple order
 * (price, quantity, discount percentage) and computes the discounted total.
 */
@ManagedBean
@ViewScoped
public class BookshopBean implements Serializable {

	private static final long serialVersionUID = 1L;

	private double discount = 10.5;
	private double price = 34.90;
	private int quantity = 1;
	private double total = 0;

	/** Recomputes {@link #total}: quantity times the discounted unit price. */
	public void calculate() {
		final double discountFactor = (100.0 - discount) / 100.0;
		total = quantity * (price * discountFactor);
	}

	public double getDiscount() {
		return discount;
	}

	public void setDiscount(double discount) {
		this.discount = discount;
	}

	public double getPrice() {
		return price;
	}

	public void setPrice(double price) {
		this.price = price;
	}

	public int getQuantity() {
		return quantity;
	}

	public void setQuantity(int quantity) {
		this.quantity = quantity;
	}

	public double getTotal() {
		return total;
	}

	public void setTotal(double total) {
		this.total = total;
	}
}
package me.robertyang.codeic.block; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.block.properties.PropertyBool; import net.minecraft.block.state.BlockStateContainer; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.EntityLivingBase; import net.minecraft.item.ItemStack; import net.minecraft.util.math.BlockPos; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; public class BlockPortBlock extends Block{ public static final PropertyBool ISPOWERED = PropertyBool.create("ispowered"); public BlockPortBlock() { super(Material.GROUND); this.setDefaultState(this.blockState.getBaseState().withProperty(ISPOWERED, false)); } @Override protected BlockStateContainer createBlockState() { return new BlockStateContainer(this, ISPOWERED); } @Override public IBlockState getStateFromMeta(int meta) { Boolean isPowered = Boolean.valueOf((meta & 1) != 0); return this.getDefaultState().withProperty(BlockPortBlock.ISPOWERED, isPowered); } @Override public int getMetaFromState(IBlockState state) { int isPowered = state.getValue(ISPOWERED).booleanValue() ? 1 : 0; return isPowered; } @Override public void onBlockPlacedBy(World worldIn, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack) { worldIn.setBlockState(pos, this.getDefaultState().withProperty(ISPOWERED, false)); } /** * Called when a tile entity on a side of this block changes is created or is destroyed. * @param world The world * @param pos Block position in world * @param neighbor Block position of neighbor */ @Override public void onNeighborChange(IBlockAccess world, BlockPos pos, BlockPos neighbor){ } }
package ml.duncte123.skybot.utils; import ml.duncte123.skybot.Settings; import ml.duncte123.skybot.objects.ConsoleUser; import ml.duncte123.skybot.objects.FakeUser; import net.dv8tion.jda.bot.sharding.ShardManager; import net.dv8tion.jda.core.JDA; import net.dv8tion.jda.core.MessageBuilder; import net.dv8tion.jda.core.Permission; import net.dv8tion.jda.core.entities.Guild; import net.dv8tion.jda.core.entities.TextChannel; import net.dv8tion.jda.core.entities.User; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; public class ModerationUtils { private static Logger logger = LoggerFactory.getLogger(ModerationUtils.class); /** * This will send a message to a channel called modlog * * @param mod The mod that performed the punishment * @param punishedUser The user that got punished * @param punishment The type of punishment * @param reason The reason of the punishment * @param time How long it takes for the punishment to get removed * @param g A instance of the {@link Guild} */ public static void modLog(User mod, User punishedUser, String punishment, String reason, String time, Guild g){ TextChannel logChannel = AirUtils.getLogChannel(GuildSettingsUtils.getGuild(g).getLogChannel(), g); if(logChannel==null || !logChannel.getGuild().getSelfMember().hasPermission(logChannel, Permission.MESSAGE_WRITE, Permission.MESSAGE_READ)) return; String length = ""; if (time != null && !time.isEmpty()) { length = " lasting " + time + ""; } String punishedUserMention = "<@" + punishedUser.getId() + ">"; MessageBuilder message = new MessageBuilder() .append("_Relevant user: ") .append(punishedUserMention) .append("_") .setEmbed(EmbedUtils.embedField(punishedUser.getName() + " " + punishment, punishment + " by " + mod.getName() + length + (reason.isEmpty()?"":" for " + 
reason))); /*logChannel.sendMessage(EmbedUtils.embedField(punishedUser.getName() + " " + punishment, punishment + " by " + mod.getName() + length + (reason.isEmpty()?"":" for " + reason))).queue( msg -> msg.getTextChannel().sendMessage("_Relevant user: " + punishedUserMention + "_").queue() );*/ logChannel.sendMessage(message.build()).queue(); } /** * A version of {@link #modLog(User, User, String, String, String, Guild)} but without the time * * @param mod The mod that performed the punishment * @param punishedUser The user that got punished * @param punishment The type of punishment * @param reason The reason of the punishment * @param g A instance of the {@link Guild} */ public static void modLog(User mod, User punishedUser, String punishment, String reason, Guild g) { modLog(mod, punishedUser, punishment, reason, "", g); } /** * To log a unban or a unmute * * @param mod The mod that permed the executeCommand * @param unbannedUser The user that the executeCommand is for * @param punishment The type of punishment that got removed * @param g A instance of the {@link Guild} */ public static void modLog(User mod, User unbannedUser, String punishment, Guild g) { modLog(mod, unbannedUser, punishment, "", g); } /** * Add the banned user to the database * * @param modID The user id from the mod * @param userName The username from the banned user * @param userDiscriminator the discriminator from the user * @param userId the id from the banned users * @param unbanDate When we need to unban the user * @param guildId What guild the user got banned in */ public static void addBannedUserToDb(String modID, String userName, String userDiscriminator, String userId, String unbanDate, String guildId) { Map<String, Object> postFields = new TreeMap<>(); postFields.put("modId", modID); postFields.put("username", userName); postFields.put("discriminator", userDiscriminator); postFields.put("userId", userId); postFields.put("unbanDate", unbanDate); postFields.put("guildId", guildId); 
try { WebUtils.postRequest(Settings.apiBase + "/ban/json", postFields, WebUtils.AcceptType.URLENCODED, it -> { it.close(); return null; }); } catch (NullPointerException e) { e.printStackTrace(); } } /** * Returns the current amount of warnings that a user has * @param u the {@link User User} to check the warnings for * @return The current amount of warnings that a user has */ public static int getWarningCountForUser(User u, Guild g) { if(u == null) throw new IllegalArgumentException("User to check can not be null"); try { final int[] out = new int[1]; WebUtils.getJSONObject(String.format( "%s/getWarnsForUser/json?user_id=%s&guild_id=%s", Settings.apiBase, u.getId(), g.getId()) , it -> { out[0] = it.getJSONArray("warnings").length(); return null; }); return out[0]; } catch (IOException e) { e.printStackTrace(); return 0; } } /** * This attempts to register a warning in the database * @param moderator The mod that executed the warning * @param target The user to warn * @param reason the reason for the warn * @param jda a jda instance because we need the token for auth */ public static void addWarningToDb(User moderator, User target, String reason, Guild guild, JDA jda) { Map<String, Object> postFields = new HashMap<>(); postFields.put("mod_id", moderator.getId()); postFields.put("user_id", target.getId()); postFields.put("guild_id", guild.getId()); postFields.put("reason", reason.isEmpty()? 
"No Reason provided" : " for " + reason); postFields.put("token", jda.getToken()); try { WebUtils.postRequest(Settings.apiBase + "/addWarning/json", postFields, WebUtils.AcceptType.URLENCODED, it -> { it.close(); return null; }); } catch (NullPointerException e) { e.printStackTrace(); } } /** * This will check if there are users that can be unbanned * * @param shardManager the current shard manager for this bot */ public static void checkUnbans(ShardManager shardManager) { logger.debug("Checking for users to unban"); int usersUnbanned = 0; Connection database = AirUtils.db.getConnManager().getConnection(); try { Statement smt = database.createStatement(); ResultSet res = smt.executeQuery("SELECT * FROM " + AirUtils.db.getName() + ".bans"); while (res.next()) { java.util.Date unbanDate = res.getTimestamp("unban_date"); java.util.Date currDate = new java.util.Date(); if (currDate.after(unbanDate)) { usersUnbanned++; logger.debug("Unbanning " + res.getString("Username")); try { shardManager.getGuildCache().getElementById(res.getString("guildId")).getController() .unban(res.getString("userId")).reason("Ban expired").queue(); modLog(new ConsoleUser(), new FakeUser(res.getString("Username"), res.getString("userId"), res.getString("discriminator")), "unbanned", shardManager.getGuildById(res.getString("guildId"))); } catch (NullPointerException ignored) { } database.createStatement().executeUpdate("DELETE FROM " + AirUtils.db.getName() + ".bans WHERE id=" + res.getInt("id") + ""); } } logger.debug("Checking done, unbanned " + usersUnbanned + " users."); } catch (Exception e) { e.printStackTrace(); } finally { try { database.close(); } catch (SQLException e2) { e2.printStackTrace(); } } } }
package net.bull.javamelody;

import java.io.IOException;

import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.model.RoleConstants;
import com.liferay.portal.service.UserLocalServiceUtil;
import com.liferay.portal.util.PortalUtil;

/**
 * Filter of monitoring JavaMelody for Liferay.
 *
 * @author Emeric Vernat
 */
public class LiferayMonitoringFilter extends PluginMonitoringFilter {
	/** {@inheritDoc} */
	@Override
	public String getApplicationType() {
		return "Liferay";
	}

	/** {@inheritDoc} */
	@Override
	public void init(FilterConfig config) throws ServletException {
		// rewrap datasources in GlobalNamingResources with ResourceLink in
		// context.xml
		setSystemParameter(Parameter.REWRAP_DATASOURCES, Boolean.TRUE.toString());
		if (Parameters.getParameter(Parameter.SQL_TRANSFORM_PATTERN) == null) {
			setSystemParameter(Parameter.SQL_TRANSFORM_PATTERN, "\\([\\?, ]+\\)");
		}
		if (Parameters.getParameter(Parameter.DISPLAYED_COUNTERS) == null) {
			// disable jsp counter to fix
			// the jsp counter does not display anything anyway.
			// In consequence, jsf, job, ejb, jpa, spring, guice are also
			// disabled.
			setSystemParameter(Parameter.DISPLAYED_COUNTERS, "http,sql,error,log");
		}
		super.init(config);
		LOG.debug("JavaMelody is monitoring Liferay");
	}

	// Publishes a javamelody parameter as a system property.
	private static void setSystemParameter(Parameter parameter, String value) {
		System.setProperty(Parameters.PARAMETER_SYSTEM_PREFIX + parameter.getCode(), value);
	}

	/** {@inheritDoc} */
	@Override
	public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
			throws IOException, ServletException {
		if (request instanceof HttpServletRequest) {
			final HttpServletRequest httpRequest = (HttpServletRequest) request;
			// Requests on the monitoring page are restricted to administrators.
			if (httpRequest.getRequestURI().equals(getMonitoringUrl(httpRequest))
					&& !checkAdminAccess(httpRequest, (HttpServletResponse) response)) {
				return;
			}
		}
		super.doFilter(request, response, chain);
	}

	// Returns true when the caller is an administrator; otherwise sends a 403
	// and returns false. Any failure is rethrown wrapped in ServletException,
	// matching the original error handling.
	private boolean checkAdminAccess(HttpServletRequest httpRequest, HttpServletResponse httpResponse)
			throws ServletException {
		try {
			if (isAdmin(httpRequest)) {
				return true;
			}
			LOG.debug("Forbidden access to monitoring from " + httpRequest.getRemoteAddr());
			httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN, "Forbidden access");
			httpResponse.flushBuffer();
			return false;
		} catch (final Exception e) {
			throw new ServletException(e);
		}
	}

	// Checks whether the current Liferay user holds the administrator role.
	private boolean isAdmin(HttpServletRequest httpRequest) throws PortalException, SystemException {
		final long userId = PortalUtil.getUserId(httpRequest);
		final long companyId = PortalUtil.getDefaultCompanyId();
		return UserLocalServiceUtil.hasRoleUser(companyId, RoleConstants.ADMINISTRATOR, userId, true);
	}
}
package net.floodlightcontroller.util; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayDeque; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.projectfloodlight.openflow.protocol.OFActionCopyField; import org.projectfloodlight.openflow.protocol.OFFactories; import org.projectfloodlight.openflow.protocol.OFFactory; import org.projectfloodlight.openflow.protocol.OFFlowMod; import org.projectfloodlight.openflow.protocol.OFOxmList; import org.projectfloodlight.openflow.protocol.OFVersion; import org.projectfloodlight.openflow.protocol.action.OFAction; import org.projectfloodlight.openflow.protocol.action.OFActionEnqueue; import org.projectfloodlight.openflow.protocol.action.OFActionExperimenter; import org.projectfloodlight.openflow.protocol.action.OFActionGroup; import org.projectfloodlight.openflow.protocol.action.OFActionMeter; import org.projectfloodlight.openflow.protocol.action.OFActionOutput; import org.projectfloodlight.openflow.protocol.action.OFActionPopMpls; import org.projectfloodlight.openflow.protocol.action.OFActionPushMpls; import org.projectfloodlight.openflow.protocol.action.OFActionPushPbb; import org.projectfloodlight.openflow.protocol.action.OFActionPushVlan; import org.projectfloodlight.openflow.protocol.action.OFActionSetDlDst; import org.projectfloodlight.openflow.protocol.action.OFActionSetDlSrc; import org.projectfloodlight.openflow.protocol.action.OFActionSetField; import org.projectfloodlight.openflow.protocol.action.OFActionSetMplsLabel; import org.projectfloodlight.openflow.protocol.action.OFActionSetMplsTc; import org.projectfloodlight.openflow.protocol.action.OFActionSetMplsTtl; import org.projectfloodlight.openflow.protocol.action.OFActionSetNwDst; import org.projectfloodlight.openflow.protocol.action.OFActionSetNwEcn; import 
org.projectfloodlight.openflow.protocol.action.OFActionSetNwSrc; import org.projectfloodlight.openflow.protocol.action.OFActionSetNwTos; import org.projectfloodlight.openflow.protocol.action.OFActionSetNwTtl; import org.projectfloodlight.openflow.protocol.action.OFActionSetQueue; import org.projectfloodlight.openflow.protocol.action.OFActionSetTpDst; import org.projectfloodlight.openflow.protocol.action.OFActionSetTpSrc; import org.projectfloodlight.openflow.protocol.action.OFActionSetVlanPcp; import org.projectfloodlight.openflow.protocol.action.OFActionSetVlanVid; import org.projectfloodlight.openflow.protocol.oxm.OFOxm; import org.projectfloodlight.openflow.protocol.oxm.OFOxmActsetOutput; import org.projectfloodlight.openflow.protocol.oxm.OFOxmArpOp; import org.projectfloodlight.openflow.protocol.oxm.OFOxmArpSha; import org.projectfloodlight.openflow.protocol.oxm.OFOxmArpSpa; import org.projectfloodlight.openflow.protocol.oxm.OFOxmArpTha; import org.projectfloodlight.openflow.protocol.oxm.OFOxmArpTpa; import org.projectfloodlight.openflow.protocol.oxm.OFOxmEthDst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmEthSrc; import org.projectfloodlight.openflow.protocol.oxm.OFOxmEthType; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIcmpv4Code; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIcmpv4Type; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIcmpv6Code; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIcmpv6Type; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpDscp; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpEcn; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpProto; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv4Dst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv4Src; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6Dst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6Exthdr; import 
org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6Flabel; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6NdSll; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6NdTarget; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6NdTll; import org.projectfloodlight.openflow.protocol.oxm.OFOxmIpv6Src; import org.projectfloodlight.openflow.protocol.oxm.OFOxmMetadata; import org.projectfloodlight.openflow.protocol.oxm.OFOxmMplsBos; import org.projectfloodlight.openflow.protocol.oxm.OFOxmMplsLabel; import org.projectfloodlight.openflow.protocol.oxm.OFOxmMplsTc; import org.projectfloodlight.openflow.protocol.oxm.OFOxmPacketType; import org.projectfloodlight.openflow.protocol.oxm.OFOxmSctpDst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmSctpSrc; import org.projectfloodlight.openflow.protocol.oxm.OFOxmTcpDst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmTcpFlags; import org.projectfloodlight.openflow.protocol.oxm.OFOxmTcpSrc; import org.projectfloodlight.openflow.protocol.oxm.OFOxmUdpDst; import org.projectfloodlight.openflow.protocol.oxm.OFOxmUdpSrc; import org.projectfloodlight.openflow.protocol.oxm.OFOxmVlanPcp; import org.projectfloodlight.openflow.protocol.oxm.OFOxmVlanVid; import org.projectfloodlight.openflow.types.ArpOpcode; import org.projectfloodlight.openflow.types.EthType; import org.projectfloodlight.openflow.types.ICMPv4Code; import org.projectfloodlight.openflow.types.ICMPv4Type; import org.projectfloodlight.openflow.types.IPv4Address; import org.projectfloodlight.openflow.types.IPv6Address; import org.projectfloodlight.openflow.types.IPv6FlowLabel; import org.projectfloodlight.openflow.types.IpDscp; import org.projectfloodlight.openflow.types.IpEcn; import org.projectfloodlight.openflow.types.IpProtocol; import org.projectfloodlight.openflow.types.MacAddress; import org.projectfloodlight.openflow.types.OFBooleanValue; import org.projectfloodlight.openflow.types.OFMetadata; import 
org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.OFVlanVidMatch;
import org.projectfloodlight.openflow.types.TransportPort;
import org.projectfloodlight.openflow.types.U16;
import org.projectfloodlight.openflow.types.U32;
import org.projectfloodlight.openflow.types.U64;
import org.projectfloodlight.openflow.types.U8;
import org.projectfloodlight.openflow.types.VlanPcp;
import org.projectfloodlight.openflow.types.VlanVid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.JsonGenerator.Feature;
import com.fasterxml.jackson.core.JsonParser;

/**
 * OFAction helper functions. Use with any OpenFlowJ-Loxi Action.
 * String utility functions for converting OFActions to and from
 * dpctl/ofctl-style strings, which is primarily used by the
 * static flow pusher.
 *
 * Includes string methods refactored from StaticFlowEntryPusher.
 *
 * @author Ryan Izard &lt;ryan.izard@bigswitch.com, rizard@g.clemson.edu&gt;
 */
public class ActionUtils {
    private static final Logger log = LoggerFactory.getLogger(ActionUtils.class);

    /* OF1.3 ACTIONS (includes OF1.0).
     * dpctl/ofctl-style action keys recognized by actionsToString() and
     * fromString(); an action's argument, if any, follows the key after
     * an '=' (e.g. "output=1"). */
    public static final String STR_OUTPUT = "output";
    public static final String STR_ENQUEUE = "enqueue";
    public static final String STR_VLAN_STRIP = "strip_vlan";
    public static final String STR_VLAN_POP = "pop_vlan";
    public static final String STR_VLAN_PUSH = "push_vlan";
    public static final String STR_VLAN_SET_PCP = "set_vlan_pcp";
    public static final String STR_VLAN_SET_VID = "set_vlan_vid";
    public static final String STR_QUEUE_SET = "set_queue";
    public static final String STR_DL_SRC_SET = "set_eth_src";
    public static final String STR_DL_DST_SET = "set_eth_dst";
    public static final String STR_NW_SRC_SET = "set_ipv4_src";
    public static final String STR_NW_DST_SET = "set_ipv4_dst";
    public static final String STR_NW_ECN_SET = "set_ip_ecn";
    public static final String STR_NW_TOS_SET = "set_ip_tos";
    public static final String STR_NW_TTL_SET = "set_ip_ttl";
    public static final String STR_NW_TTL_DEC = "dec_ip_ttl";
    public static final String STR_TTL_IN_COPY = "copy_ip_ttl_in";
    public static final String STR_TTL_OUT_COPY = "copy_ip_ttl_out";
    public static final String STR_MPLS_LABEL_SET = "set_mpls_label";
    public static final String STR_MPLS_TC_SET = "set_mpls_tc";
    public static final String STR_MPLS_TTL_SET = "set_mpls_ttl";
    public static final String STR_MPLS_TTL_DEC = "dec_mpls_ttl";
    public static final String STR_MPLS_PUSH = "push_mpls";
    public static final String STR_MPLS_POP = "pop_mpls";
    public static final String STR_TP_SRC_SET = "set_tp_src";
    public static final String STR_TP_DST_SET = "set_tp_dst";
    public static final String STR_PBB_PUSH = "push_pbb";
    public static final String STR_PBB_POP = "pop_pbb";
    public static final String STR_GROUP = "group";
    public static final String STR_FIELD_SET = "set_field";
    public static final String STR_FIELD_COPY = "copy_field";
    public static final String STR_METER = "meter";
    public static final String STR_EXPERIMENTER = "experimenter";
    public static final String STR_NOT_APPLICABLE = "n/a";

    /* OF1.3 set-field operations are defined as any OF1.3 match.
     * We will borrow MatchUtils's String definitions of all OF1.3
     * set-field operations to be consistent. */

    // factory reused for the copy_field JSON encoding/decoding helpers
    private static final JsonFactory jsonFactory = new JsonFactory();
    private static final String JSON_EMPTY_OBJECT = "{}";

    /**
     * Returns a String representation of all the OpenFlow actions.
     * @param actions A list of OFActions to encode into one string
     * @return A dpctl-style string of the actions
     */
    public static String actionsToString(List<OFAction> actions) {
        StringBuilder sb = new StringBuilder();
        for (OFAction a : actions) {
            if (sb.length() > 0) {
                sb.append(','); // actions are comma-separated
            }
            switch(a.getType()) {
            case OUTPUT:
                sb.append(STR_OUTPUT).append("=").append(ActionUtils.portToString(((OFActionOutput)a).getPort()));
                break;
            case ENQUEUE:
                long queue = ((OFActionEnqueue)a).getQueueId();
                // format is enqueue=<port>:0x<queue-id-in-hex>
                sb.append(STR_ENQUEUE).append("=").append(portToString(((OFActionEnqueue)a).getPort())).append(":0x").append(String.format("%02x", queue));
                break;
            case STRIP_VLAN:
                sb.append(STR_VLAN_STRIP);
                break;
            case POP_VLAN:
                sb.append(STR_VLAN_POP);
                break;
            case PUSH_VLAN:
                sb.append(STR_VLAN_PUSH).append("=").append(Integer.toString(((OFActionPushVlan)a).getEthertype().getValue()));
                break;
            case SET_VLAN_VID:
                sb.append(STR_VLAN_SET_VID).append("=").append(Short.toString(((OFActionSetVlanVid)a).getVlanVid().getVlan()));
                break;
            case SET_VLAN_PCP:
                sb.append(STR_VLAN_SET_PCP).append("=").append(Byte.toString(((OFActionSetVlanPcp)a).getVlanPcp().getValue()));
                break;
            case SET_QUEUE:
                sb.append(STR_QUEUE_SET).append("=").append(Long.toString(((OFActionSetQueue)a).getQueueId()));
                break;
            case SET_DL_SRC:
                sb.append(STR_DL_SRC_SET).append("=").append(((OFActionSetDlSrc)a).getDlAddr().toString());
                break;
            case SET_DL_DST:
                sb.append(STR_DL_DST_SET).append("=").append(((OFActionSetDlDst)a).getDlAddr().toString());
                break;
            case SET_NW_ECN:
                sb.append(STR_NW_ECN_SET).append("=").append(Byte.toString(((OFActionSetNwEcn)a).getNwEcn().getEcnValue()));
                break;
            case SET_NW_TOS:
                sb.append(STR_NW_TOS_SET).append("=").append(Short.toString(((OFActionSetNwTos)a).getNwTos()));
                break;
            case SET_NW_TTL:
                sb.append(STR_NW_TTL_SET).append("=").append(Short.toString(((OFActionSetNwTtl)a).getNwTtl()));
                break;
            case DEC_NW_TTL:
                sb.append(STR_NW_TTL_DEC);
                break;
            case SET_MPLS_LABEL:
                sb.append(STR_MPLS_LABEL_SET).append("=").append(Long.toString(((OFActionSetMplsLabel)a).getMplsLabel()));
                break;
            case SET_MPLS_TC:
                sb.append(STR_MPLS_TC_SET).append("=").append(Short.toString(((OFActionSetMplsTc)a).getMplsTc()));
                break;
            case SET_MPLS_TTL:
                sb.append(STR_MPLS_TTL_SET).append("=").append(Short.toString(((OFActionSetMplsTtl)a).getMplsTtl()));
                break;
            case DEC_MPLS_TTL:
                sb.append(STR_MPLS_TTL_DEC);
                break;
            case PUSH_MPLS:
                sb.append(STR_MPLS_PUSH).append("=").append(Integer.toString(((OFActionPushMpls)a).getEthertype().getValue()));
                break;
            case POP_MPLS:
                sb.append(STR_MPLS_POP).append("=").append(Integer.toString(((OFActionPopMpls)a).getEthertype().getValue()));
                break;
            case SET_NW_SRC:
                sb.append(STR_NW_SRC_SET).append("=").append(((OFActionSetNwSrc)a).getNwAddr().toString());
                break;
            case SET_NW_DST:
                sb.append(STR_NW_DST_SET).append("=").append(((OFActionSetNwDst)a).getNwAddr().toString());
                break;
            case SET_TP_SRC:
                sb.append(STR_TP_SRC_SET).append("=").append(((OFActionSetTpSrc)a).getTpPort().toString());
                break;
            case SET_TP_DST:
                sb.append(STR_TP_DST_SET).append("=").append(((OFActionSetTpDst)a).getTpPort().toString());
                break;
            case COPY_TTL_IN:
                sb.append(STR_TTL_IN_COPY);
                break;
            case COPY_TTL_OUT:
                sb.append(STR_TTL_OUT_COPY);
                break;
            case PUSH_PBB:
                sb.append(STR_PBB_PUSH).append("=").append(Integer.toString(((OFActionPushPbb)a).getEthertype().getValue()));
                break;
            case POP_PBB:
                sb.append(STR_PBB_POP);
                break;
            case EXPERIMENTER:
                sb.append(STR_EXPERIMENTER).append("=").append(Long.toString(((OFActionExperimenter)a).getExperimenter()));
                break;
            case GROUP:
                sb.append(STR_GROUP).append("=").append(Integer.toString(((OFActionGroup)a).getGroup().getGroupNumber()));
                break;
            case SET_FIELD:
                // OF1.2+ set-field: the emitted key is set_field=<match-name><delim><value>,
                // with match-name/delim borrowed from MatchUtils for consistency
                log.debug("Got Set-Field action. Setting {}", ((OFActionSetField)a));
                /* ARP */
                if (((OFActionSetField)a).getField() instanceof OFOxmArpOp) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ARP_OPCODE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmArpOp) ((OFActionSetField) a).getField()).getValue().getOpcode()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmArpSha) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ARP_SHA)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmArpSha) ((OFActionSetField) a).getField()).getValue().toString()); // macaddress formats string already
                } else if (((OFActionSetField)a).getField() instanceof OFOxmArpTha) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ARP_DHA)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmArpTha) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmArpSpa) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ARP_SPA)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmArpSpa) ((OFActionSetField) a).getField()).getValue().toString()); // ipaddress formats string already
                } else if (((OFActionSetField)a).getField() instanceof OFOxmArpTpa) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ARP_DPA)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmArpTpa) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6NdSll) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_ND_SLL)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6NdSll) ((OFActionSetField) a).getField()).getValue().toString()); // macaddress formats string already
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6NdTll) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_ND_TLL)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6NdTll) ((OFActionSetField) a).getField()).getValue().toString()); // macaddress formats string already
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6NdTarget) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_ND_TARGET)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6NdTarget) ((OFActionSetField) a).getField()).getValue().toString());
                }
                /* DATA LAYER */
                else if (((OFActionSetField)a).getField() instanceof OFOxmEthType) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_DL_TYPE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmEthType) ((OFActionSetField) a).getField()).getValue().getValue()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmEthSrc) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_DL_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmEthSrc) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmEthDst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_DL_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmEthDst) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmVlanVid) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_DL_VLAN)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmVlanVid) ((OFActionSetField) a).getField()).getValue().getVlan()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmVlanPcp) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_DL_VLAN_PCP)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Byte.toString(((OFOxmVlanPcp) ((OFActionSetField) a).getField()).getValue().getValue()));
                }
                /* ICMP */
                else if (((OFActionSetField)a).getField() instanceof OFOxmIcmpv4Code) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ICMP_CODE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmIcmpv4Code) ((OFActionSetField) a).getField()).getValue().getCode()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIcmpv4Type) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ICMP_TYPE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmIcmpv4Type) ((OFActionSetField) a).getField()).getValue().getType()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIcmpv6Code) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ICMPV6_CODE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmIcmpv6Code) ((OFActionSetField) a).getField()).getValue().getRaw()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIcmpv6Type) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ICMPV6_TYPE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmIcmpv6Type) ((OFActionSetField) a).getField()).getValue().getRaw()));
                }
                /* NETWORK LAYER */
                else if (((OFActionSetField)a).getField() instanceof OFOxmIpProto) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_NW_PROTO)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmIpProto) ((OFActionSetField) a).getField()).getValue().getIpProtocolNumber()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv4Src) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_NW_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv4Src) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv4Dst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_NW_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv4Dst) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6Src) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6Src) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6Dst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6Dst) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6Flabel) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_FLOW_LABEL)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6Flabel) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpv6Exthdr) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_IPV6_EXTHDR)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(((OFOxmIpv6Exthdr) ((OFActionSetField) a).getField()).getValue().toString());
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpEcn) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_NW_ECN)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Byte.toString(((OFOxmIpEcn) ((OFActionSetField) a).getField()).getValue().getEcnValue()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmIpDscp) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_NW_DSCP)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Byte.toString(((OFOxmIpDscp) ((OFActionSetField) a).getField()).getValue().getDscpValue()));
                }
                /* TRANSPORT LAYER, TCP, UDP, and SCTP */
                else if (((OFActionSetField)a).getField() instanceof OFOxmTcpSrc) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_TCP_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmTcpSrc) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmTcpDst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_TCP_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmTcpDst) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmUdpSrc) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_UDP_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmUdpSrc) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmUdpDst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_UDP_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmUdpDst) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmSctpSrc) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_SCTP_SRC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmSctpSrc) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmSctpDst) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_SCTP_DST)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmSctpDst) ((OFActionSetField) a).getField()).getValue().getPort()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmTcpFlags) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_TCP_FLAGS)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmTcpFlags) ((OFActionSetField) a).getField()).getValue().getValue()));
                }
                /* MPLS */
                else if (((OFActionSetField)a).getField() instanceof OFOxmMplsLabel) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_MPLS_LABEL)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Long.toString(((OFOxmMplsLabel) ((OFActionSetField) a).getField()).getValue().getValue()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmMplsTc) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_MPLS_TC)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Short.toString(((OFOxmMplsTc) ((OFActionSetField) a).getField()).getValue().getValue()));
                } else if (((OFActionSetField)a).getField() instanceof OFOxmMplsBos) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_MPLS_BOS)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Boolean.toString(((OFOxmMplsBos) ((OFActionSetField) a).getField()).getValue().getValue()));
                }
                /* ACTSET_OUTPUT */
                else if (((OFActionSetField)a).getField() instanceof OFOxmActsetOutput) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_ACTSET_OUTPUT)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmActsetOutput) ((OFActionSetField) a).getField()).getValue().getPortNumber()));
                }
                /* PACKET_TYPE */
                else if (((OFActionSetField)a).getField() instanceof OFOxmPacketType) {
                    // TODO hard-coded "/" as delimiter...fix this
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_PACKET_TYPE)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Integer.toString(((OFOxmPacketType) ((OFActionSetField) a).getField()).getValue().getNamespace()))
                    .append("/")
                    .append(Integer.toString(((OFOxmPacketType) ((OFActionSetField) a).getField()).getValue().getNsType()));
                }
                /* METADATA */
                else if (((OFActionSetField)a).getField() instanceof OFOxmMetadata) {
                    sb.append(STR_FIELD_SET).append("=").append(MatchUtils.STR_METADATA)
                    .append(MatchUtils.SET_FIELD_DELIM)
                    .append(Long.toString(((OFOxmMetadata) ((OFActionSetField) a).getField()).getValue().getValue().getValue()));
                } else {
                    // unsupported OXM inside a set-field; emit nothing for it
                    log.error("Could not decode Set-Field action field: {}", ((OFActionSetField) a));
                }
                break;
            case COPY_FIELD:
                sb.append(STR_FIELD_COPY).append("=").append(copyFieldToJson((OFActionCopyField) a));
                break;
            case METER:
                sb.append(STR_METER).append("=").append(Long.toString(((OFActionMeter)a).getMeterId()));
                break;
            default:
                // unknown action type; skipped in the output
                log.error("Could not decode action: {}", a);
                break;
            }
        }
        return sb.toString();
    }

    /**
     * Parses a dpctl/ofctl-style, comma-separated action string (as produced
     * by {@link #actionsToString(List)}) into a list of OFActions built with
     * the factory for the given OpenFlow version. Tokens that are unknown or
     * that fail to parse are logged and skipped rather than aborting the
     * whole parse.
     * @param s comma-separated "key=value" action string (case-insensitive)
     * @param v OpenFlow protocol version whose factory builds the actions
     * @return the parsed actions; an empty list if s is null or blank
     */
    public static List<OFAction> fromString(String s, OFVersion v) {
        List<OFAction> actions = new LinkedList<OFAction>();
        OFFactory f = OFFactories.getFactory(v);
        if (s != null && !s.trim().isEmpty()) {
            s = s.toLowerCase();
            String[] bigStringSplit = s.split(","); // split into separate action=value or action=key@value pairs
            String[] tmp;
            ArrayDeque<String[]> actionToDecode = new ArrayDeque<String[]>();
            for (int i = 0; i < bigStringSplit.length; i++) {
                tmp = bigStringSplit[i].split("="); // split into separate [action, value] or [action, key@value] singles
                if (tmp.length != 2) {
                    log.debug("Token " + bigStringSplit[i] + " does not have form 'key=value' parsing " + s);
                }
                actionToDecode.add(tmp); // actionToDecode contains [key, value] pairs. Create a queue of pairs to process.
            }
            while (!actionToDecode.isEmpty()) {
                String[] keyPair = actionToDecode.pollFirst();
                String key;
                String pair;
                if (keyPair.length != 2) {
                    log.debug("[Key, Value] {} does not have form 'key=value' parsing, which is okay for some actions e.g. 'pop_vlan'.", keyPair);
                    key = keyPair[0]; // could be the case of a constant action (e.g. copy_ttl_in)
                    pair = "";
                } else {
                    key = keyPair[0];
                    pair = keyPair[1];
                }
                OFAction a = null;
                try {
                    switch (key) {
                    case STR_OUTPUT:
                        a = decode_output(pair, v);
                        break;
                    case STR_ENQUEUE:
                        a = decode_enqueue(pair, v);
                        break;
                    case STR_DL_SRC_SET:
                        a = decode_set_src_mac(pair, v);
                        break;
                    case STR_DL_DST_SET:
                        a = decode_set_dst_mac(pair, v);
                        break;
                    case STR_EXPERIMENTER:
                        //no-op. Not implemented
                        log.error("OFAction EXPERIMENTER not implemented.");
                        break;
                    case STR_FIELD_SET:
                        /* ONLY OF1.1+ should get in here. These should only
                         * be header fields valid within a set-field. */
                        String[] actionData = pair.split(MatchUtils.SET_FIELD_DELIM);
                        if (actionData.length != 2) {
                            // NOTE(review): concatenating actionData prints the array's
                            // identity hash, not its contents; Arrays.toString would be clearer
                            throw new IllegalArgumentException("[Action, Data] " + keyPair + " does not have form 'action=data'" + actionData);
                        }
                        switch (actionData[0]) {
                        case MatchUtils.STR_ARP_OPCODE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildArpOp()
                                            .setValue(ArpOpcode.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ARP_SHA:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildArpSha().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_ARP_DHA:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildArpTha().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_ARP_SPA:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildArpSpa().setValue(IPv4Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_ARP_DPA:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildArpTpa().setValue(IPv4Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_ND_SLL:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6NdSll().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_ND_TLL:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6NdTll().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_ND_TARGET:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6NdTarget().setValue(IPv6Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_DL_TYPE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildEthType()
                                            .setValue(EthType.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_DL_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildEthSrc().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_DL_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildEthDst().setValue(MacAddress.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_DL_VLAN:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildVlanVid()
                                            .setValue(OFVlanVidMatch.ofVlan(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_DL_VLAN_PCP:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildVlanPcp()
                                            .setValue(VlanPcp.of(ParseUtils.parseHexOrDecByte(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ICMP_CODE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIcmpv4Code()
                                            .setValue(ICMPv4Code.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ICMP_TYPE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIcmpv4Type()
                                            .setValue(ICMPv4Type.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ICMPV6_CODE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIcmpv6Code()
                                            .setValue(U8.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ICMPV6_TYPE:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIcmpv6Type()
                                            .setValue(U8.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_NW_PROTO:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpProto()
                                            .setValue(IpProtocol.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_NW_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv4Src().setValue(IPv4Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_NW_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv4Dst().setValue(IPv4Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6Src().setValue(IPv6Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6Dst().setValue(IPv6Address.of(actionData[1])).build())
                                    .build();
                            break;
                        case MatchUtils.STR_IPV6_FLOW_LABEL:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpv6Flabel()
                                            .setValue(IPv6FlowLabel.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_NW_ECN:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpEcn()
                                            .setValue(IpEcn.of(ParseUtils.parseHexOrDecByte(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_NW_DSCP:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildIpDscp()
                                            .setValue(IpDscp.of(ParseUtils.parseHexOrDecByte(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_SCTP_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildSctpSrc()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_SCTP_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildSctpDst()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_TCP_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildTcpSrc()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_TCP_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildTcpDst()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_UDP_SRC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildUdpSrc()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_UDP_DST:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildUdpDst()
                                            .setValue(TransportPort.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_MPLS_LABEL:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildMplsLabel()
                                            .setValue(U32.of(ParseUtils.parseHexOrDecLong(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_MPLS_TC:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildMplsTc()
                                            .setValue(U8.of(ParseUtils.parseHexOrDecShort(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_MPLS_BOS:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildMplsBos()
                                            .setValue(OFBooleanValue.of(Boolean.parseBoolean(actionData[1])))
                                            .build()) // interprets anything other than "true" as false
                                    .build();
                            break;
                        case MatchUtils.STR_METADATA:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildMetadata()
                                            .setValue(OFMetadata.of(U64.of(ParseUtils.parseHexOrDecLong(actionData[1]))))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_ACTSET_OUTPUT:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildActsetOutput()
                                            .setValue(portFromString(actionData[1]))
                                            .build())
                                    .build();
                            break;
                        case MatchUtils.STR_TCP_FLAGS:
                            a = f.actions().buildSetField()
                                    .setField(f.oxms().buildTcpFlags()
                                            .setValue(U16.of(ParseUtils.parseHexOrDecInt(actionData[1])))
                                            .build())
                                    .build();
                            break;
                        default:
                            log.error("Unexpected OF1.2+ setfield '{}'", actionData);
                            break;
                        }
                        break;
                    case STR_GROUP:
                        a = f.actions().buildGroup()
                                .setGroup(GroupUtils.groupIdFromString(pair))
                                .build();
                        break;
                    case STR_MPLS_LABEL_SET:
                        a = f.actions().buildSetMplsLabel()
                                .setMplsLabel(ParseUtils.parseHexOrDecLong(pair))
                                .build();
                        break;
                    case STR_MPLS_POP:
                        a = f.actions().buildPopMpls()
                                .setEthertype(EthType.of(ParseUtils.parseHexOrDecInt(pair)))
                                .build();
                        break;
                    case STR_MPLS_PUSH:
                        a = f.actions().buildPushMpls()
                                .setEthertype(EthType.of(ParseUtils.parseHexOrDecInt(pair)))
                                .build();
                        break;
                    case STR_MPLS_TC_SET:
                        a = f.actions().buildSetMplsTc()
                                .setMplsTc(ParseUtils.parseHexOrDecShort(pair))
                                .build();
                        break;
                    case STR_MPLS_TTL_DEC:
                        a = f.actions().decMplsTtl();
                        break;
                    case STR_MPLS_TTL_SET:
                        a = f.actions().buildSetMplsTtl()
                                .setMplsTtl(ParseUtils.parseHexOrDecShort(pair))
                                .build();
                        break;
                    case STR_NW_TOS_SET:
                        a = decode_set_tos_bits(pair, v); // should only be used by OF1.0
                        break;
                    case STR_NW_SRC_SET:
                        a = decode_set_src_ip(pair, v);
                        break;
                    case STR_NW_DST_SET:
                        a = decode_set_dst_ip(pair, v);
                        break;
                    case STR_NW_ECN_SET: // loxi does not support DSCP set for OF1.1
                        a = f.actions().buildSetNwEcn()
                                .setNwEcn(IpEcn.of(ParseUtils.parseHexOrDecByte(pair)))
                                .build();
                        break;
                    case STR_NW_TTL_DEC:
                        a = f.actions().decNwTtl();
                        break;
                    case STR_NW_TTL_SET:
                        a = f.actions().buildSetNwTtl()
                                .setNwTtl(ParseUtils.parseHexOrDecShort(pair))
                                .build();
                        break;
                    case STR_PBB_POP:
                        a = f.actions().popPbb();
                        break;
                    case STR_PBB_PUSH:
                        a = f.actions().buildPushPbb()
                                .setEthertype(EthType.of(ParseUtils.parseHexOrDecInt(pair)))
                                .build();
                        break;
                    case STR_QUEUE_SET:
                        a = f.actions().buildSetQueue()
                                .setQueueId(ParseUtils.parseHexOrDecLong(pair))
                                .build();
                        break;
                    case STR_TP_SRC_SET:
                        a = decode_set_src_port(pair, v);
                        break;
                    case STR_TP_DST_SET:
                        a = decode_set_dst_port(pair, v);
                        break;
                    case STR_TTL_IN_COPY:
                        a = f.actions().copyTtlIn();
                        break;
                    case STR_TTL_OUT_COPY:
                        a = f.actions().copyTtlOut();
                        break;
                    case STR_VLAN_POP:
                        a = f.actions().popVlan();
                        break;
                    case STR_VLAN_PUSH:
                        a = f.actions().buildPushVlan()
                                .setEthertype(EthType.of(ParseUtils.parseHexOrDecInt(pair)))
                                .build();
                        break;
                    case STR_VLAN_STRIP:
                        a = f.actions().stripVlan();
                        break;
                    case STR_VLAN_SET_VID:
                        a = decode_set_vlan_id(pair, v);
                        break;
                    case STR_VLAN_SET_PCP:
                        a = decode_set_vlan_priority(pair, v);
                        break;
                    case STR_METER:
                        a = f.actions().buildMeter()
                                .setMeterId(ParseUtils.parseHexOrDecLong(pair))
                                .build();
                        break;
                    case STR_FIELD_COPY:
                        a = (OFAction) copyFieldFromJson(pair, f.getVersion());
                        break;
                    default:
                        log.error("Unexpected action key '{}'", keyPair);
                        break;
                    }
                } catch (Exception e) {
                    // NOTE(review): only the message is logged; the stack trace
                    // (and malformed token) are lost. The bad action is skipped.
                    log.error("Illegal Action: {}", e.getMessage());
                }
                if (a != null) {
                    actions.add(a);
                }
            }
        } else {
            log.debug("actions not found --> drop");
        }
        return actions;
    }

    /**
     * Append OFActionCopyField object to an existing JsonGenerator.
 * Serializes an OFActionCopyField as a JSON object onto an existing generator.
 * The field name of the action, if one is required, is assumed to have been
 * written by the caller already. The appended data is formatted as follows:
 * {
 *     "src_field":"name",
 *     "dst_field":"name",
 *     "src_offset_bits":"bits",
 *     "dst_offset_bits":"bits",
 *     "num_bits":"bits"
 * }
 * @param jsonGen the generator to append to; note it is closed by this method
 * @param c the copy-field action to serialize
 */
public static void copyFieldToJson(JsonGenerator jsonGen, OFActionCopyField c) {
    // Numeric fields are emitted as strings for REST output consistency.
    jsonGen.configure(Feature.WRITE_NUMBERS_AS_STRINGS, true);
    try {
        jsonGen.writeStartObject();
        // The OXM id list is expected to hold exactly two entries:
        // the source field followed by the destination field.
        Iterator<OFOxm<?>> i = c.getOxmIds().iterator();
        if (i.hasNext()) {
            jsonGen.writeStringField("src_field" , OXMUtils.oxmIdToString(U32.of(i.next().getCanonical().getTypeLen())));
        } else {
            log.error("either src_field or dst_field or both not set in {}", c);
        }
        if (i.hasNext()) {
            jsonGen.writeStringField("dst_field" , OXMUtils.oxmIdToString(U32.of(i.next().getCanonical().getTypeLen())));
        } else {
            log.error("either src_field or dst_field not set in {}", c);
        }
        if (i.hasNext()) {
            log.warn("OFOxmList should only have src_field followed by dst_field. Extra field {}", i.next());
        }
        jsonGen.writeNumberField("src_offset_bits", c.getSrcOffset());
        jsonGen.writeNumberField("dst_offset_bits", c.getDstOffset());
        jsonGen.writeNumberField("num_bits", c.getNBits());
        jsonGen.writeEndObject();
        jsonGen.close(); // close flushes, so a backing StringWriter sees the output
    } catch (IOException e) {
        log.error("Error composing OFActionCopyField JSON object. {}", e.getMessage());
        return;
    }
}

/**
 * Converts an OFActionCopyField to a JSON string. The output is a single
 * JSON object of the form:
 * {
 *     "src_field":"name",
 *     "dst_field":"name",
 *     "src_offset_bits":"bits",
 *     "dst_offset_bits":"bits",
 *     "num_bits":"bits"
 * }
 * @param c the copy-field action to serialize
 * @return the JSON string, or JSON_EMPTY_OBJECT if a generator cannot be created
 */
public static String copyFieldToJson(OFActionCopyField c) {
    Writer w = new StringWriter();
    JsonGenerator jsonGen;
    try {
        jsonGen = jsonFactory.createGenerator(w);
    } catch (IOException e) {
        log.error("Could not instantiate JSON Generator. 
{}", e.getMessage());
        return JSON_EMPTY_OBJECT;
    }
    copyFieldToJson(jsonGen, c);
    return w.toString(); /* the StringWriter's buffer now holds the generator's output */
}

/**
 * Converts a JSON string to an OFActionCopyField object.
 * The format of the input JSON is expected to be:
 * {
 *     "src_field":"name",
 *     "dst_field":"name",
 *     "src_offset_bits":"bits",
 *     "dst_offset_bits":"bits",
 *     "num_bits":"bits"
 * }
 * @param json the JSON string to parse; must not be null
 * @param v the OpenFlow version to build the action for; must not be null
 * @return the parsed action, or null on any parse error
 */
public static OFActionCopyField copyFieldFromJson(String json, OFVersion v) {
    if (json == null) {
        throw new IllegalArgumentException("JSON string cannot be null");
    }
    if (v == null) {
        throw new IllegalArgumentException("OFVersion cannot be null");
    }
    final JsonParser jp;
    try {
        jp = jsonFactory.createParser(json);
    } catch (IOException e) {
        log.error("Could not create JSON parser for OFActionCopyField {}", json);
        return null;
    }
    try {
        if (jp.nextToken() != JsonToken.START_OBJECT) {
            throw new IOException("Expected START_OBJECT");
        }
        OFActionCopyField.Builder b = OFFactories.getFactory(v).buildActionCopyField();
        OFOxm<?> srcField = null;
        OFOxm<?> dstField = null;
        while (jp.nextToken() != JsonToken.END_OBJECT) {
            // Keys/values are lower-cased and trimmed for case-insensitive matching.
            String key = jp.getCurrentName().toLowerCase().trim();
            jp.nextToken();
            String value = jp.getText().toLowerCase().trim();
            switch (key) {
            case "src_field":
                srcField = OXMUtils.oxmStringToOxm(value, v);
                break;
            case "dst_field":
                dstField = OXMUtils.oxmStringToOxm(value, v);
                break;
            case "src_offset_bits":
                b.setSrcOffset(ParseUtils.parseHexOrDecInt(value));
                break;
            case "dst_offset_bits":
                b.setDstOffset(ParseUtils.parseHexOrDecInt(value));
                break;
            case "num_bits":
                b.setNBits(ParseUtils.parseHexOrDecInt(value));
                break;
            default:
                log.warn("Unexpected OFActionCopyField key {}", key);
                break;
            }
        }
        if (srcField == null || dstField == null) {
            log.error("Src and dst OXMs must be specified. 
Got {} and {}, respectively", srcField, dstField);
            return null;
        } else {
            b.setOxmIds(OFOxmList.of(srcField, dstField));
            return b.build();
        }
    } catch (IOException e) {
        log.error("Could not parse: {}", json);
        log.error("JSON parse error message: {}", e.getMessage());
        return null;
    }
}

/**
 * Parses OFFlowMod actions from a string and sets them on the builder.
 * @param fmb The OFFlowMod.Builder to set the actions for
 * @param s The string containing all the actions
 */
public static void fromString(OFFlowMod.Builder fmb, String s) {
    List<OFAction> actions = fromString(s, fmb.getVersion());
    log.debug("actions: {}", actions);
    fmb.setActions(actions);
    return;
}

// Thin delegations so action parsing shares port syntax with match parsing.
public static OFPort portFromString(String s) {
    return MatchUtils.portFromString(s);
}

public static String portToString(OFPort p) {
    return MatchUtils.portToString(p);
}

/**
 * Parse string and numerical port representations.
 * The key and delimiter for the action should be omitted, and only the
 * data should be presented to this decoder. Data can be any signed integer
 * or hex (w/leading 0x prefix) as a string or the special string port
 * STR_PORT_* as defined in {@link MatchUtils}.
 *
 * @param actionToDecode; The action as a string to decode
 * @param version; The OF version to create the action for
 * @return the output action, or null if the port cannot be parsed
 */
private static OFActionOutput decode_output(String actionToDecode, OFVersion version) {
    OFActionOutput.Builder ab = OFFactories.getFactory(version).actions().buildOutput();
    OFPort port = portFromString(actionToDecode);
    if (port == null) {
        log.error("Could not parse output port {}", actionToDecode);
        return null;
    } else {
        ab.setPort(port);
        // Request the full packet when the output target is the controller.
        ab.setMaxLen(Integer.MAX_VALUE);
        log.debug("action {}", ab);
        return ab.build();
    }
}

/**
 * Parse enqueue actions.
 * The key and delimiter for the action should be omitted, and only the
 * data should be presented to this decoder. Data with a leading 0x is permitted.
* * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionEnqueue decode_enqueue(String actionToDecode, OFVersion version) { Matcher n = Pattern.compile("(?:((?:0x)?\\d+)\\:((?:0x)?\\d+))").matcher(actionToDecode); if (n.matches()) { OFPort port; if (n.group(1) != null) { port = portFromString(n.group(1)); if (port == null) { log.error("Invalid port {}", n.group(1)); return null; } } else { log.error("Missing port number for enqueue action"); return null; } int queueid = 0; if (n.group(2) != null) { try { queueid = ParseUtils.parseHexOrDecInt(n.group(2)); } catch (NumberFormatException e) { log.debug("Invalid queue-id in: '{}' (error ignored)", actionToDecode); return null; } } OFActionEnqueue a = OFFactories.getFactory(version).actions().buildEnqueue() .setPort(port) .setQueueId(queueid) .build(); log.debug("action {}", a); return a; } else { log.debug("Invalid action: '{}'", actionToDecode); return null; } } /** * Parse set_vlan_id actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. Data with a leading 0x is permitted. 
* * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetVlanVid decode_set_vlan_id(String actionToDecode, OFVersion version) { Matcher n = Pattern.compile("((?:0x)?\\d+)").matcher(actionToDecode); if (n.matches()) { if (n.group(1) != null) { try { VlanVid vlanid = VlanVid.ofVlan(ParseUtils.parseHexOrDecShort(n.group(1))); OFActionSetVlanVid a = OFFactories.getFactory(version).actions().buildSetVlanVid() .setVlanVid(vlanid) .build(); log.debug("action {}", a); return a; } catch (NumberFormatException e) { log.debug("Invalid VLAN in: {} (error ignored)", actionToDecode); return null; } } } else { log.debug("Invalid action: '{}'", actionToDecode); return null; } return null; } /** * Parse set_vlan_pcp actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. Data with a leading 0x is permitted. * * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetVlanPcp decode_set_vlan_priority(String actionToDecode, OFVersion version) { Matcher n = Pattern.compile("((?:0x)?\\d+)").matcher(actionToDecode); if (n.matches()) { if (n.group(1) != null) { try { OFActionSetVlanPcp a = OFFactories.getFactory(version).actions().buildSetVlanPcp() .setVlanPcp(VlanPcp.of(ParseUtils.parseHexOrDecByte(n.group(1)))) .build(); log.debug("action {}", a); return a; } catch (NumberFormatException e) { log.debug("Invalid VLAN priority in: {} (error ignored)", actionToDecode); return null; } } } else { log.debug("Invalid action: '{}'", actionToDecode); return null; } return null; } /** * Parse set_dl_src actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. 
* * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetDlSrc decode_set_src_mac(String actionToDecode, OFVersion version) { try { OFActionSetDlSrc a = OFFactories.getFactory(version).actions().buildSetDlSrc() .setDlAddr(MacAddress.of(actionToDecode)) .build(); log.debug("action {}", a); return a; } catch (Exception e) { log.debug("Invalid action: '{}'", actionToDecode); return null; } } /** * Parse set_dl_dst actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. * * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetDlDst decode_set_dst_mac(String actionToDecode, OFVersion version) { try { OFActionSetDlDst a = OFFactories.getFactory(version).actions().buildSetDlDst() .setDlAddr(MacAddress.of(actionToDecode)) .build(); log.debug("action {}", a); return a; } catch (Exception e) { log.debug("Invalid action: '{}'", actionToDecode); return null; } } /** * Parse set_tos actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. A leading 0x is permitted. 
* * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetNwTos decode_set_tos_bits(String actionToDecode, OFVersion version) { Matcher n = Pattern.compile("((?:0x)?\\d+)").matcher(actionToDecode); if (n.matches()) { if (n.group(1) != null) { try { OFActionSetNwTos a = OFFactories.getFactory(version).actions().buildSetNwTos() .setNwTos(ParseUtils.parseHexOrDecByte(n.group(1))) .build(); log.debug("action {}", a); return a; } catch (NumberFormatException e) { log.debug("Invalid dst-port in: {} (error ignored)", actionToDecode); return null; } } } else { log.debug("Invalid action: '{}'", actionToDecode); return null; } return null; } /** * Parse set_nw_src actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. * * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetNwSrc decode_set_src_ip(String actionToDecode, OFVersion version) { try { OFActionSetNwSrc a = OFFactories.getFactory(version).actions().buildSetNwSrc() .setNwAddr(IPv4Address.of(actionToDecode)) .build(); log.debug("action {}", a); return a; } catch (Exception e) { log.debug("Invalid action: '{}'", actionToDecode); return null; } } /** * Parse set_nw_dst actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. 
* * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetNwDst decode_set_dst_ip(String actionToDecode, OFVersion version) { try { OFActionSetNwDst a = OFFactories.getFactory(version).actions().buildSetNwDst() .setNwAddr(IPv4Address.of(actionToDecode)) .build(); log.debug("action {}", a); return a; } catch (Exception e) { log.debug("Invalid action: '{}'", actionToDecode); return null; } } /** * Parse set_tp_src actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. A leading 0x is permitted. * * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFActionSetTpSrc decode_set_src_port(String actionToDecode, OFVersion version) { try { OFActionSetTpSrc a = OFFactories.getFactory(version).actions().buildSetTpSrc() .setTpPort(TransportPort.of(Integer.parseInt(actionToDecode))) .build(); log.debug("action {}", a); return a; } catch (NumberFormatException e) { log.debug("Invalid src-port in: {} (error ignored)", actionToDecode); return null; } } /** * Parse set_tp_dst actions. * The key and delimiter for the action should be omitted, and only the * data should be presented to this decoder. A leading 0x is permitted. * * @param actionToDecode; The action as a string to decode * @param version; The OF version to create the action for * @return */ private static OFAction decode_set_dst_port(String actionToDecode, OFVersion version) { try { OFActionSetTpDst a = OFFactories.getFactory(version).actions().buildSetTpDst() .setTpPort(TransportPort.of(Integer.parseInt(actionToDecode))) .build(); log.debug("action {}", a); return a; } catch (NumberFormatException e) { log.debug("Invalid dst-port in: {} (error ignored)", actionToDecode); return null; } } }
package net.ihiroky.niotty;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.locks.LockSupport;

/**
 * An implementation of {@link net.ihiroky.niotty.EventDispatcher} which depends on
 * {@link java.util.concurrent.locks.LockSupport#parkNanos(Object, long)} and
 * {@link java.util.concurrent.locks.LockSupport#unpark(Thread)}.
 */
public class DefaultEventDispatcher extends EventDispatcher {

    // Signal flag; int (FALSE/TRUE) rather than boolean so it can be CASed
    // through the field updater without allocating an AtomicBoolean per instance.
    private volatile int signaled_;

    private static final AtomicIntegerFieldUpdater<DefaultEventDispatcher> SIGNALED_UPDATER =
            AtomicIntegerFieldUpdater.newUpdater(DefaultEventDispatcher.class, "signaled_");

    private static final int FALSE = 0;
    private static final int TRUE = 1;

    /**
     * Create a new instance.
     */
    public DefaultEventDispatcher() {
        signaled_ = FALSE;
    }

    @Override
    protected void onOpen() {
    }

    @Override
    protected void onClose() {
    }

    /**
     * Waits until {@link #wakeUp()} is called or the timeout elapses.
     * A pending signal is consumed on return.
     */
    @Override
    protected void poll(long timeout, TimeUnit timeUnit) throws InterruptedException {
        long start = System.nanoTime();
        // BUG FIX: the original used timeUnit.convert(timeout, NANOSECONDS),
        // which interprets timeout as nanoseconds and converts it *to* timeUnit
        // (poll(10, SECONDS) became 0 and never parked). toNanos() converts in
        // the intended direction.
        long timeoutNanos = timeUnit.toNanos(timeout);
        while (signaled_ == FALSE && timeoutNanos > 0L) {
            LockSupport.parkNanos(this, timeoutNanos);
            if (Thread.interrupted()) {
                throw new InterruptedException();
            }
            // parkNanos may return spuriously; recompute the remaining budget.
            long now = System.nanoTime();
            timeoutNanos -= now - start;
            start = now;
        }
        signaled_ = FALSE; // consume the signal for the next poll
    }

    @Override
    protected void wakeUp() {
        // CAS so concurrent wake-ups unpark at most once per consumed signal.
        if (SIGNALED_UPDATER.compareAndSet(this, FALSE, TRUE)) {
            LockSupport.unpark(thread());
        }
    }
}
package net.minecraftforge.client.model; import java.awt.Color; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.BlockModelShapes; import net.minecraft.client.renderer.ItemMeshDefinition; import net.minecraft.client.renderer.ItemModelMesher; import net.minecraft.client.renderer.block.model.BlockPart; import net.minecraft.client.renderer.block.model.BlockPartFace; import net.minecraft.client.renderer.block.model.ItemCameraTransforms; import net.minecraft.client.renderer.block.model.ItemModelGenerator; import net.minecraft.client.renderer.block.model.ModelBlock; import net.minecraft.client.renderer.block.model.ModelBlockDefinition; import net.minecraft.client.renderer.block.model.ModelBlockDefinition.MissingVariantException; import net.minecraft.client.renderer.block.model.ModelBlockDefinition.Variant; import net.minecraft.client.renderer.block.model.ModelBlockDefinition.Variants; import net.minecraft.client.renderer.block.statemap.IStateMapper; import net.minecraft.client.renderer.texture.IIconCreator; import net.minecraft.client.renderer.texture.TextureAtlasSprite; import net.minecraft.client.renderer.texture.TextureMap; import net.minecraft.client.renderer.vertex.DefaultVertexFormats; import net.minecraft.client.renderer.vertex.VertexFormat; import net.minecraft.client.resources.IResourceManager; import net.minecraft.client.resources.model.BuiltInModel; import net.minecraft.client.resources.model.ModelBakery; import net.minecraft.client.resources.model.ModelResourceLocation; import 
net.minecraft.client.resources.model.ModelRotation;
import net.minecraft.client.resources.model.SimpleBakedModel;
import net.minecraft.client.resources.model.WeightedBakedModel;
import net.minecraft.item.Item;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.IRegistry;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.fml.common.FMLLog;
import net.minecraftforge.fml.common.registry.GameData;
import net.minecraftforge.fml.common.registry.RegistryDelegate;

import org.apache.commons.lang3.tuple.Pair;

import com.google.common.base.Function;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

public class ModelLoader extends ModelBakery
{
    // Unbaked models keyed by their blockstate/item variant location.
    private final Map<ModelResourceLocation, IModel> stateModels = new HashMap<ModelResourceLocation, IModel>();
    // All texture locations referenced by loaded models; fed to the texture map.
    private final Set<ResourceLocation> textures = new HashSet<ResourceLocation>();
    // Guard set used to detect circular model dependencies while loading.
    private final Set<ResourceLocation> loadingModels = new HashSet<ResourceLocation>();
    private final Set<ModelResourceLocation> missingVariants = Sets.newHashSet();
    private IModel missingModel = null;
    private boolean isLoading = false;

    public boolean isLoading()
    {
        return isLoading;
    }

    public ModelLoader(IResourceManager manager, TextureMap map, BlockModelShapes shapes)
    {
        super(manager, map, shapes);
        VanillaLoader.instance.setLoader(this);
        ModelLoaderRegistry.clearModelCache();
    }

    // Loads all block and item models, registers their textures, bakes every
    // collected model, and returns the registry of baked models.
    @Override
    public IRegistry setupModelRegistry()
    {
        isLoading = true;
        loadBlocks();
        loadItems();
        try
        {
            missingModel = getModel(new ResourceLocation(MODEL_MISSING.getResourceDomain(), MODEL_MISSING.getResourcePath()));
        }
        catch (IOException e)
        {
            // If this ever happens things are bad. Should never NOT be able to load the missing model.
            Throwables.propagate(e);
        }
        stateModels.put(MODEL_MISSING, missingModel);
        textures.remove(TextureMap.LOCATION_MISSING_TEXTURE);
        textures.addAll(LOCATIONS_BUILTIN_TEXTURES);
        textureMap.loadSprites(resourceManager, new IIconCreator()
        {
            public void registerSprites(TextureMap map)
            {
                for(ResourceLocation t : textures)
                {
                    map.registerSprite(t);
                }
            }
        });
        Function<ResourceLocation, TextureAtlasSprite> textureGetter = new Function<ResourceLocation, TextureAtlasSprite>()
        {
            public TextureAtlasSprite apply(ResourceLocation location)
            {
                return Minecraft.getMinecraft().getTextureMapBlocks().getAtlasSprite(location.toString());
            }
        };
        // Bake the missing model once and reuse it for every entry that failed to load.
        IFlexibleBakedModel missingBaked = missingModel.bake(missingModel.getDefaultState(), DefaultVertexFormats.ITEM, textureGetter);
        for (Entry<ModelResourceLocation, IModel> e : stateModels.entrySet())
        {
            if(e.getValue() == getMissingModel())
            {
                bakedRegistry.putObject(e.getKey(), missingBaked);
            }
            else
            {
                bakedRegistry.putObject(e.getKey(), e.getValue().bake(e.getValue().getDefaultState(), DefaultVertexFormats.ITEM, textureGetter));
            }
        }
        return bakedRegistry;
    }

    // Collects every blockstate variant location (plus vanilla's special-cased
    // item frames) and loads the corresponding models.
    private void loadBlocks()
    {
        Map<IBlockState, ModelResourceLocation> stateMap = blockModelShapes.getBlockStateMapper().putAllStateModelLocations();
        Collection<ModelResourceLocation> variants = Lists.newArrayList(stateMap.values());
        variants.add(new ModelResourceLocation("minecraft:item_frame", "normal")); //Vanilla special cases item_frames so must we
        variants.add(new ModelResourceLocation("minecraft:item_frame", "map"));
        loadVariants(variants);
    }

    @Override
    protected void registerVariant(ModelBlockDefinition definition, ModelResourceLocation location)
    {
        Variants variants = null;
        try
        {
            variants = definition.getVariants(location.getVariant());
        }
        catch(MissingVariantException e)
        {
            // Remember missing variants so pack authors get a hint instead of silence.
            missingVariants.add(location);
        }
        if (variants != null && !variants.getVariants().isEmpty())
        {
            try
            {
                stateModels.put(location, new WeightedRandomModel(location, variants));
            }
            catch(Throwable e)
            {
                throw new RuntimeException(e);
            }
        }
    }
    // Loads the item model for every registered item variant name, falling
    // back to the blockstate json when the item json is absent or missing.
    private void loadItems()
    {
        registerVariantNames();
        for(Item item : GameData.getItemRegistry().typeSafeIterable())
        {
            for(String s : (List<String>)getVariantNames(item))
            {
                ResourceLocation file = getItemLocation(s);
                ModelResourceLocation memory = new ModelResourceLocation(s, "inventory");
                IModel model = null;
                try
                {
                    model = getModel(file);
                }
                catch (IOException e)
                {
                    // Handled by our finally block.
                }
                finally
                {
                    if (model == null || model == getMissingModel())
                    {
                        FMLLog.fine("Item json isn't found for '" + memory + "', trying to load the variant from the blockstate json");
                        registerVariant(getModelBlockDefinition(memory), memory);
                    }
                    else stateModels.put(memory, model);
                }
            }
        }
    }

    // Returns the (possibly cached) model at the given location, loading on demand.
    public IModel getModel(ResourceLocation location) throws IOException
    {
        if(!ModelLoaderRegistry.loaded(location)) loadAnyModel(location);
        return ModelLoaderRegistry.getModel(location);
    }

    @Override
    protected ResourceLocation getModelLocation(ResourceLocation model)
    {
        // Model jsons always carry the ".json" extension on disk.
        return new ResourceLocation(model.getResourceDomain(), model.getResourcePath() + ".json");
    }

    // Loads a model and, recursively, its dependencies; throws on a dependency cycle.
    private void loadAnyModel(ResourceLocation location) throws IOException
    {
        if(loadingModels.contains(location))
        {
            throw new IllegalStateException("circular model dependencies involving model " + location);
        }
        loadingModels.add(location);
        try
        {
            IModel model = ModelLoaderRegistry.getModel(location);
            for (ResourceLocation dep : model.getDependencies())
            {
                getModel(dep);
            }
            textures.addAll(model.getTextures());
        }
        finally
        {
            loadingModels.remove(location);
        }
    }

    // Adapts a vanilla ModelBlock to the Forge IModel pipeline.
    private class VanillaModelWrapper implements IRetexturableModel
    {
        private final ResourceLocation location;
        private final ModelBlock model;

        public VanillaModelWrapper(ResourceLocation location, ModelBlock model)
        {
            this.location = location;
            this.model = model;
        }

        public Collection<ResourceLocation> getDependencies()
        {
            if(model.getParentLocation() == null || model.getParentLocation().getResourcePath().startsWith("builtin/")) return Collections.emptyList();
            return 
Collections.singletonList(model.getParentLocation()); } public Collection<ResourceLocation> getTextures() { // setting parent here to make textures resolve properly if(model.getParentLocation() != null) { try { IModel parent = getModel(model.getParentLocation()); if(parent instanceof VanillaModelWrapper) { model.parent = ((VanillaModelWrapper) parent).model; } else { throw new IllegalStateException("vanilla model '" + model + "' can't have non-vanilla parent"); } } catch (IOException e) { FMLLog.warning("Could not load vanilla model parent '" + model.getParentLocation() + "' for '" + model + "': " + e.toString()); IModel missing = ModelLoader.this.getMissingModel(); if (missing instanceof VanillaModelWrapper) { model.parent = ((VanillaModelWrapper)missing).model; } else { throw new IllegalStateException("vanilla model '" + model + "' has missing parent, and missing model is not a vanilla model"); } } } ImmutableSet.Builder<ResourceLocation> builder = ImmutableSet.builder(); if(hasItemModel(model)) { for(String s : (List<String>)ItemModelGenerator.LAYERS) { String r = model.resolveTextureName(s); ResourceLocation loc = new ResourceLocation(r); if(!r.equals(s)) { builder.add(loc); } // mojang hardcode if(model.getRootModel() == MODEL_COMPASS && !loc.equals(TextureMap.LOCATION_MISSING_TEXTURE)) { TextureAtlasSprite.setLocationNameCompass(loc.toString()); } else if(model.getRootModel() == MODEL_CLOCK && !loc.equals(TextureMap.LOCATION_MISSING_TEXTURE)) { TextureAtlasSprite.setLocationNameClock(loc.toString()); } } } for(String s : (Iterable<String>)model.textures.values()) { if(!s.startsWith(" { builder.add(new ResourceLocation(s)); } } return builder.build(); } public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter) { if(!Attributes.moreSpecific(format, Attributes.DEFAULT_BAKED_FORMAT)) { throw new IllegalArgumentException("can't bake vanilla models to the format that doesn't fit into 
the default one: " + format);
            }
            ModelBlock model = this.model;
            if(model == null) return getMissingModel().bake(state, format, bakedTextureGetter);
            ItemCameraTransforms transforms = new ItemCameraTransforms(model.getThirdPersonTransform(), model.getFirstPersonTransform(), model.getHeadTransform(), model.getInGuiTransform());
            if(hasItemModel(model))
            {
                IPerspectiveState perState = state instanceof IPerspectiveState ? (IPerspectiveState)state : new IPerspectiveState.Impl(state, transforms);
                return new ItemLayerModel(model).bake(perState, format, bakedTextureGetter);
            }
            if(isCustomRenderer(model)) return new IFlexibleBakedModel.Wrapper(new BuiltInModel(transforms), format);
            // TODO perspective awareness for this
            return bakeNormal(model, state.apply(this), format, bakedTextureGetter, state instanceof UVLock);
        }

        // Bakes a plain block model: general quads for faces without a cull face
        // (or under non-integer rotations), face quads keyed by the rotated cull
        // face otherwise.
        private IFlexibleBakedModel bakeNormal(ModelBlock model, TRSRTransformation state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter, boolean uvLocked)
        {
            TextureAtlasSprite particle = bakedTextureGetter.apply(new ResourceLocation(model.resolveTextureName("particle")));
            SimpleBakedModel.Builder builder = (new SimpleBakedModel.Builder(model)).setTexture(particle);
            for(BlockPart part : (Iterable<BlockPart>)model.getElements())
            {
                for(Map.Entry<EnumFacing, BlockPartFace> e : (Iterable<Map.Entry<EnumFacing, BlockPartFace>>)part.mapFaces.entrySet())
                {
                    TextureAtlasSprite textureatlassprite1 = bakedTextureGetter.apply(new ResourceLocation(model.resolveTextureName(e.getValue().texture)));
                    if (e.getValue().cullFace == null || !TRSRTransformation.isInteger(state.getMatrix()))
                    {
                        builder.addGeneralQuad(makeBakedQuad(part, e.getValue(), textureatlassprite1, e.getKey(), state, uvLocked));
                    }
                    else
                    {
                        builder.addFaceQuad(state.rotate(e.getValue().cullFace), makeBakedQuad(part, e.getValue(), textureatlassprite1, e.getKey(), state, uvLocked));
                    }
                }
            }
            return new IFlexibleBakedModel.Wrapper(builder.makeBakedModel(), format);
        }

        public IModelState 
getDefaultState()
        {
            return ModelRotation.X0_Y0;
        }

        /**
         * Returns a copy of this model with the given texture slots replaced.
         * An empty-string value removes the slot and, later, any faces using it.
         */
        @Override
        public IModel retexture(ImmutableMap<String, String> textures)
        {
            if (textures.isEmpty()) return this;

            List<BlockPart> elements = Lists.newArrayList(); //We have to duplicate this so we can edit it below.
            for (BlockPart part : (List<BlockPart>)this.model.getElements())
            {
                elements.add(new BlockPart(part.positionFrom, part.positionTo, Maps.newHashMap(part.mapFaces), part.partRotation, part.shade));
            }

            ModelBlock neweModel = new ModelBlock(this.model.getParentLocation(), elements,
                Maps.newHashMap(this.model.textures), this.model.isAmbientOcclusion(), this.model.isGui3d(), //New Textures man VERY IMPORTANT
                new ItemCameraTransforms(this.model.getThirdPersonTransform(), this.model.getFirstPersonTransform(), this.model.getHeadTransform(), this.model.getInGuiTransform()));
            neweModel.name = this.model.name;
            neweModel.parent = this.model.parent;

            Set<String> removed = Sets.newHashSet();

            for (Entry<String, String> e : textures.entrySet())
            {
                if ("".equals(e.getValue()))
                {
                    removed.add(e.getKey());
                    neweModel.textures.remove(e.getKey());
                }
                else neweModel.textures.put(e.getKey(), e.getValue());
            }

            // Map the model's texture references as if it was the parent of a model with the retexture map as its textures.
            Map<String, String> remapped = Maps.newHashMap();

            for (Entry<String, String> e : (Set<Entry<String, String>>)neweModel.textures.entrySet())
            {
                // FIX: the "#" literal was corrupted/lost in this file. Values
                // beginning with "#" are references into the texture map and
                // need to be resolved to their targets here.
                if (e.getValue().startsWith("#"))
                {
                    String key = e.getValue().substring(1);
                    if (neweModel.textures.containsKey(key))
                        remapped.put(e.getKey(), (String)neweModel.textures.get(key));
                }
            }

            neweModel.textures.putAll(remapped);

            //Remove any faces that use a null texture, this is for performance reasons, also allows some cool layering stuff.
for (BlockPart part : (List<BlockPart>)neweModel.getElements()) { Iterator<Entry<EnumFacing, BlockPartFace>> itr = part.mapFaces.entrySet().iterator(); while (itr.hasNext()) { Entry<EnumFacing, BlockPartFace> entry = itr.next(); if (removed.contains(entry.getValue().texture)) itr.remove(); } } return new VanillaModelWrapper(location, neweModel); } } public static class UVLock implements IModelState { private final IModelState state; public UVLock(IModelState state) { this.state = state; } public TRSRTransformation apply(IModelPart part) { return state.apply(part); } } // Weighted models can contain multiple copies of 1 model with different rotations - this is to make it work with IModelState (different copies will be different objects). private static class WeightedPartWrapper implements IModel { private final IModel model; public WeightedPartWrapper(IModel model) { this.model = model; } public Collection<ResourceLocation> getDependencies() { return model.getDependencies(); } public Collection<ResourceLocation> getTextures() { return model.getTextures(); } public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter) { return model.bake(state, format, bakedTextureGetter); } public IModelState getDefaultState() { return model.getDefaultState(); } } private class WeightedRandomModel implements IModel { private final List<Variant> variants; private final List<ResourceLocation> locations = new ArrayList<ResourceLocation>(); private final List<IModel> models = new ArrayList<IModel>(); private final IModelState defaultState; @Deprecated public WeightedRandomModel(Variants variants){ this(null, variants); } // Remove 1.9 public WeightedRandomModel(ModelResourceLocation parent, Variants variants) { this.variants = variants.getVariants(); ImmutableMap.Builder<IModelPart, IModelState> builder = ImmutableMap.builder(); for (Variant v : (List<Variant>)variants.getVariants()) { ResourceLocation loc = 
v.getModelLocation();
                locations.add(loc);

                IModel model = null;
                try
                {
                    model = getModel(loc);
                }
                catch (Exception e)
                {
                    /*
                     * Vanilla eats this, which makes it only show variants that have models.
                     * But that doesn't help debugging, so we maintain the missing model
                     * so that resource pack makers have a hint that their states are broken.
                     */
                    FMLLog.warning("Unable to load block model: \'" + loc + "\' for variant: \'" + parent + "\': " + e.toString());
                    model = getMissingModel();
                }

                if (v instanceof ISmartVariant)
                {
                    // Let the smart variant post-process the model it wraps.
                    model = ((ISmartVariant)v).process(model, ModelLoader.this);
                    textures.addAll(model.getTextures()); // Kick this, just in case.
                }

                // Wrap so identical models used twice become distinct IModelState keys.
                model = new WeightedPartWrapper(model);
                models.add(model);
                builder.put(model, v.getState());
            }

            if (models.size() == 0) //If all variants are missing, add one with the missing model and default rotation.
            {
                IModel missing = getMissingModel();
                models.add(missing);
                builder.put(missing, TRSRTransformation.identity());
            }

            defaultState = new MapModelState(builder.build());
        }

        public Collection<ResourceLocation> getDependencies()
        {
            return ImmutableList.copyOf(locations);
        }

        // Textures are collected per-variant in the constructor; nothing extra to report.
        public Collection<ResourceLocation> getTextures()
        {
            return Collections.emptyList();
        }

        // Wraps the state in UVLock when the variant requests uvlock.
        private IModelState addUV(boolean uv, IModelState state)
        {
            if(uv) return new UVLock(state);
            return state;
        }

        // Resolves the per-part state from a MapModelState, if that is what we were given.
        private IModelState getState(IModelState state, IModelPart part)
        {
            if(state instanceof MapModelState)
            {
                return ((MapModelState)state).getState(part);
            }
            return state;
        }

        public IFlexibleBakedModel bake(IModelState state, VertexFormat format, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter)
        {
            if(!Attributes.moreSpecific(format, Attributes.DEFAULT_BAKED_FORMAT))
            {
                throw new IllegalArgumentException("can't bake vanilla weighted models to the format that doesn't fit into the default one: " + format);
            }
            // Single-variant fast path: no weighted wrapper needed.
            if(variants.size() == 1)
            {
                Variant v = variants.get(0);
                IModel model = models.get(0);
                return model.bake(addUV(v.isUvLocked(), getState(state, model)), format, bakedTextureGetter);
            }
            WeightedBakedModel.Builder builder = new WeightedBakedModel.Builder();
            for(int i = 0; i < variants.size(); i++)
            {
                IModel model = models.get(i);
                Variant v = variants.get(i);
                builder.add(model.bake(addUV(v.isUvLocked(), getState(state, model)), format, bakedTextureGetter), variants.get(i).getWeight());
            }
            return new FlexibleWeightedBakedModel(builder.build(), Attributes.DEFAULT_BAKED_FORMAT);
        }

        public IModelState getDefaultState()
        {
            return defaultState;
        }
    }

    /** WeightedBakedModel that also advertises the vertex format it was baked with. */
    private static class FlexibleWeightedBakedModel extends WeightedBakedModel implements IFlexibleBakedModel
    {
        private final WeightedBakedModel parent;
        private final VertexFormat format;

        public FlexibleWeightedBakedModel(WeightedBakedModel parent, VertexFormat format)
        {
            super(parent.models);
            this.parent = parent;
            this.format = format;
        }

        public VertexFormat getFormat()
        {
            return format;
        }
    }

    // True for the vanilla models that are generated/handled specially in code.
    private boolean isBuiltinModel(ModelBlock model)
    {
        return model == MODEL_GENERATED || model == MODEL_COMPASS || model == MODEL_CLOCK || model == MODEL_ENTITY;
    }

    /** Lazily loads and caches the "missing model" placeholder. */
    public IModel getMissingModel()
    {
        if (missingModel == null)
        {
            try
            {
                missingModel = getModel(new ResourceLocation(MODEL_MISSING.getResourceDomain(), MODEL_MISSING.getResourcePath()));
            }
            catch (IOException e)
            {
                // If this ever happens things are bad. Should never NOT be able to load the missing model.
Throwables.propagate(e);
            }
        }
        return missingModel;
    }

    /** Singleton ICustomModelLoader that adapts vanilla JSON models into VanillaModelWrapper. */
    static enum VanillaLoader implements ICustomModelLoader
    {
        instance;

        private ModelLoader loader;

        void setLoader(ModelLoader loader)
        {
            this.loader = loader;
        }

        ModelLoader getLoader()
        {
            return loader;
        }

        public void onResourceManagerReload(IResourceManager resourceManager)
        {
            // do nothing, cause loader will store the reference to the resourceManager
        }

        // Fallback loader: accepts every location not claimed by another loader.
        public boolean accepts(ResourceLocation modelLocation)
        {
            return true;
        }

        public IModel loadModel(ResourceLocation modelLocation) throws IOException
        {
            return loader.new VanillaModelWrapper(modelLocation, loader.loadModel(modelLocation));
        }
    }

    /** A plain 16x16 white sprite generated in code rather than loaded from disk. */
    public static class White extends TextureAtlasSprite
    {
        public static ResourceLocation loc = new ResourceLocation("white");
        public static White instance = new White();

        protected White()
        {
            super(loc.toString());
        }

        @Override
        public boolean hasCustomLoader(IResourceManager manager, ResourceLocation location)
        {
            return true;
        }

        @Override
        public boolean load(IResourceManager manager, ResourceLocation location)
        {
            // Paint an all-white ARGB image and feed it through the normal sprite pipeline.
            BufferedImage image = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
            Graphics2D graphics = image.createGraphics();
            graphics.setBackground(Color.WHITE);
            graphics.clearRect(0, 0, 16, 16);
            BufferedImage[] images = new BufferedImage[Minecraft.getMinecraft().gameSettings.mipmapLevels + 1];
            images[0] = image;
            loadSprite(images, null);
            return false;
        }

        public void register(TextureMap map)
        {
            map.setTextureEntry(White.loc.toString(), White.instance);
        }
    }

    /** After baking: log every model location that still resolves to the missing model. */
    public void onPostBakeEvent(IRegistry modelRegistry)
    {
        Object missingModel = modelRegistry.getObject(MODEL_MISSING);
        for(ModelResourceLocation missing : missingVariants)
        {
            Object model = modelRegistry.getObject(missing);
            if(model == null || model == missingModel)
            {
                FMLLog.severe("Model definition for location %s not found", missing);
            }
        }
        isLoading = false;
    }

    private static final Map<RegistryDelegate<Block>, IStateMapper> customStateMappers = Maps.newHashMap();

    /** Registers a custom IStateMapper for a block (mod-facing API). */
    public static void setCustomStateMapper(Block block, IStateMapper mapper)
    {
        customStateMappers.put(block.delegate, mapper);
    }

    /** Internal: applies all registered custom state mappers. */
    public static void onRegisterAllBlocks(BlockModelShapes shapes)
    {
        for (Entry<RegistryDelegate<Block>, IStateMapper> e : customStateMappers.entrySet())
        {
            shapes.registerBlockWithStateMapper(e.getKey().get(), e.getValue());
        }
    }

    private static final Map<RegistryDelegate<Item>, ItemMeshDefinition> customMeshDefinitions = com.google.common.collect.Maps.newHashMap();
    private static final Map<Pair<RegistryDelegate<Item>, Integer>, ModelResourceLocation> customModels = com.google.common.collect.Maps.newHashMap();

    /** Registers a fixed model location for an (item, metadata) pair (mod-facing API). */
    public static void setCustomModelResourceLocation(Item item, int metadata, ModelResourceLocation model)
    {
        customModels.put(Pair.of(item.delegate, metadata), model);
    }

    /** Registers a dynamic mesh definition for an item (mod-facing API). */
    public static void setCustomMeshDefinition(Item item, ItemMeshDefinition meshDefinition)
    {
        customMeshDefinitions.put(item.delegate, meshDefinition);
    }

    /** Internal: applies all registered custom item mesh definitions and model locations. */
    public static void onRegisterItems(ItemModelMesher mesher)
    {
        for (Map.Entry<RegistryDelegate<Item>, ItemMeshDefinition> e : customMeshDefinitions.entrySet())
        {
            mesher.register(e.getKey().get(), e.getValue());
        }
        for (Entry<Pair<RegistryDelegate<Item>, Integer>, ModelResourceLocation> e : customModels.entrySet())
        {
            mesher.register(e.getKey().getLeft().get(), e.getKey().getRight(), e.getValue());
        }
    }
}
package net.onrc.onos.apps.proxyarp;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;

import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.util.MACAddress;
import net.onrc.onos.apps.bgproute.Interface;
import net.onrc.onos.core.datagrid.IDatagridService;
import net.onrc.onos.core.datagrid.IEventChannel;
import net.onrc.onos.core.datagrid.IEventChannelListener;
import net.onrc.onos.core.devicemanager.IOnosDeviceService;
import net.onrc.onos.core.flowprogrammer.IFlowPusherService;
import net.onrc.onos.core.main.config.IConfigInfoService;
import net.onrc.onos.core.packet.ARP;
import net.onrc.onos.core.packet.Ethernet;
import net.onrc.onos.core.packet.IPv4;
import net.onrc.onos.core.packetservice.BroadcastPacketOutNotification;
import net.onrc.onos.core.packetservice.SinglePacketOutNotification;
import net.onrc.onos.core.topology.Device;
import net.onrc.onos.core.topology.INetworkGraphService;
import net.onrc.onos.core.topology.NetworkGraph;
import net.onrc.onos.core.topology.Switch;
import net.onrc.onos.core.util.Dpid;
import net.onrc.onos.core.util.SwitchPort;

import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionOutput;
import org.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;

/**
 * Proxy ARP module: intercepts ARP packet-ins, answers requests for known
 * hosts, and broadcasts/unicasts requests via datagrid channels so all
 * controller instances can cooperate.
 */
public class ProxyArpManager implements IProxyArpService, IOFMessageListener,
        IFloodlightModule {
    private static final Logger log = LoggerFactory
            .getLogger(ProxyArpManager.class);

    // Period of the cleanup/retry timer task, in milliseconds.
    private static final long ARP_TIMER_PERIOD = 100;
    // How long to wait for an ARP reply before a request expires, in milliseconds.
    private static final int ARP_REQUEST_TIMEOUT = 2000;

    private IFloodlightProviderService floodlightProvider;
    private IDatagridService datagrid;

    // Datagrid channels used to distribute packet-out and ARP-reply events
    // across controller instances.
    private IEventChannel<Long, ArpReplyNotification> arpReplyEventChannel;
    private IEventChannel<Long, BroadcastPacketOutNotification> broadcastPacketOutEventChannel;
    private IEventChannel<Long, SinglePacketOutNotification> singlePacketOutEventChannel;
    private static final String ARP_REPLY_CHANNEL_NAME = "onos.arp_reply";
    private static final String BROADCAST_PACKET_OUT_CHANNEL_NAME = "onos.broadcast_packet_out";
    private static final String SINGLE_PACKET_OUT_CHANNEL_NAME = "onos.single_packet_out";
    private ArpReplyEventHandler arpReplyEventHandler = new ArpReplyEventHandler();
    private BroadcastPacketOutEventHandler broadcastPacketOutEventHandler = new BroadcastPacketOutEventHandler();
    private SinglePacketOutEventHandler singlePacketOutEventHandler = new SinglePacketOutEventHandler();

    private IConfigInfoService configService;
    private IRestApiService restApi;
    private IFlowPusherService flowPusher;

    private INetworkGraphService networkGraphService;
    private NetworkGraph networkGraph;
    private IOnosDeviceService onosDeviceService;

    private short vlan;
    private static final short NO_VLAN = 0;

    // Outstanding ARP requests, keyed by the IP address being resolved.
    // Synchronized multimap; iteration must hold the map's monitor.
    private SetMultimap<InetAddress, ArpRequest> arpRequests;

    /** Handles datagrid notifications asking this instance to broadcast an ARP request. */
    private class BroadcastPacketOutEventHandler implements
            IEventChannelListener<Long,
BroadcastPacketOutNotification> {
        @Override
        public void entryAdded(BroadcastPacketOutNotification value) {
            if (log.isTraceEnabled()) {
                log.trace("entryAdded ip{}, sw {}, port {}, packet {}",
                        value.getTargetAddress(),
                        value.getInSwitch(),
                        value.getInPort(),
                        value.getPacketData().length);
            }
            BroadcastPacketOutNotification notification =
                    (BroadcastPacketOutNotification) value;
            // Flood the ARP request out this instance's edge ports.
            broadcastArpRequestOutMyEdge(notification.getPacketData(),
                    notification.getInSwitch(), notification.getInPort());

            // set timestamp
            // Convert the int-encoded target address back into an InetAddress key.
            ByteBuffer buffer = ByteBuffer.allocate(4);
            buffer.putInt(notification.getTargetAddress());
            InetAddress addr = null;
            try {
                addr = InetAddress.getByAddress(buffer.array());
            } catch (UnknownHostException e) {
                log.error("Exception:", e);
            }

            if (addr != null) {
                // Mark pending requests as sent so the expiry clock starts.
                for (ArpRequest request : arpRequests.get(addr)) {
                    request.setRequestTime();
                }
            }
        }

        @Override
        public void entryUpdated(BroadcastPacketOutNotification value) {
            log.debug("entryUpdated");
            // TODO: For now, entryUpdated() is processed as entryAdded()
            entryAdded(value);
        }

        @Override
        public void entryRemoved(BroadcastPacketOutNotification value) {
            log.debug("entryRemoved");
            // TODO: Not implemented. Revisit when this module is refactored
        }
    }

    /** Handles datagrid notifications asking this instance to send a packet out one port. */
    private class SinglePacketOutEventHandler implements
            IEventChannelListener<Long, SinglePacketOutNotification> {
        @Override
        public void entryAdded(SinglePacketOutNotification packetOutNotification) {
            log.debug("entryAdded");
            SinglePacketOutNotification notification =
                    (SinglePacketOutNotification) packetOutNotification;
            sendArpRequestOutPort(notification.getPacketData(),
                    notification.getOutSwitch(),
                    notification.getOutPort());

            // set timestamp
            ByteBuffer buffer = ByteBuffer.allocate(4);
            buffer.putInt(notification.getTargetAddress());
            InetAddress addr = null;
            try {
                addr = InetAddress.getByAddress(buffer.array());
            } catch (UnknownHostException e) {
                log.error("Exception:", e);
            }

            if (addr != null) {
                for (ArpRequest request : arpRequests.get(addr)) {
                    request.setRequestTime();
                }
            }
        }

        @Override
        public void entryUpdated(SinglePacketOutNotification packetOutNotification) {
            log.debug("entryUpdated");
            // TODO: For now, entryUpdated() is processed as entryAdded()
            entryAdded(packetOutNotification);
        }

        @Override
        public void entryRemoved(SinglePacketOutNotification packetOutNotification) {
            log.debug("entryRemoved");
            // TODO: Not implemented. Revisit when this module is refactored
        }
    }

    /** Dispatches ARP replies (possibly received on another instance) to local waiters. */
    private class ArpReplyEventHandler implements
            IEventChannelListener<Long, ArpReplyNotification> {
        @Override
        public void entryAdded(ArpReplyNotification arpReply) {
            log.debug("Received ARP reply notification for ip {}, mac {}",
                    arpReply.getTargetAddress(), arpReply.getTargetMacAddress());
            ByteBuffer buffer = ByteBuffer.allocate(4);
            buffer.putInt(arpReply.getTargetAddress());
            InetAddress addr = null;
            try {
                addr = InetAddress.getByAddress(buffer.array());
            } catch (UnknownHostException e) {
                log.error("Exception:", e);
            }

            if (addr != null) {
                sendArpReplyToWaitingRequesters(addr,
                        arpReply.getTargetMacAddress());
            }
        }

        @Override
        public void entryUpdated(ArpReplyNotification arpReply) {
            // TODO: For now, entryUpdated() is processed as entryAdded()
            entryAdded(arpReply);
        }

        @Override
        public void entryRemoved(ArpReplyNotification arpReply) {
            // TODO: Not implemented. Revisit when this module is refactored
        }
    }

    /** A pending ARP resolution on behalf of some requester. */
    private static class ArpRequest {
        private final IArpRequester requester;
        // Whether the periodic task should re-send this request after it expires.
        private final boolean retry;
        // True once the request was actually sent; expiry only counts from then.
        private boolean sent = false;
        private long requestTime;

        public ArpRequest(IArpRequester requester, boolean retry) {
            this.requester = requester;
            this.retry = retry;
        }

        // Copy constructor used when re-queueing a retried request.
        public ArpRequest(ArpRequest old) {
            this.requester = old.requester;
            this.retry = old.retry;
        }

        public boolean isExpired() {
            return sent
                    && ((System.currentTimeMillis() - requestTime) > ARP_REQUEST_TIMEOUT);
        }

        public boolean shouldRetry() {
            return retry;
        }

        public void dispatchReply(InetAddress ipAddress,
                MACAddress replyMacAddress) {
            requester.arpResponse(ipAddress, replyMacAddress);
        }

        public void setRequestTime() {
            this.requestTime = System.currentTimeMillis();
            this.sent = true;
        }
    }

    /** Requester that answers a host's original ARP request once the target MAC is known. */
    private class HostArpRequester implements IArpRequester {
        private final ARP arpRequest;
        private final long dpid;
        private final short port;

        public HostArpRequester(ARP arpRequest, long dpid, short port) {
            this.arpRequest = arpRequest;
            this.dpid = dpid;
            this.port = port;
        }

        @Override
        public void
arpResponse(InetAddress ipAddress, MACAddress macAddress) { ProxyArpManager.this.sendArpReply(arpRequest, dpid, port, macAddress); } public ARP getArpRequest() { return arpRequest; } } @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IProxyArpService.class); return l; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>(); m.put(IProxyArpService.class, this); return m; } @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { Collection<Class<? extends IFloodlightService>> dependencies = new ArrayList<Class<? extends IFloodlightService>>(); dependencies.add(IFloodlightProviderService.class); dependencies.add(IRestApiService.class); dependencies.add(IDatagridService.class); dependencies.add(IConfigInfoService.class); dependencies.add(IFlowPusherService.class); dependencies.add(INetworkGraphService.class); dependencies.add(IOnosDeviceService.class); return dependencies; } @Override public void init(FloodlightModuleContext context) { this.floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class); this.configService = context.getServiceImpl(IConfigInfoService.class); this.restApi = context.getServiceImpl(IRestApiService.class); this.datagrid = context.getServiceImpl(IDatagridService.class); this.flowPusher = context.getServiceImpl(IFlowPusherService.class); this.networkGraphService = context.getServiceImpl(INetworkGraphService.class); this.onosDeviceService = context.getServiceImpl(IOnosDeviceService.class); // arpCache = new ArpCache(); arpRequests = Multimaps.synchronizedSetMultimap(HashMultimap .<InetAddress, ArpRequest>create()); } @Override public void startUp(FloodlightModuleContext 
context) { this.vlan = configService.getVlan(); log.info("vlan set to {}", this.vlan); restApi.addRestletRoutable(new ArpWebRoutable()); floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this); networkGraph = networkGraphService.getNetworkGraph(); // Event notification setup: channels and event handlers broadcastPacketOutEventChannel = datagrid.addListener(BROADCAST_PACKET_OUT_CHANNEL_NAME, broadcastPacketOutEventHandler, Long.class, BroadcastPacketOutNotification.class); singlePacketOutEventChannel = datagrid.addListener(SINGLE_PACKET_OUT_CHANNEL_NAME, singlePacketOutEventHandler, Long.class, SinglePacketOutNotification.class); arpReplyEventChannel = datagrid.addListener(ARP_REPLY_CHANNEL_NAME, arpReplyEventHandler, Long.class, ArpReplyNotification.class); Timer arpTimer = new Timer("arp-processing"); arpTimer.scheduleAtFixedRate(new TimerTask() { @Override public void run() { doPeriodicArpProcessing(); } }, 0, ARP_TIMER_PERIOD); } /* * Function that runs periodically to manage the asynchronous request mechanism. * It basically cleans up old ARP requests if we don't get a response for them. * The caller can designate that a request should be retried indefinitely, and * this task will handle that as well. 
*/ private void doPeriodicArpProcessing() { SetMultimap<InetAddress, ArpRequest> retryList = HashMultimap .<InetAddress, ArpRequest>create(); // Have to synchronize externally on the Multimap while using an // iterator, // even though it's a synchronizedMultimap synchronized (arpRequests) { Iterator<Map.Entry<InetAddress, ArpRequest>> it = arpRequests .entries().iterator(); while (it.hasNext()) { Map.Entry<InetAddress, ArpRequest> entry = it.next(); ArpRequest request = entry.getValue(); if (request.isExpired()) { log.debug("Cleaning expired ARP request for {}", entry .getKey().getHostAddress()); // If the ARP request is expired and then delete the device // TODO check whether this is OK from this thread HostArpRequester requester = (HostArpRequester) request.requester; ARP req = requester.getArpRequest(); Device targetDev = networkGraph.getDeviceByMac(MACAddress.valueOf(req.getTargetHardwareAddress())); if (targetDev != null) { onosDeviceService.deleteOnosDeviceByMac(MACAddress.valueOf(req.getTargetHardwareAddress())); if (log.isDebugEnabled()) { log.debug("RemoveDevice: {} due to no have not recieve the ARP reply", targetDev.getMacAddress()); } } it.remove(); if (request.shouldRetry()) { retryList.put(entry.getKey(), request); } } } } for (Map.Entry<InetAddress, Collection<ArpRequest>> entry : retryList .asMap().entrySet()) { InetAddress address = entry.getKey(); log.debug("Resending ARP request for {}", address.getHostAddress()); // Only ARP requests sent by the controller will have the retry flag // set, so for now we can just send a new ARP request for that // address. 
sendArpRequestForAddress(address);

            for (ArpRequest request : entry.getValue()) {
                // Re-queue a fresh (unsent) copy so the timeout restarts.
                arpRequests.put(address, new ArpRequest(request));
            }
        }
    }

    @Override
    public String getName() {
        return "proxyarpmanager";
    }

    @Override
    public boolean isCallbackOrderingPrereq(OFType type, String name) {
        // Run after the device managers so host locations are learned first.
        if (type == OFType.PACKET_IN) {
            return "devicemanager".equals(name)
                    || "onosdevicemanager".equals(name);
        } else {
            return false;
        }
    }

    @Override
    public boolean isCallbackOrderingPostreq(OFType type, String name) {
        // Run before the forwarding module for PACKET_INs.
        return type == OFType.PACKET_IN && "onosforwarding".equals(name);
    }

    @Override
    public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
        if (!(msg instanceof OFPacketIn)) {
            return Command.CONTINUE;
        }

        OFPacketIn pi = (OFPacketIn) msg;

        Ethernet eth = IFloodlightProviderService.bcStore.get(cntx,
                IFloodlightProviderService.CONTEXT_PI_PAYLOAD);

        if (eth.getEtherType() == Ethernet.TYPE_ARP) {
            ARP arp = (ARP) eth.getPayload();
            if (arp.getOpCode() == ARP.OP_REQUEST) {
                handleArpRequest(sw, pi, arp, eth);
            } else if (arp.getOpCode() == ARP.OP_REPLY) {
                // For replies we simply send a notification via Hazelcast
                sendArpReplyNotification(eth, pi);
                // handleArpReply(sw, pi, arp);
            }

            // Stop ARP packets here
            return Command.STOP;
        }

        // Propagate everything else
        return Command.CONTINUE;
    }

    /**
     * Handles an ARP request packet-in: answers directly for our own interface
     * addresses, otherwise records the request and probes/broadcasts for the
     * target host via the datagrid channels.
     */
    private void handleArpRequest(IOFSwitch sw, OFPacketIn pi, ARP arp, Ethernet eth) {
        if (log.isTraceEnabled()) {
            log.trace("ARP request received for {}",
                    inetAddressToString(arp.getTargetProtocolAddress()));
        }

        InetAddress target;
        try {
            target = InetAddress.getByAddress(arp.getTargetProtocolAddress());
        } catch (UnknownHostException e) {
            log.debug("Invalid address in ARP request", e);
            return;
        }

        if (configService.fromExternalNetwork(sw.getId(), pi.getInPort())) {
            // If the request came from outside our network, we only care if
            // it was a request for one of our interfaces.
            if (configService.isInterfaceAddress(target)) {
                log.trace("ARP request for our interface. Sending reply {} => {}",
                        target.getHostAddress(),
                        configService.getRouterMacAddress());
                sendArpReply(arp, sw.getId(), pi.getInPort(),
                        configService.getRouterMacAddress());
            }

            return;
        }

        // MACAddress macAddress = arpCache.lookup(target);

        // Record the pending request so the eventual reply can be dispatched back.
        arpRequests.put(target, new ArpRequest(
                new HostArpRequester(arp, sw.getId(), pi.getInPort()), false));

        Device targetDevice = networkGraph.getDeviceByMac(MACAddress.valueOf(arp.getTargetHardwareAddress()));

        if (targetDevice == null) {
            if (log.isTraceEnabled()) {
                log.trace("No device info found for {} - broadcasting",
                        target.getHostAddress());
            }

            // We don't know the device so broadcast the request out
            BroadcastPacketOutNotification key =
                    new BroadcastPacketOutNotification(eth.serialize(),
                            ByteBuffer.wrap(arp.getTargetProtocolAddress()).getInt(),
                            sw.getId(), pi.getInPort());
            log.debug("broadcastPacketOutEventChannel mac {}, ip {}, dpid {}, port {}, paket {}",
                    eth.getSourceMAC().toLong(),
                    ByteBuffer.wrap(arp.getTargetProtocolAddress()).getInt(),
                    sw.getId(), pi.getInPort(), eth.serialize().length);
            broadcastPacketOutEventChannel.addTransientEntry(eth.getDestinationMAC().toLong(), key);
        } else {
            // Even if the device exists in our database, we do not reply to
            // the request directly, but check whether the device is still valid
            MACAddress macAddress = MACAddress.valueOf(arp.getTargetHardwareAddress());

            if (log.isTraceEnabled()) {
                log.trace("The target Device Record in DB is: {} => {} from ARP request host at {}/{}",
                        new Object[]{
                                inetAddressToString(arp.getTargetProtocolAddress()),
                                macAddress,
                                HexString.toHexString(sw.getId()), pi.getInPort()});
            }

            // sendArpReply(arp, sw.getId(), pi.getInPort(), macAddress);

            Iterable<net.onrc.onos.core.topology.Port> outPorts = targetDevice.getAttachmentPoints();

            if (!outPorts.iterator().hasNext()) {
                if (log.isTraceEnabled()) {
                    log.trace("Device {} exists but is not connected to any ports"
                            + " - broadcasting", macAddress);
                }

                // BroadcastPacketOutNotification key =
                //         new BroadcastPacketOutNotification(eth.serialize(),
                //                 target, sw.getId(), pi.getInPort());
                // broadcastPacketOutEventChannel.addTransientEntry(eth.getDestinationMAC().toLong(), key);
            } else {
                for (net.onrc.onos.core.topology.Port portObject : outPorts) {
                    //long outSwitch = 0;
                    //short outPort = 0;

                    // Skip inter-switch ports; probe only edge (host-facing) ports.
                    if (portObject.getOutgoingLink() != null || portObject.getIncomingLink() != null) {
                        continue;
                    }

                    short outPort = portObject.getNumber().shortValue();
                    Switch outSwitchObject = portObject.getSwitch();
                    long outSwitch = outSwitchObject.getDpid();

                    if (log.isTraceEnabled()) {
                        log.trace("Probing device {} on port {}/{}",
                                new Object[]{macAddress,
                                        HexString.toHexString(outSwitch), outPort});
                    }

                    SinglePacketOutNotification key =
                            new SinglePacketOutNotification(eth.serialize(),
                                    ByteBuffer.wrap(target.getAddress()).getInt(),
                                    outSwitch, outPort);
                    singlePacketOutEventChannel.addTransientEntry(eth.getDestinationMAC().toLong(), key);
                }
            }
        }
    }

    // Not used because device manager currently updates the database
    // for ARP replies. May be useful in the future.
private void handleArpReply(IOFSwitch sw, OFPacketIn pi, ARP arp) {
        if (log.isTraceEnabled()) {
            log.trace("ARP reply recieved: {} => {}, on {}/{}", new Object[]{
                    inetAddressToString(arp.getSenderProtocolAddress()),
                    HexString.toHexString(arp.getSenderHardwareAddress()),
                    HexString.toHexString(sw.getId()), pi.getInPort()});
        }

        InetAddress senderIpAddress;
        try {
            senderIpAddress = InetAddress.getByAddress(arp
                    .getSenderProtocolAddress());
        } catch (UnknownHostException e) {
            log.debug("Invalid address in ARP reply", e);
            return;
        }

        MACAddress senderMacAddress = MACAddress.valueOf(arp
                .getSenderHardwareAddress());

        // See if anyone's waiting for this ARP reply
        Set<ArpRequest> requests = arpRequests.get(senderIpAddress);

        // Synchronize on the Multimap while using an iterator for one of the
        // sets
        List<ArpRequest> requestsToSend = new ArrayList<ArpRequest>(
                requests.size());
        synchronized (arpRequests) {
            Iterator<ArpRequest> it = requests.iterator();
            while (it.hasNext()) {
                ArpRequest request = it.next();
                it.remove();
                requestsToSend.add(request);
            }
        }

        // Don't hold an ARP lock while dispatching requests
        for (ArpRequest request : requestsToSend) {
            request.dispatchReply(senderIpAddress, senderMacAddress);
        }
    }

    /**
     * Builds and sends an ARP request from the controller for the given IP,
     * out the configured interface for that address (via the datagrid channel).
     */
    private void sendArpRequestForAddress(InetAddress ipAddress) {
        // TODO what should the sender IP address and MAC address be if no
        // IP addresses are configured? Will there ever be a need to send
        // ARP requests from the controller in that case?
        // All-zero MAC address doesn't seem to work - hosts don't respond to it

        byte[] zeroIpv4 = {0x0, 0x0, 0x0, 0x0};
        byte[] zeroMac = {0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
        byte[] genericNonZeroMac = {0x0, 0x0, 0x0, 0x0, 0x0, 0x01};
        byte[] broadcastMac = {(byte) 0xff, (byte) 0xff, (byte) 0xff,
                (byte) 0xff, (byte) 0xff, (byte) 0xff};

        ARP arpRequest = new ARP();

        arpRequest
                .setHardwareType(ARP.HW_TYPE_ETHERNET)
                .setProtocolType(ARP.PROTO_TYPE_IP)
                .setHardwareAddressLength(
                        (byte) Ethernet.DATALAYER_ADDRESS_LENGTH)
                .setProtocolAddressLength((byte) IPv4.ADDRESS_LENGTH)
                .setOpCode(ARP.OP_REQUEST).setTargetHardwareAddress(zeroMac)
                .setTargetProtocolAddress(ipAddress.getAddress());

        MACAddress routerMacAddress = configService.getRouterMacAddress();
        // TODO hack for now as it's unclear what the MAC address should be
        byte[] senderMacAddress = genericNonZeroMac;
        if (routerMacAddress != null) {
            senderMacAddress = routerMacAddress.toBytes();
        }
        arpRequest.setSenderHardwareAddress(senderMacAddress);

        byte[] senderIPAddress = zeroIpv4;
        Interface intf = configService.getOutgoingInterface(ipAddress);
        if (intf == null) {
            // TODO handle the case where the controller needs to send an ARP
            // request but there's not IP configuration. In this case the
            // request should be broadcast out all edge ports in the network.
            log.warn("Sending ARP requests with default configuration "
                    + "not supported");
            return;
        }

        senderIPAddress = intf.getIpAddress().getAddress();

        arpRequest.setSenderProtocolAddress(senderIPAddress);

        Ethernet eth = new Ethernet();
        eth.setSourceMACAddress(senderMacAddress)
                .setDestinationMACAddress(broadcastMac)
                .setEtherType(Ethernet.TYPE_ARP).setPayload(arpRequest);

        if (vlan != NO_VLAN) {
            eth.setVlanID(vlan).setPriorityCode((byte) 0);
        }

        // sendArpRequestToSwitches(ipAddress, eth.serialize());
        SinglePacketOutNotification key =
                new SinglePacketOutNotification(eth.serialize(),
                        ByteBuffer.wrap(ipAddress.getAddress()).getInt(),
                        intf.getDpid(), intf.getPort());
        singlePacketOutEventChannel.addTransientEntry(MACAddress.valueOf(senderMacAddress).toLong(), key);
    }

    /*
    private void sendArpRequestToSwitches(InetAddress dstAddress,
            byte[] arpRequest) {
        sendArpRequestToSwitches(dstAddress, arpRequest, 0,
                OFPort.OFPP_NONE.getValue());
    }
    */
    /*
    private void sendArpRequestToSwitches(InetAddress dstAddress,
            byte[] arpRequest, long inSwitch, short inPort) {
        if (configService.hasLayer3Configuration()) {
            Interface intf = configService.getOutgoingInterface(dstAddress);
            if (intf == null) {
                // TODO here it should be broadcast out all non-interface edge
                // ports.
                // I think we can assume that if it's not a request for an
                // external
                // network, it's an ARP for a host in our own network. So we
                // want to
                // send it out all edge ports that don't have an interface
                // configured
                // to ensure it reaches all hosts in our network.
log.debug("No interface found to send ARP request for {}", dstAddress.getHostAddress()); } else { sendArpRequestOutPort(arpRequest, intf.getDpid(), intf.getPort()); } } else { // broadcastArpRequestOutEdge(arpRequest, inSwitch, inPort); broadcastArpRequestOutMyEdge(arpRequest, inSwitch, inPort); } } */ private void sendArpReplyNotification(Ethernet eth, OFPacketIn pi) { ARP arp = (ARP) eth.getPayload(); if (log.isTraceEnabled()) { log.trace("Sending ARP reply for {} to other ONOS instances", inetAddressToString(arp.getSenderProtocolAddress())); } InetAddress targetAddress; try { targetAddress = InetAddress.getByAddress(arp .getSenderProtocolAddress()); } catch (UnknownHostException e) { log.error("Unknown host", e); return; } MACAddress mac = new MACAddress(arp.getSenderHardwareAddress()); ArpReplyNotification key = new ArpReplyNotification(ByteBuffer.wrap(targetAddress.getAddress()).getInt(), mac); log.debug("ArpReplyNotification ip {}, mac{}", ByteBuffer.wrap(targetAddress.getAddress()).getInt(), mac); arpReplyEventChannel.addTransientEntry(mac.toLong(), key); } private void broadcastArpRequestOutMyEdge(byte[] arpRequest, long inSwitch, short inPort) { List<SwitchPort> switchPorts = new ArrayList<SwitchPort>(); for (IOFSwitch sw : floodlightProvider.getSwitches().values()) { OFPacketOut po = new OFPacketOut(); po.setInPort(OFPort.OFPP_NONE).setBufferId(-1) .setPacketData(arpRequest); List<OFAction> actions = new ArrayList<OFAction>(); Switch graphSw = networkGraph.getSwitch(sw.getId()); Collection<net.onrc.onos.core.topology.Port> ports = graphSw.getPorts(); if (ports == null) { continue; } for (net.onrc.onos.core.topology.Port portObject : ports) { if (portObject.getOutgoingLink() == null && portObject.getNumber() > 0) { Long portNumber = portObject.getNumber(); if (sw.getId() == inSwitch && portNumber.shortValue() == inPort) { // This is the port that the ARP message came in, // so don't broadcast out this port continue; } switchPorts.add(new SwitchPort(new 
Dpid(sw.getId()), new net.onrc.onos.core.util.Port(portNumber.shortValue()))); actions.add(new OFActionOutput(portNumber.shortValue())); } } po.setActions(actions); short actionsLength = (short) (actions.size() * OFActionOutput.MINIMUM_LENGTH); po.setActionsLength(actionsLength); po.setLengthU(OFPacketOut.MINIMUM_LENGTH + actionsLength + arpRequest.length); flowPusher.add(sw, po); } if (log.isTraceEnabled()) { log.trace("Broadcast ARP request to: {}", switchPorts); } } private void sendArpRequestOutPort(byte[] arpRequest, long dpid, short port) { if (log.isTraceEnabled()) { log.trace("Sending ARP request out {}/{}", HexString.toHexString(dpid), port); } OFPacketOut po = new OFPacketOut(); po.setInPort(OFPort.OFPP_NONE).setBufferId(-1) .setPacketData(arpRequest); List<OFAction> actions = new ArrayList<OFAction>(); actions.add(new OFActionOutput(port)); po.setActions(actions); short actionsLength = (short) (actions.size() * OFActionOutput.MINIMUM_LENGTH); po.setActionsLength(actionsLength); po.setLengthU(OFPacketOut.MINIMUM_LENGTH + actionsLength + arpRequest.length); IOFSwitch sw = floodlightProvider.getSwitches().get(dpid); if (sw == null) { log.warn("Switch not found when sending ARP request"); return; } flowPusher.add(sw, po); } private void sendArpReply(ARP arpRequest, long dpid, short port, MACAddress targetMac) { if (log.isTraceEnabled()) { log.trace( "Sending reply {} => {} to {}", new Object[]{ inetAddressToString(arpRequest .getTargetProtocolAddress()), targetMac, inetAddressToString(arpRequest .getSenderProtocolAddress())}); } ARP arpReply = new ARP(); arpReply.setHardwareType(ARP.HW_TYPE_ETHERNET) .setProtocolType(ARP.PROTO_TYPE_IP) .setHardwareAddressLength( (byte) Ethernet.DATALAYER_ADDRESS_LENGTH) .setProtocolAddressLength((byte) IPv4.ADDRESS_LENGTH) .setOpCode(ARP.OP_REPLY) .setSenderHardwareAddress(targetMac.toBytes()) .setSenderProtocolAddress(arpRequest.getTargetProtocolAddress()) .setTargetHardwareAddress(arpRequest.getSenderHardwareAddress()) 
.setTargetProtocolAddress(arpRequest.getSenderProtocolAddress()); Ethernet eth = new Ethernet(); eth.setDestinationMACAddress(arpRequest.getSenderHardwareAddress()) .setSourceMACAddress(targetMac.toBytes()) .setEtherType(Ethernet.TYPE_ARP).setPayload(arpReply); if (vlan != NO_VLAN) { eth.setVlanID(vlan).setPriorityCode((byte) 0); } List<OFAction> actions = new ArrayList<OFAction>(); actions.add(new OFActionOutput(port)); OFPacketOut po = new OFPacketOut(); po.setInPort(OFPort.OFPP_NONE) .setBufferId(-1) .setPacketData(eth.serialize()) .setActions(actions) .setActionsLength((short) OFActionOutput.MINIMUM_LENGTH) .setLengthU( OFPacketOut.MINIMUM_LENGTH + OFActionOutput.MINIMUM_LENGTH + po.getPacketData().length); List<OFMessage> msgList = new ArrayList<OFMessage>(); msgList.add(po); IOFSwitch sw = floodlightProvider.getSwitches().get(dpid); if (sw == null) { log.warn("Switch {} not found when sending ARP reply", HexString.toHexString(dpid)); return; } flowPusher.add(sw, po); } private String inetAddressToString(byte[] bytes) { try { return InetAddress.getByAddress(bytes).getHostAddress(); } catch (UnknownHostException e) { log.debug("Invalid IP address", e); return ""; } } /* * IProxyArpService methods */ @Override public MACAddress getMacAddress(InetAddress ipAddress) { // return arpCache.lookup(ipAddress); return null; } @Override public void sendArpRequest(InetAddress ipAddress, IArpRequester requester, boolean retry) { arpRequests.put(ipAddress, new ArpRequest(requester, retry)); // Sanity check to make sure we don't send a request for our own address if (!configService.isInterfaceAddress(ipAddress)) { sendArpRequestForAddress(ipAddress); } } @Override public List<String> getMappings() { return new ArrayList<String>(); } private void sendArpReplyToWaitingRequesters(InetAddress address, MACAddress mac) { log.debug("Sending ARP reply for {} to requesters", address.getHostAddress()); // See if anyone's waiting for this ARP reply Set<ArpRequest> requests = 
arpRequests.get(address); // Synchronize on the Multimap while using an iterator for one of the // sets List<ArpRequest> requestsToSend = new ArrayList<ArpRequest>( requests.size()); synchronized (arpRequests) { Iterator<ArpRequest> it = requests.iterator(); while (it.hasNext()) { ArpRequest request = it.next(); it.remove(); requestsToSend.add(request); } } //TODO here, comment outed from long time ago. I will check if we need it later. /*IDeviceObject deviceObject = deviceStorage.getDeviceByIP( InetAddresses.coerceToInteger(address)); MACAddress mac = MACAddress.valueOf(deviceObject.getMACAddress()); log.debug("Found {} at {} in network map", address.getHostAddress(), mac);*/ // Don't hold an ARP lock while dispatching requests for (ArpRequest request : requestsToSend) { request.dispatchReply(address, mac); } } }
package net.kaczmarzyk.kata;

import java.util.Arrays;
import java.util.List;

/**
 * Converts an integer to its Roman numeral representation using a fixed
 * table of symbols, ordered from largest to smallest value (including the
 * subtractive forms CM, CD, XC, XL, IX and IV).
 */
public class RomanNumeralConverter {

    /** Symbol table, largest value first; scanned once per conversion. */
    private final List<RomanSymbol> symbols = Arrays.asList(
            new RomanSymbol(1000, "M"),
            new RomanSymbol(900, "CM"),
            new RomanSymbol(500, "D"),
            new RomanSymbol(400, "CD"),
            new RomanSymbol(100, "C"),
            new RomanSymbol(90, "XC"),
            new RomanSymbol(50, "L"),
            new RomanSymbol(40, "XL"),
            new RomanSymbol(10, "X"),
            new RomanSymbol(9, "IX"),
            new RomanSymbol(5, "V"),
            new RomanSymbol(4, "IV"),
            new RomanSymbol(1, "I"));

    /**
     * Converts {@code val} to a Roman numeral string.
     *
     * @param val the value to convert
     * @return the Roman numeral representation
     */
    public String convert(int val) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < symbols.size(); i++) {
            // Greedily take as many repetitions of this symbol as fit into val.
            RomanSymbol run = symbols.get(i).repeatUntilGreaterOrEqualTo(val);
            sb.append(run.symbol);
            val -= run.value;
        }
        return sb.toString();
    }
}
package net.malisis.core.renderer;

import java.lang.reflect.Field;

import javax.vecmath.Matrix4f;

import net.malisis.core.MalisisCore;
import net.malisis.core.MalisisRegistry;
import net.malisis.core.asm.AsmUtils;
import net.malisis.core.renderer.element.Face;
import net.malisis.core.renderer.element.Shape;
import net.malisis.core.renderer.element.Vertex;
import net.malisis.core.renderer.element.shape.Cube;
import net.malisis.core.renderer.font.FontRenderOptions;
import net.malisis.core.renderer.font.MalisisFont;
import net.malisis.core.renderer.icon.IIconProvider;
import net.malisis.core.renderer.icon.IMetaIconProvider;
import net.malisis.core.renderer.icon.MalisisIcon;
import net.malisis.core.renderer.icon.provider.IBlockIconProvider;
import net.malisis.core.renderer.icon.provider.IItemIconProvider;
import net.malisis.core.renderer.model.MalisisModel;
import net.malisis.core.util.BlockPosUtils;
import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.client.renderer.DestroyBlockProgress;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.RenderGlobal;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.WorldRenderer;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms.TransformType;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer;
import net.minecraft.client.renderer.vertex.DefaultVertexFormats;
import net.minecraft.client.renderer.vertex.VertexFormat;
import net.minecraft.client.renderer.vertex.VertexFormatElement;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumWorldBlockLayer;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.Timer;
import net.minecraft.world.IBlockAccess;
import net.minecraftforge.client.MinecraftForgeClient;
import net.minecraftforge.client.event.RenderWorldLastEvent;
import net.minecraftforge.fml.client.registry.ClientRegistry;

import org.lwjgl.opengl.GL11;

/**
 * Base class for rendering. Handles the rendering. Provides easy registration of the renderer, and automatically sets up the context for
 * the rendering.
 *
 * @author Ordinastie
 *
 */
@SuppressWarnings("deprecation")
public class MalisisRenderer extends TileEntitySpecialRenderer implements IBlockRenderer, IRenderWorldLast
{
    /** Reference to Tessellator.isDrawing field **/
    private static Field isDrawingField;

    /**
     * Vertex format used by this renderer: position (3 floats), color (4 ubytes),
     * texture UV (2 floats), lightmap UV (2 shorts), normal (3 bytes) + 1 padding byte.
     */
    public static VertexFormat vertexFormat = new VertexFormat()
    {
        {
            setElement(new VertexFormatElement(0, VertexFormatElement.EnumType.FLOAT, VertexFormatElement.EnumUsage.POSITION, 3));
            setElement(new VertexFormatElement(0, VertexFormatElement.EnumType.UBYTE, VertexFormatElement.EnumUsage.COLOR, 4));
            setElement(new VertexFormatElement(0, VertexFormatElement.EnumType.FLOAT, VertexFormatElement.EnumUsage.UV, 2));
            setElement(new VertexFormatElement(1, VertexFormatElement.EnumType.SHORT, VertexFormatElement.EnumUsage.UV, 2));
            setElement(new VertexFormatElement(0, VertexFormatElement.EnumType.BYTE, VertexFormatElement.EnumUsage.NORMAL, 3));
            setElement(new VertexFormatElement(0, VertexFormatElement.EnumType.BYTE, VertexFormatElement.EnumUsage.PADDING, 1));
        }
    };

    /** Whether this {@link MalisisRenderer} initialized. (initialize() already called) */
    private boolean initialized = false;
    /** Tessellator reference. */
    protected WorldRenderer wr = Tessellator.getInstance().getWorldRenderer();
    /** Current world reference (BLOCK/TESR/IRWL). */
    protected IBlockAccess world;
    /** Position of the block (BLOCK/TESR). */
    protected BlockPos pos;
    /** Block to render (BLOCK/TESR). */
    protected Block block;
    /** Block state of the block to render (BLOCK/TESR). */
    protected IBlockState blockState;
    /** TileEntity currently drawing (TESR). */
    protected TileEntity tileEntity;
    /** Partial tick time (TESR/IRWL). */
    protected float partialTick = 0;
    /** ItemStack to render (ITEM). */
    protected ItemStack itemStack;
    /** Item to render (ITEM) */
    protected Item item;
    /** Type of render for item (ITEM). NOTE(review): field name has a typo ("tranformType"). **/
    protected TransformType tranformType;
    /** RenderGlobal reference (IRWL) */
    protected RenderGlobal renderGlobal;
    /** Type of rendering. */
    protected RenderType renderType;
    /** Mode of rendering (GL constant). */
    protected int drawMode;
    /** Base brightness of the block. */
    protected int baseBrightness;
    /** An override texture set by the renderer. */
    protected MalisisIcon overrideTexture;
    /** Whether the damage for the blocks should be handled by this {@link MalisisRenderer} (for TESR). */
    protected boolean getBlockDamage = false;
    /** Current block destroy progression (for TESR). */
    protected DestroyBlockProgress destroyBlockProgress = null;
    /** Whether at least one vertex has been drawn. */
    protected boolean vertexDrawn = false;

    /**
     * Instantiates a new {@link MalisisRenderer}.
     */
    public MalisisRenderer()
    {
        //this.renderId = RenderingRegistry.getNextAvailableRenderId();
    }

    // #region set()
    /**
     * Resets data so this {@link MalisisRenderer} can be reused.
     */
    public void reset()
    {
        this.wr = null;
        this.renderType = RenderType.UNSET;
        this.drawMode = 0;
        this.world = null;
        this.pos = null;
        this.block = null;
        this.blockState = null;
        this.item = null;
        this.itemStack = null;
        this.overrideTexture = null;
        this.destroyBlockProgress = null;
    }

    /**
     * Sets informations for this {@link MalisisRenderer}.
     *
     * @param world the world
     * @param block the block
     * @param pos the block position
     * @param blockState the block state
     */
    public void set(IBlockAccess world, Block block, BlockPos pos, IBlockState blockState)
    {
        this.world = world;
        this.pos = pos;
        this.block = block;
        this.blockState = blockState;
    }

    /**
     * Sets informations for this {@link MalisisRenderer}.
     *
     * @param world the world
     */
    public void set(IBlockAccess world)
    {
        this.world = world;
    }

    /**
     * Sets informations for this {@link MalisisRenderer}. The block state is
     * reset to the block's default state.
     *
     * @param block the block
     */
    public void set(Block block)
    {
        this.block = block;
        this.blockState = block.getDefaultState();
    }

    /**
     * Sets informations for this {@link MalisisRenderer}. Both the block state
     * and its owning block are stored.
     *
     * @param blockState the block state
     */
    public void set(IBlockState blockState)
    {
        this.block = blockState.getBlock();
        this.blockState = blockState;
    }

    /**
     * Sets informations for this {@link MalisisRenderer}.
     *
     * @param pos the block position
     */
    public void set(BlockPos pos)
    {
        this.pos = pos;
    }

    /**
     * Sets informations for this {@link MalisisRenderer} from a TileEntity:
     * world, block, position and block state are derived from the TE.
     *
     * @param te the te
     * @param partialTick the partial tick
     */
    public void set(TileEntity te, float partialTick)
    {
        set(te.getWorld(), te.getBlockType(), te.getPos(), te.getWorld().getBlockState(te.getPos()));
        this.partialTick = partialTick;
        this.tileEntity = te;
    }

    /**
     * Sets informations for this {@link MalisisRenderer}. If the stack holds an
     * {@link ItemBlock}, the corresponding block is set too.
     *
     * @param itemStack the item stack
     */
    public void set(ItemStack itemStack)
    {
        this.itemStack = itemStack;
        this.item = itemStack.getItem();
        if (item instanceof ItemBlock)
            set(Block.getBlockFromItem(itemStack.getItem()));
    }

    // #end

    //#region IBlockRenderer
    /**
     * Renders a block in world: sets context, prepares, renders and cleans.
     * Synchronized because this renderer instance is shared mutable state.
     *
     * @return true if at least one vertex was drawn
     */
    @Override
    public synchronized boolean renderBlock(WorldRenderer wr, IBlockAccess world, BlockPos pos, IBlockState state)
    {
        this.wr = wr;
        set(world, state.getBlock(), pos, state);
        prepare(RenderType.BLOCK);
        render();
        clean();
        return vertexDrawn;
    }

    //#end IBlockRenderer

    //#region IItemRenderer
    /**
     * Renders an item stack: sets context, prepares, renders and cleans.
     *
     * @return always true
     */
    @Override
    public synchronized boolean renderItem(ItemStack itemStack, float partialTick)
    {
        this.wr = Tessellator.getInstance().getWorldRenderer();
        set(itemStack);
        prepare(RenderType.ITEM);
        render();
        clean();
        return true;
    }

    @Override
    public boolean isGui3d()
    {
        return true;
    }

    /**
     * Stores the requested transform type; no custom transform matrix is supplied.
     *
     * @return always null
     */
    @Override
    public Matrix4f getTransform(TransformType tranformType)
    {
        this.tranformType = tranformType;
        return null;
    }

    //#end IItemRenderer

    // #region TESR
    /**
     * Renders a {@link TileEntitySpecialRenderer}.
     *
     * @param te the TileEntity
     * @param x the x
     * @param y the y
     * @param z the z
     * @param partialTick the partial tick
     * @param destroyStage the block destroy stage (currently unused here)
     */
    @Override
    public synchronized void renderTileEntityAt(TileEntity te, double x, double y, double z, float partialTick, int destroyStage)
    {
        // Guard against stale TEs whose block has already changed in world
        if (te.getBlockType() != te.getWorld().getBlockState(te.getPos()).getBlock())
            return;

        this.wr = Tessellator.getInstance().getWorldRenderer();
        set(te, partialTick);
        prepare(RenderType.TILE_ENTITY, x, y, z);
        render();
        //TODO
        //		if (getBlockDamage)
        //			destroyBlockProgress = getBlockDestroyProgress();
        //		if (destroyBlockProgress != null)
        //			next();
        //			GL11.glEnable(GL11.GL_BLEND);
        //			OpenGlHelper.glBlendFunc(GL11.GL_DST_COLOR, GL11.GL_SRC_COLOR, GL11.GL_ONE, GL11.GL_ZERO);
        //			GL11.glAlphaFunc(GL11.GL_GREATER, 0);
        //			GL11.glColor4f(1.0F, 1.0F, 1.0F, 0.5F);
        //			t.disableColor();
        //			renderDestroyProgress();
        //			next();
        //			GL11.glDisable(GL11.GL_BLEND);
        clean();
    }

    // #end TESR

    // #region IRenderWorldLast
    @Override
    public boolean shouldSetViewportPosition()
    {
        return true;
    }

    @Override
    public boolean shouldRender(RenderWorldLastEvent event, IBlockAccess world)
    {
        return true;
    }

    /**
     * Renders during the world-last event, optionally translating the viewport
     * to the interpolated player position so coordinates are world-absolute.
     */
    @Override
    public void renderWorldLastEvent(RenderWorldLastEvent event, IBlockAccess world)
    {
        set(world);
        wr = Tessellator.getInstance().getWorldRenderer();
        partialTick = event.partialTicks;
        renderGlobal = event.context;
        double x = 0, y = 0, z = 0;
        if (shouldSetViewportPosition())
        {
            // Interpolate the player position for smooth camera-relative rendering
            EntityPlayerSP p = Minecraft.getMinecraft().thePlayer;
            x = -(p.lastTickPosX + (p.posX - p.lastTickPosX) * partialTick);
            y = -(p.lastTickPosY + (p.posY - p.lastTickPosY) * partialTick);
            z = -(p.lastTickPosZ + (p.posZ - p.lastTickPosZ) * partialTick);
        }

        prepare(RenderType.WORLD_LAST, x, y, z);

        render();

        clean();
    }

    // #end IRenderWorldLast

    // #region prepare()
    /**
     * Prepares the {@link Tessellator} and the GL states for the <b>renderType</b>. <b>data</b> is only used for TESR and IRWL.<br>
     * TESR and IRWL rendering are surrounded by glPushAttrib(GL_LIGHTING_BIT) and block texture sheet is bound.
     *
     * @param renderType the render type
     * @param data translation coordinates (x, y, z) for TESR/IRWL only
     */
    public void prepare(RenderType renderType, double... data)
    {
        _initialize();

        this.renderType = renderType;
        if (renderType == RenderType.BLOCK)
        {
            wr.setVertexFormat(DefaultVertexFormats.BLOCK);
        }
        else if (renderType == RenderType.ITEM)
        {
            startDrawing();
        }
        else if (renderType == RenderType.TILE_ENTITY)
        {
            GlStateManager.pushAttrib();
            GlStateManager.pushMatrix();
            GlStateManager.disableLighting();

            GlStateManager.translate(data[0], data[1], data[2]);

            bindTexture(TextureMap.locationBlocksTexture);

            startDrawing();
        }
        else if (renderType == RenderType.WORLD_LAST)
        {
            GlStateManager.pushAttrib();
            GlStateManager.pushMatrix();

            GlStateManager.translate(data[0], data[1], data[2]);

            bindTexture(TextureMap.locationBlocksTexture);

            startDrawing();
        }
    }

    /**
     * Cleans the current renderer state: flushes pending draws and pops the GL
     * state pushed by {@link #prepare(RenderType, double...)}, then resets fields.
     */
    public void clean()
    {
        if (renderType == RenderType.ITEM)
        {
            draw();
            //	GlStateManager.enableLighting();
            //	GlStateManager.popMatrix();
            //	GlStateManager.popAttrib();
        }
        else if (renderType == RenderType.TILE_ENTITY)
        {
            draw();
            disableBlending();
            GlStateManager.enableLighting();
            GlStateManager.popMatrix();
            GlStateManager.popAttrib();
        }
        else if (renderType == RenderType.WORLD_LAST)
        {
            draw();
            GlStateManager.popMatrix();
            GlStateManager.popAttrib();
        }
        reset();
    }

    /**
     * Tells the {@link Tessellator} to start drawing GL_QUADS.
     */
    public void startDrawing()
    {
        startDrawing(GL11.GL_QUADS);
    }

    /**
     * Tells the {@link Tessellator} to start drawing <b>drawMode</b>.
     *
     * @param drawMode the draw mode
     */
    public void startDrawing(int drawMode)
    {
        // Flush any in-progress draw before switching mode
        if (isDrawing())
            draw();

        wr.startDrawing(drawMode);
        wr.setVertexFormat(vertexFormat);
        this.drawMode = drawMode;
    }

    /**
     * Checks if the {@link Tessellator} is currently drawing.
     *
     * @return true, if is drawing
     */
    public boolean isDrawing()
    {
        // Lazily open reflective access to WorldRenderer.isDrawing
        // (field_179010_r is the obfuscated SRG name of the same field)
        if (isDrawingField == null)
            isDrawingField = AsmUtils.changeFieldAccess(WorldRenderer.class, "isDrawing", "field_179010_r");

        try
        {
            if (wr == null)
                throw new NullPointerException("[MalisisRenderer] WorldRenderer not set for " + renderType);
            return isDrawingField.getBoolean(wr);
        }
        catch (IllegalArgumentException | IllegalAccessException e)
        {
            MalisisCore.log.error("[MalisisRenderer] Failed to get Tessellator.isDrawing value", e);
            return false;
        }
    }

    /**
     * Triggers a draw and restart drawing with current {@link MalisisRenderer#drawMode}.
     */
    public void next()
    {
        next(drawMode);
    }

    /**
     * Triggers a draw and restart drawing with <b>drawMode</b>.
     *
     * @param drawMode the draw mode
     */
    public void next(int drawMode)
    {
        draw();
        startDrawing(drawMode);
    }

    /**
     * Triggers a draw.
     */
    public void draw()
    {
        if (isDrawing())
            Tessellator.getInstance().draw();
    }

    /**
     * Enables the blending for the rendering. Ineffective for BLOCK renderType.
     */
    public void enableBlending()
    {
        if (renderType == RenderType.BLOCK)
            return;

        GlStateManager.enableBlend();
        GlStateManager.tryBlendFuncSeparate(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA, GL11.GL_ONE, GL11.GL_ZERO);
        GlStateManager.alphaFunc(GL11.GL_GREATER, 0.0F);
        GlStateManager.shadeModel(GL11.GL_SMOOTH);
        GlStateManager.enableColorMaterial();
    }

    /**
     * Disables blending for the rendering. Ineffective for BLOCK renderType.
     */
    public void disableBlending()
    {
        if (renderType == RenderType.BLOCK)
            return;

        GlStateManager.disableColorMaterial();
        GlStateManager.shadeModel(GL11.GL_FLAT);
        GlStateManager.disableBlend();
    }

    /**
     * Enables textures
     */
    public void enableTextures()
    {
        GL11.glEnable(GL11.GL_TEXTURE_2D);
    }

    /**
     * Disables textures.
     */
    public void disableTextures()
    {
        GL11.glDisable(GL11.GL_TEXTURE_2D);
    }

    /**
     * Binds the resource to the texture manager.
     * NOTE(review): parameter name has a typo ("resourceLocaltion").
     */
    @Override
    protected void bindTexture(ResourceLocation resourceLocaltion)
    {
        Minecraft.getMinecraft().getTextureManager().bindTexture(resourceLocaltion);
    }

    /**
     * Sets billboard mode.<br>
     * Contents drawn will always be facing the player.
     *
     * @param x the x
     * @param y the y
     * @param z the z
     */
    public void setBillboard(float x, float y, float z)
    {
        EntityPlayerSP player = Minecraft.getMinecraft().thePlayer;
        GlStateManager.pushMatrix();
        GlStateManager.translate(x, y, z);
        // Only yaw is compensated; pitch is left unchanged
        GlStateManager.rotate(180 - player.rotationYaw, 0, 1, 0);
    }

    /**
     * End billboard mode.
     */
    public void endBillboard()
    {
        GlStateManager.popMatrix();
    }

    // #end prepare()

    /**
     * Runs {@link #initialize()} exactly once, guarded by the initialized flag.
     */
    protected final void _initialize()
    {
        if (initialized)
            return;
        initialize();
        initialized = true;
    }

    /**
     * Initializes this {@link MalisisRenderer}. Does nothing by default.<br>
     * Called the first time a rendering is done and should be overridden if some setup is needed for the rendering (building shape and
     * parameters).
     */
    protected void initialize()
    {}

    /**
     * Renders the blocks using the default Minecraft rendering system.
     */
    public void renderStandard()
    {
        Minecraft.getMinecraft().getBlockRendererDispatcher().renderBlock(blockState, pos, world, wr);
    }

    /**
     * Main rendering method. Draws simple cube by default.<br>
     * Should be overridden to handle the rendering.
     */
    public void render()
    {
        drawShape(new Cube());
    }

    /**
     * Draws every {@link Shape} of the model with the given parameters.
     *
     * @param model the model
     * @param params the params
     */
    protected void drawModel(MalisisModel model, RenderParameters params)
    {
        for (Shape s : model)
            drawShape(s, params);
    }

    /**
     * Draws a {@link Shape} without {@link RenderParameters} (default will be used).
     *
     * @param shape the shape
     */
    public void drawShape(Shape shape)
    {
        drawShape(shape, null);
    }

    /**
     * Draws a {@link Shape} with specified {@link RenderParameters}.
     *
     * @param s the shape
     * @param params the params
     */
    public void drawShape(Shape s, RenderParameters params)
    {
        if (s == null)
            return;

        // Bake pending transforms into the vertexes before drawing
        s.applyMatrix();

        for (Face f : s.getFaces())
            drawFace(f, params);
    }

    /**
     * Draws a {@link Face} with its own {@link RenderParameters}.
     *
     * @param face the face
     */
    protected void drawFace(Face face)
    {
        drawFace(face, null);
    }

    /**
     * Draws a {@link Face} with specified {@link RenderParameters}.
     *
     * @param face the face
     * @param params the face params
     */
    protected void drawFace(Face face, RenderParameters params)
    {
        if (face == null)
            return;

        // BLOCK rendering only accepts quads
        int vertexCount = face.getVertexes().length;
        if (vertexCount != 4 && renderType == RenderType.BLOCK)
        {
            MalisisCore.log.error("[MalisisRenderer] Attempting to render a face containing {} vertexes in BLOCK for {}. Ignored",
                    vertexCount, block);
            return;
        }

        params = RenderParameters.merge(params, face.getParameters());

        if (!shouldRenderFace(face, params))
            return;

        if (params.applyTexture.get())
            applyTexture(face, params);

        //use normals if available
        if ((renderType == RenderType.ITEM || params.useNormals.get()) && params.direction.get() != null)
            wr.setNormal(params.direction.get().getFrontOffsetX(), params.direction.get().getFrontOffsetY(), params.direction.get()
                    .getFrontOffsetZ());

        baseBrightness = getBaseBrightness(params);

        for (int i = 0; i < face.getVertexes().length; i++)
            drawVertex(face.getVertexes()[i], i, params);

        //we need to separate each face
        if (drawMode == GL11.GL_POLYGON || drawMode == GL11.GL_LINE || drawMode == GL11.GL_LINE_STRIP || drawMode == GL11.GL_LINE_LOOP)
            next();
    }

    /**
     * Draws a single {@link Vertex}.
     *
     * @param font the font
     * @param text the text
     * @param x the x
     * @param y the y
     * @param z the z
     * @param fro the font render options
     */
    public void drawText(MalisisFont font, String text, float x, float y, float z, FontRenderOptions fro)
    {
        // Fall back to the default Minecraft font and options if none supplied
        if (font == null)
            font = MalisisFont.minecraftFont;
        if (fro == null)
            fro = new FontRenderOptions();

        font.render(this, text, x, y, z, fro);
    }

    /**
     * Checks if a {@link Face} should be rendered. {@link RenderParameters#direction} needs to be defined for the <b>face</b>.
     *
     * @param face the face
     * @param params the merged render parameters
     * @return true, if successful
     */
    protected boolean shouldRenderFace(Face face, RenderParameters params)
    {
        // Only BLOCK rendering with a valid world/block does side culling
        if (renderType != RenderType.BLOCK || world == null || block == null)
            return true;
        if (params != null && params.renderAllFaces.get())
            return true;

        RenderParameters p = face.getParameters();
        if (p.direction.get() == null || p.renderAllFaces.get())
            return true;

        // Delegate to vanilla side-visibility check on the neighbor position
        boolean b = block.shouldSideBeRendered(world, pos.offset(p.direction.get()), p.direction.get());
        return b;
    }

    /**
     * Applies the texture to the {@link Shape}.<br>
     * Usually necessary before some shape transformations in conjunction with {@link RenderParameters#applyTexture} set to
     * <code>false</code> to prevent reapplying texture when rendering.
     *
     * @param shape the shape
     */
    public void applyTexture(Shape shape)
    {
        applyTexture(shape, null);
    }

    /**
     * Applies the texture to the {@link Shape} with specified {@link RenderParameters}.<br>
     * Usually necessary before some shape transformations in conjunction with {@link RenderParameters#applyTexture} set to
     * <code>false</code> to prevent reapplying texture when rendering.
     *
     * @param shape the shape
     * @param params the parameters
     */
    public void applyTexture(Shape shape, RenderParameters params)
    {
        //shape.applyMatrix();
        for (Face f : shape.getFaces())
        {
            RenderParameters rp = RenderParameters.merge(params, f.getParameters());
            applyTexture(f, rp);
        }
    }

    /**
     * Applies the texture to the {@link Face} with specified {@link RenderParameters}.<br>
     *
     * @param face the face
     * @param params the parameters
     */
    public void applyTexture(Face face, RenderParameters params)
    {
        MalisisIcon icon = getIcon(face, params);
        boolean flipU = params.flipU.get();
        // North/east faces have mirrored U by convention in this renderer
        if (params.direction.get() == EnumFacing.NORTH || params.direction.get() == EnumFacing.EAST)
            flipU = !flipU;
        face.setTexture(icon, flipU, params.flipV.get(), params.interpolateUV.get());
    }

    /**
     * Gets the {@link MalisisIcon} corresponding to the specified {@link RenderParameters}.<br>
     * If {@link #block} or {@link #item} is an {@link IIconProvider} and give the right provider for the current context, gets the icon
     * from that provider.
     *
     * @param face the face
     * @param params the params
     * @return the icon
     */
    protected MalisisIcon getIcon(Face face, RenderParameters params)
    {
        // Explicit icon in the parameters always wins
        if (params.icon.get() != null)
            return params.icon.get();

        IIconProvider iconProvider = getIconProvider(params);

        if (iconProvider instanceof IItemIconProvider && itemStack != null)
            return ((IItemIconProvider) iconProvider).getIcon(itemStack);

        if (iconProvider instanceof IBlockIconProvider && block != null)
        {
            IBlockIconProvider iblockp = (IBlockIconProvider) iconProvider;
            if (renderType == RenderType.BLOCK || renderType == RenderType.TILE_ENTITY)
                return iblockp.getIcon(world, pos, blockState, params.textureSide.get());
            else if (renderType == RenderType.ITEM)
                return iblockp.getIcon(itemStack, params.textureSide.get());
        }

        return iconProvider != null ? iconProvider.getIcon() : null;
    }

    /**
     * Gets the {@link IIconProvider} either from parameters, the block or the item.
* * @return the icon provider */ protected IIconProvider getIconProvider(RenderParameters params) { if (params.iconProvider.get() != null) return params.iconProvider.get(); if (item instanceof IMetaIconProvider && ((IMetaIconProvider) item).getIconProvider() != null) return ((IMetaIconProvider) item).getIconProvider(); if (block instanceof IMetaIconProvider && ((IMetaIconProvider) block).getIconProvider() != null) return ((IMetaIconProvider) block).getIconProvider(); return null; } /** * Calculates the ambient occlusion for a {@link Vertex} and also applies the side dependent shade.<br> * <b>aoMatrix</b> is the list of block coordinates necessary to compute AO. If it's empty, only the global face shade is applied.<br> * Also, <i>params.colorMultiplier</i> is applied as well. * * @param vertex the vertex * @param aoMatrix the ao matrix * @return the int */ protected int calcVertexColor(Vertex vertex, int number, RenderParameters params) { int color = 0xFFFFFF; if (params == null) return color; if (params.usePerVertexColor.get()) //vertex should use their own colors color = vertex.getColor(); if (params.colorMultiplier.get() != null) //global color multiplier is set color = params.colorMultiplier.get(); else if (block != null) //use block color multiplier color = world != null ? 
block.colorMultiplier(world, pos, 0) : block.getRenderColor(blockState); if (drawMode == GL11.GL_LINE) //no AO for lines return color; if (renderType != RenderType.BLOCK && renderType != RenderType.TILE_ENTITY) //no AO for item/inventories return color; int[][] aoMatrix = (int[][]) params.aoMatrix.get(number); float factor = 1; //calculate AO if (params.calculateAOColor.get() && aoMatrix != null && Minecraft.isAmbientOcclusionEnabled() && block.getLightValue(world, pos) == 0 && params.direction.get() != null) { factor = getBlockAmbientOcclusion(world, pos.offset(params.direction.get())); for (int i = 0; i < aoMatrix.length; i++) factor += getBlockAmbientOcclusion(world, pos.add(aoMatrix[i][0], aoMatrix[i][1], aoMatrix[i][2])); factor /= (aoMatrix.length + 1); } //apply face dependent shading factor *= params.colorFactor.get(); int r = (int) ((color >> 16 & 255) * factor); int g = (int) ((color >> 8 & 255) * factor); int b = (int) ((color & 255) * factor); color = r << 16 | g << 8 | b; return color; } /** * Gets the base brightness for the current {@link Face}.<br> * If <i>params.useBlockBrightness</i> = false, <i>params.brightness</i>. 
Else, the brightness is determined based on * <i>params.offset</i> and <i>getBlockBounds()</i> * * @return the base brightness */ protected int getBaseBrightness(RenderParameters params) { if (!params.useEnvironmentBrightness.get()) return params.brightness.get(); if (block != null) { if (world != null && block.getLightValue(world, pos) != 0) return block.getLightValue(world, pos) << 4; else if (block.getLightValue() != 0) return block.getLightValue() << 4; } if (renderType == RenderType.ITEM) return Minecraft.getMinecraft().thePlayer.getBrightnessForRender(getPartialTick()); //not in world if (world == null || block == null) return params.brightness.get(); //no direction, we can only use current block brightness if (params.direction.get() == null && block != null) return block.getMixedBrightnessForBlock(world, pos); AxisAlignedBB bounds = getRenderBounds(params); EnumFacing dir = params.direction.get(); BlockPos p = pos; if (dir != null) p = p.offset(dir); //use the brightness of the block next to it if (bounds != null) { if (dir == EnumFacing.WEST && bounds.minX > 0) p = p.east(); else if (dir == EnumFacing.EAST && bounds.maxX < 1) p = p.west(); else if (dir == EnumFacing.NORTH && bounds.minZ > 0) p = p.south(); else if (dir == EnumFacing.SOUTH && bounds.maxZ < 1) p = p.north(); else if (dir == EnumFacing.DOWN && bounds.minY > 0) p = p.up(); else if (dir == EnumFacing.UP && bounds.maxY < 1) p = p.down(); } return getMixedBrightnessForBlock(world, p); } /** * Calculates the ambient occlusion brightness for a {@link Vertex}. <b>aoMatrix</b> is the list of block coordinates necessary to * compute AO. 
Only first 3 blocks are used.<br> * * @param vertex the vertex * @param aoMatrix the ao matrix * @return the int */ protected int calcVertexBrightness(Vertex vertex, int number, RenderParameters params) { if (params == null) return baseBrightness; if (params.usePerVertexBrightness.get()) return vertex.getBrightness(); if (drawMode == GL11.GL_LINE) //no AO for lines return baseBrightness; if (renderType != RenderType.BLOCK && renderType != RenderType.TILE_ENTITY) //not in world return baseBrightness; int[][] aoMatrix = (int[][]) params.aoMatrix.get(number); if (!params.calculateBrightness.get() || aoMatrix == null) //no data return baseBrightness; if (!Minecraft.isAmbientOcclusionEnabled() || block.getLightValue(world, pos) != 0) // emit light return baseBrightness; int[] b = new int[Math.max(3, aoMatrix.length)]; for (int i = 0; i < b.length; i++) b[i] += getMixedBrightnessForBlock(world, pos.add(aoMatrix[i][0], aoMatrix[i][1], aoMatrix[i][2])); int brightness = getAoBrightness(b[0], b[1], b[2], baseBrightness); return brightness; } /** * Does the actual brightness calculation (copied from net.minecraft.client.renderer.BlocksRenderer.java) * * @param b1 the b1 * @param b2 the b2 * @param b3 the b3 * @param base the base * @return the ao brightness */ protected int getAoBrightness(int b1, int b2, int b3, int base) { if (b1 == 0) b1 = base; if (b2 == 0) b2 = base; if (b3 == 0) b3 = base; return b1 + b2 + b3 + base >> 2 & 16711935; } /** * Gets the block ambient occlusion value. Contrary to base Minecraft code, it's the actual block at the <b>x</b>, <b>y</b> and <b>z</b> * coordinates which is used to get the value, and not value of the block drawn. This allows to have different logic behaviors for AO * values for a block. 
* * @param world the world * @param x the x * @param y the y * @param z the z * @return the block ambient occlusion */ protected float getBlockAmbientOcclusion(IBlockAccess world, BlockPos pos) { Block block = world.getBlockState(pos).getBlock(); if (block == null) return 1.0F; return block.getAmbientOcclusionLightValue(); } /** * Gets the mix brightness for a block (sky + block source). * * @param world the world * @param x the x * @param y the y * @param z the z * @return the mixed brightness for block */ protected int getMixedBrightnessForBlock(IBlockAccess world, BlockPos pos) { // return world.getLightBrightnessForSkyBlocks(x, y, z, 0); return world.getBlockState(pos).getBlock().getMixedBrightnessForBlock(world, pos); } /** * Gets the rendering bounds. If <i>params.useBlockBounds</i> = false, <i>params.renderBounds</i> is used instead of the actual block * bounds. * * @return the render bounds */ protected AxisAlignedBB getRenderBounds(RenderParameters params) { if (block == null || !params.useBlockBounds.get()) return params.renderBounds.get(); if (world != null) block.setBlockBoundsBasedOnState(world, pos); return new AxisAlignedBB(block.getBlockBoundsMinX(), block.getBlockBoundsMinY(), block.getBlockBoundsMinZ(), block.getBlockBoundsMaxX(), block.getBlockBoundsMaxY(), block.getBlockBoundsMaxZ()); } private static Timer timer = null; public static float getPartialTick() { if (timer == null) { Field f = AsmUtils.changeFieldAccess(Minecraft.class, "timer", "field_71428_T"); try { timer = (Timer) f.get(Minecraft.getMinecraft()); } catch (IllegalArgumentException | IllegalAccessException e) { MalisisCore.log.info("[MalisisRenderer] Failed to acces Minecraft timer."); timer = new Timer(20F); } } return timer.elapsedPartialTicks; } public static EnumWorldBlockLayer getRenderLayer() { return MinecraftForgeClient.getRenderLayer(); } /** * Registers this {@link MalisisRenderer} to be used for rendering the specified <b>block</b>. 
* * @param block the block */ public void registerFor(Block block) { registerFor(block, getDefaultRenderInfos()); } public void registerFor(Block block, IItemRenderInfo renderInfos) { MalisisRegistry.registerBlockRenderer(block, this, renderInfos); } /** * Registers this {@link MalisisRenderer} to be used for rendering the specified <b>item</b>. * * @param item the item */ public void registerFor(Item item) { registerFor(item, getDefaultRenderInfos()); } public void registerFor(Item item, IItemRenderInfo renderInfos) { MalisisRegistry.registerItemRenderer(item, this, renderInfos); } /** * Registers this {@link MalisisRenderer} to be used for rendering for a specified class.<br> * Class has to extend TileEntity.<br> * * @param clazz the clazz */ public void registerFor(Class<? extends TileEntity> clazz) { ClientRegistry.bindTileEntitySpecialRenderer(clazz, this); } /** * Registers this {@link MalisisRenderer} to be used for {@link RenderWorldLastEvent}. */ public void registerForRenderWorldLast() { MalisisRegistry.registerRenderWorldLast(this); } private IItemRenderInfo getDefaultRenderInfos() { return new IItemRenderInfo() { @Override public boolean isGui3d() { return MalisisRenderer.this.isGui3d(); } @Override public Matrix4f getTransform(TransformType tranformType) { return MalisisRenderer.this.getTransform(tranformType); } }; } }
package net.sf.jabref.logic; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import net.sf.jabref.model.EntryTypes; import net.sf.jabref.model.database.BibDatabaseMode; import net.sf.jabref.model.entry.CustomEntryType; import net.sf.jabref.preferences.JabRefPreferences; public class CustomEntryTypesManager { public static final Map<BibDatabaseMode, List<CustomEntryType>> CUSTOM_TYPES_BY_MODE_MAP = new HashMap<>(); /** * Load all custom entry types from preferences. This method is * called from JabRef when the program starts. */ public static void loadCustomEntryTypes(JabRefPreferences prefs) { List<CustomEntryType> customBibtexTypes = prefs.loadCustomEntryTypes(BibDatabaseMode.BIBTEX); for(CustomEntryType type : customBibtexTypes) { EntryTypes.addOrModifyCustomEntryType(type, BibDatabaseMode.BIBTEX); } CUSTOM_TYPES_BY_MODE_MAP.put(BibDatabaseMode.BIBTEX, customBibtexTypes); List<CustomEntryType> customBiblatexTypes = prefs.loadCustomEntryTypes(BibDatabaseMode.BIBLATEX); for(CustomEntryType type :customBiblatexTypes) { EntryTypes.addOrModifyCustomEntryType(type, BibDatabaseMode.BIBLATEX); } CUSTOM_TYPES_BY_MODE_MAP.put(BibDatabaseMode.BIBLATEX, customBiblatexTypes); } /** * Iterate through all entry types, and store those that are * custom defined to preferences. This method is called from * JabRefFrame when the program closes. */ public static void saveCustomEntryTypes(JabRefPreferences prefs) { saveCustomEntryTypes(prefs, BibDatabaseMode.BIBTEX); saveCustomEntryTypes(prefs, BibDatabaseMode.BIBLATEX); } private static void saveCustomEntryTypes(JabRefPreferences prefs, BibDatabaseMode mode) { List<CustomEntryType> customBiblatexTypes = EntryTypes.getAllValues(mode).stream() .filter(type -> type instanceof CustomEntryType) .map(entryType -> (CustomEntryType) entryType).collect(Collectors.toList()); prefs.storeCustomEntryTypes(customBiblatexTypes, mode); } }
package net.timbusproject.extractors.core;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JAXB-serializable descriptor of a single extractor parameter: its value,
 * visibility, type and whether it is mandatory.
 *
 * Defaults (applied by every constructor unless overridden): visible
 * (hidden = false), mandatory = true, parameterType = STRING.
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class Parameter {

    // fields are public and annotated individually because the access type is NONE
    @XmlElement
    public String value;
    @XmlElement
    public boolean hidden;
    @XmlElement
    public ParameterType parameterType;
    @XmlElement
    public boolean mandatory;

    /** No-arg constructor (also required by JAXB); applies all defaults. */
    public Parameter() {
        this(false, true, ParameterType.STRING);
    }

    public Parameter(boolean hidden) {
        this(hidden, true, ParameterType.STRING);
    }

    public Parameter(boolean hidden, boolean mandatory) {
        this(hidden, mandatory, ParameterType.STRING);
    }

    public Parameter(boolean hidden, ParameterType parameterType) {
        this(hidden, true, parameterType);
    }

    /** Designated constructor; all other constructors delegate here. */
    public Parameter(boolean hidden, boolean mandatory, ParameterType parameterType) {
        this.hidden = hidden;
        this.mandatory = mandatory;
        this.parameterType = parameterType;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getValue() {
        return value;
    }

    public void setObjectType() {
        this.parameterType = ParameterType.OBJECT;
    }

    public void setStringType() {
        this.parameterType = ParameterType.STRING;
    }

    public void setNumberType() {
        this.parameterType = ParameterType.NUMBER;
    }

    public boolean isHidden() {
        return hidden;
    }

    public boolean isMandatory() {
        return mandatory;
    }

    public ParameterType getParameterType() {
        return parameterType;
    }

    public void setMandatory(boolean is) {
        mandatory = is;
    }
}
package net.zero918nobita.Xemime.interpreter; import net.zero918nobita.Xemime.entity.*; import net.zero918nobita.Xemime.parser.Parser; import net.zero918nobita.Xemime.utils.VirtualMemoryMonitor; import net.zero918nobita.Xemime.ast.*; import java.io.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import java.util.TreeMap; /** * * @author Kodai Matsumoto */ public class Main { private static Parser parser; private static TreeMap<Address, Node> entities = new TreeMap<Address, Node>() {{ put(new Address(0, 0), Bool.Nil); put(new Address(0, 1), Bool.T); }}; public static Default defaultObj = new Default() {{ Address addressOfDefaultObj = Main.register(defaultObj); setMember(Symbol.intern(0, "this"), addressOfDefaultObj); setMember(Symbol.intern(0, "THIS"), addressOfDefaultObj); setMember(Symbol.intern(0, "Default"), addressOfDefaultObj); setMember(Symbol.intern(0, "Core"), register(new X_Core())); setMember(Symbol.intern(0, "Object"), register(new X_Object())); }}; public static Frame frame = new Frame(); public static boolean allowExitMethod = true; /** * * @param table */ public static void loadLocalFrame(Handler table) { frame.loadLocalFrame(table); } public static void unloadLocalFrame() { frame.unloadLocalFrame(); } /** * * @param sym * @return true false */ public static boolean hasSymbol(Symbol sym) { return frame.hasSymbol(sym) || defaultObj.hasMember(sym); } /** * * @param sym * @return */ public static Address getAddressOfSymbol(Symbol sym) throws Exception { return (frame.hasSymbol(sym)) ? frame.getAddressOfSymbol(sym) : defaultObj.getAddressOfMember(sym); } /** * * @param sym * @return */ public static Node getValueOfSymbol(Symbol sym) throws Exception { if (frame.hasSymbol(sym)) { return frame.getValueOfSymbol(sym); } else { return (defaultObj.hasMember(sym)) ? 
defaultObj.message(0, sym) : null; } } /** * * @param address * @return */ public static Node getValueOfReference(Address address) { return entities.get(address); } /** * * @param sym * @param ref */ public static void setAddress(Symbol sym, Address ref) throws Exception { if (frame.hasSymbol(sym)) { frame.setAddress(sym, ref); return; } if (!defaultObj.hasMember(sym)) throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` "); defaultObj.setMember(sym, ref); } /** * * @param sym * @param obj * @throws Exception */ public static void setValue(Symbol sym, Node obj) throws Exception { if (frame.hasSymbol(sym)) { frame.setValue(sym, obj); return; } Address ref = register(obj); if (!defaultObj.hasMember(sym)) throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` "); defaultObj.setMember(sym, ref); } /** * * @param sym * @param ref */ public static void defAddress(Symbol sym, Address ref) throws Exception { if (frame.numberOfLayers() != 0) { frame.defAddress(sym, ref); return; } defaultObj.setMember(sym, ref); } /** * * @param sym * @param obj */ public static void defValue(Symbol sym, Node obj) throws Exception { if (frame.numberOfLayers() != 0) { frame.defValue(sym, obj); return; } Address ref = register(obj); defaultObj.setMember(sym, ref); } /** * Address * @param obj * @return Address */ public static Address register(Node obj) { entities.put(new Address(0,entities.lastKey().getAddress() + 1), obj); return new Address(0, entities.lastKey().getAddress()); } /** * Xemime * ( or ) * <br> * -debug * @param args */ public static void main(String[] args) { VirtualMemoryMonitor vmm; Thread vmmThread; boolean debug = Arrays.asList(args).contains("-debug"); if ((debug && args.length >= 3) || (!debug && args.length >= 2)) { usage(); System.out.println(System.lineSeparator() + "Usage: java -jar Xemime.jar [source file name]"); return; } if (debug) { vmm = new VirtualMemoryMonitor(); vmmThread = new Thread(vmm); vmmThread.start(); } try { parser = 
new Parser(); BufferedReader in; if ((debug && args.length == 1) || (!debug && args.length == 0)) { usage(); in = new BufferedReader(new InputStreamReader(System.in)); System.out.print(System.lineSeparator() + "[1]> "); String line; while (true) { line = in.readLine(); if (line != null && !line.equals("")) { ArrayList<Node> result; try { result = parser.parse(line); } catch(Exception e) { System.out.println(e.getMessage()); System.out.print("[" + (parser.getLocation() + 1) + "]> "); parser.goDown(1); continue; } for (Node c : result) { try { System.out.println(c.run()); } catch(Exception e) { System.out.println(e.getMessage()); break; } } System.out.print("[" + (parser.getLocation() + 1) + "]> "); parser.goDown(1); } else if (line == null) { break; } } } else { in = new BufferedReader(new FileReader(args[0])); StringBuilder stringBuilder = new StringBuilder(); String line; while ((line = in.readLine()) != null) { stringBuilder.append(line); stringBuilder.append('\n'); } ArrayList<Node> result = null; try { result = parser.parse(stringBuilder.toString()); } catch(Exception e) { System.out.println(e.getMessage()); System.exit(1); } for (Node c : result) { try { c.run(); } catch(Exception e) { System.out.println(e.getMessage()); System.exit(1); } } } in.close(); } catch(IOException e) { e.printStackTrace(); } } /** * Xemime * @param source * @throws Exception */ public static void exec(String source) throws Exception { parser = new Parser(); ArrayList<Node> result = parser.parse(source); for (Node node : result) node.run(); } private static void usage() { System.out.println(" _ __ _ \n" + " | |/ /__ ____ ___ (_)___ ___ ___ \n" + " | / _ \\/ __ `__ \\/ / __ `__ \\/ _ \\\n" + " / / __/ / / / / / / / / / / / __/\n" + "/_/|_\\___/_/ /_/ /_/_/_/ /_/ /_/\\___/ \n\n" + "Xemime Version 1.0.0 2017-09-05"); } /** * Object <br> * <br> * */ private static class X_Object extends Handler { X_Object() { super(0); setMember(Symbol.intern(0, "clone"), new X_Clone()); 
setMember(Symbol.intern(0, "new"), new X_New()); setMember(Symbol.intern(0, "proto"), new Bool(0, false)); } /** * Object.clone <br> * clone */ private static class X_Clone extends Native { X_Clone() { super(0, 0); } @Override protected Address exec(ArrayList<Node> params, Address self) throws Exception { return Main.register(params.get(0).run()); } } private static class X_New extends Native { X_New() { super(0, 0); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { Handler obj1 = (Handler) params.get(0).run(); Handler obj2 = new Handler(0); obj2.setMember(Symbol.intern(0, "proto"), new Bool(0, false)); if (obj1.hasMember(Symbol.intern(0, "proto"))) { Handler proto = (Handler) obj1.getMember(Symbol.intern(0, "proto")); for (Map.Entry<Symbol, Address> entry : proto.getMembers().entrySet()) { obj2.setMember(entry.getKey(), entry.getValue()); } } return Main.register(obj2); } } } /** * Core <br> * */ private static class X_Core extends Handler { X_Core() { super(0); setMember(Symbol.intern(0, "if"), new X_If()); setMember(Symbol.intern(0, "print"), new X_Print()); setMember(Symbol.intern(0, "println"), new X_Println()); setMember(Symbol.intern(0, "exit"), new X_Exit()); } /** * Core.exit <br> * Xemime */ private static class X_Exit extends Native { X_Exit() { super(0, 0); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { if (Main.allowExitMethod) System.exit(0); throw new Exception("exit"); } } /** * Core.print <br> * 1 */ private static class X_Print extends Native { X_Print() { super(0, 1); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { Node o = params.get(1).run(); System.out.print(o); return o; } } /** * Core.println <br> * 1 */ private static class X_Println extends Native { X_Println() { super(0, 1); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { Node o = params.get(1).run(); System.out.println(o); 
return o; } } /** * Core.if <br> * 2NIL2 * NIL3 */ private static class X_If extends Native { X_If(){ super(0, 3); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { return (params.get(1).run().equals(Bool.Nil)) ? params.get(3).run() : params.get(2).run(); } } } }
package net.zero918nobita.Xemime.interpreter; import net.zero918nobita.Xemime.entity.*; import net.zero918nobita.Xemime.parser.Parser; import net.zero918nobita.Xemime.utils.VirtualMemoryMonitor; import net.zero918nobita.Xemime.ast.*; import java.io.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import java.util.TreeMap; /** * * @author Kodai Matsumoto */ public class Main { private static Parser parser; private static TreeMap<Address, Node> entities = new TreeMap<Address, Node>() {{ put(new Address(0, 0), Bool.Nil); put(new Address(0, 1), Bool.T); }}; public static Default defaultObj = new Default() {{ Address addressOfDefaultObj = Main.register(defaultObj); setMember(Symbol.intern(0, "this"), addressOfDefaultObj); setMember(Symbol.intern(0, "THIS"), addressOfDefaultObj); setMember(Symbol.intern(0, "Default"), addressOfDefaultObj); setMember(Symbol.intern(0, "Core"), register(new X_Core())); setMember(Symbol.intern(0, "Object"), register(new X_Object())); }}; public static Frame frame = new Frame(); /** Core.exit(); true false */ private static boolean allowExitMethod; /** * * @param table */ public static void loadLocalFrame(Handler table) { frame.loadLocalFrame(table); } public static void unloadLocalFrame() { frame.unloadLocalFrame(); } /** * * @param sym * @return true false */ public static boolean hasSymbol(Symbol sym) { return frame.hasSymbol(sym) || defaultObj.hasMember(sym); } /** * * @param sym * @return */ public static Address getAddressOfSymbol(Symbol sym) throws Exception { return (frame.hasSymbol(sym)) ? frame.getAddressOfSymbol(sym) : defaultObj.getAddressOfMember(sym); } /** * * @param sym * @return */ public static Node getValueOfSymbol(Symbol sym) throws Exception { if (frame.hasSymbol(sym)) { return frame.getValueOfSymbol(sym); } else { return (defaultObj.hasMember(sym)) ? 
defaultObj.message(0, sym) : null; } } /** * * @param address * @return */ public static Node getValueOfReference(Address address) { return entities.get(address); } /** * * @param sym * @param ref */ public static void setAddress(Symbol sym, Address ref) throws Exception { if (frame.hasSymbol(sym)) { frame.setAddress(sym, ref); return; } if (!defaultObj.hasMember(sym)) throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` "); defaultObj.setMember(sym, ref); } /** * * @param sym * @param obj * @throws Exception */ public static void setValue(Symbol sym, Node obj) throws Exception { if (frame.hasSymbol(sym)) { frame.setValue(sym, obj); return; } Address ref = register(obj); if (!defaultObj.hasMember(sym)) throw new Exception(parser.getLocation() + ": `" + sym.getName() + "` "); defaultObj.setMember(sym, ref); } /** * * @param sym * @param ref */ public static void defAddress(Symbol sym, Address ref) throws Exception { if (frame.numberOfLayers() != 0) { frame.defAddress(sym, ref); return; } defaultObj.setMember(sym, ref); } /** * * @param sym * @param obj */ public static void defValue(Symbol sym, Node obj) throws Exception { if (frame.numberOfLayers() != 0) { frame.defValue(sym, obj); return; } Address ref = register(obj); defaultObj.setMember(sym, ref); } /** * Address * @param node * @return Address */ public static Address register(Node node) { entities.put(new Address(0,entities.lastKey().getAddress() + 1), node); return new Address(0, entities.lastKey().getAddress()); } /** * Xemime * ( or ) * <br> * -debug * @param args */ public static void main(String[] args) { allowExitMethod = true; VirtualMemoryMonitor vmm; Thread vmmThread; boolean debug = Arrays.asList(args).contains("-debug"); if ((debug && args.length >= 3) || (!debug && args.length >= 2)) { usage(); System.out.println(System.lineSeparator() + "Usage: java -jar Xemime.jar [source file name]"); return; } if (debug) { vmm = new VirtualMemoryMonitor(); vmmThread = new Thread(vmm); 
vmmThread.start(); } try { parser = new Parser(); BufferedReader in; if ((debug && args.length == 1) || (!debug && args.length == 0)) { usage(); in = new BufferedReader(new InputStreamReader(System.in)); System.out.print(System.lineSeparator() + "[1]> "); String line; while (true) { line = in.readLine(); if (line != null && !line.equals("")) { ArrayList<Node> result; try { result = parser.parse(line); } catch(Exception e) { System.out.println(e.getMessage()); System.out.print("[" + (parser.getLocation() + 1) + "]> "); parser.goDown(1); continue; } for (Node c : result) { try { System.out.println(c.run()); } catch(Exception e) { System.out.println(e.getMessage()); break; } } System.out.print("[" + (parser.getLocation() + 1) + "]> "); parser.goDown(1); } else if (line == null) { break; } } } else { in = new BufferedReader(new FileReader(args[0])); StringBuilder stringBuilder = new StringBuilder(); String line; while ((line = in.readLine()) != null) { stringBuilder.append(line); stringBuilder.append('\n'); } ArrayList<Node> result = null; try { result = parser.parse(stringBuilder.toString()); } catch(Exception e) { System.out.println(e.getMessage()); System.exit(1); } for (Node c : result) { try { c.run(); } catch(Exception e) { System.out.println(e.getMessage()); System.exit(1); } } } in.close(); } catch(IOException e) { e.printStackTrace(); } } /** * Xemime * @param source * @throws Exception */ public static void exec(String source) throws Exception { allowExitMethod = false; parser = new Parser(); ArrayList<Node> result = parser.parse(source); for (Node node : result) node.run(); } public static boolean allowExitMethod() { return allowExitMethod; } private static void usage() { System.out.println(" _ __ _ \n" + " | |/ /__ ____ ___ (_)___ ___ ___ \n" + " | / _ \\/ __ `__ \\/ / __ `__ \\/ _ \\\n" + " / / __/ / / / / / / / / / / / __/\n" + "/_/|_\\___/_/ /_/ /_/_/_/ /_/ /_/\\___/ \n\n" + "Xemime Version 1.0.0 2017-09-05"); } /** * Object <br> * <br> * */ private 
static class X_Object extends Handler { X_Object() { super(0); setMember(Symbol.intern(0, "clone"), new X_Clone()); setMember(Symbol.intern(0, "new"), new X_New()); setMember(Symbol.intern(0, "proto"), new Bool(0, false)); } /** * Object.clone <br> * clone */ private static class X_Clone extends Native { X_Clone() { super(0, 0); } @Override protected Address exec(ArrayList<Node> params, Address self) throws Exception { return Main.register(params.get(0).run()); } } private static class X_New extends Native { X_New() { super(0, 0); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { Handler obj1 = (Handler) params.get(0).run(); Handler obj2 = new Handler(0); obj2.setMember(Symbol.intern(0, "proto"), new Bool(0, false)); if (obj1.hasMember(Symbol.intern(0, "proto"))) { Handler proto = (Handler) obj1.getMember(Symbol.intern(0, "proto")); for (Map.Entry<Symbol, Address> entry : proto.getMembers().entrySet()) { obj2.setMember(entry.getKey(), entry.getValue()); } } return Main.register(obj2); } } } /** * Core <br> * */ private static class X_Core extends Handler { X_Core() { super(0); setMember(Symbol.intern(0, "if"), new X_If()); setMember(Symbol.intern(0, "print"), new X_Print()); setMember(Symbol.intern(0, "println"), new X_Println()); setMember(Symbol.intern(0, "exit"), new X_Exit()); } /** * Core.exit <br> * Xemime */ private static class X_Exit extends Native { X_Exit() { super(0, 0); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { if (Main.allowExitMethod) System.exit(0); throw new Exception(" Core.exit "); } } /** * Core.print <br> * 1 */ private static class X_Print extends Native { X_Print() { super(0, 1); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { Node o = params.get(1).run(); System.out.print(o); return o; } } /** * Core.println <br> * 1 */ private static class X_Println extends Native { X_Println() { super(0, 1); } @Override protected 
Node exec(ArrayList<Node> params, Address self) throws Exception { Node o = params.get(1).run(); System.out.println(o); return o; } } /** * Core.if <br> * 2NIL2 * NIL3 */ private static class X_If extends Native { X_If(){ super(0, 3); } @Override protected Node exec(ArrayList<Node> params, Address self) throws Exception { return (params.get(1).run().equals(Bool.Nil)) ? params.get(3).run() : params.get(2).run(); } } } }
package org.agmip.translators.apsim.events;

import java.text.ParseException;
import java.util.Calendar;
import java.util.Date;

import org.agmip.ace.LookupCodes;
import org.agmip.translators.apsim.core.Management;
import org.agmip.translators.apsim.util.Util;
import org.codehaus.jackson.annotate.JsonProperty;

/**
 * Translates an AgMIP planting event into an APSIM "sow" management action.
 * Numeric fields use Util.missingValue as a "not provided" sentinel; string
 * fields use "?".
 *
 * @author Ioannis N. Athanasiadis, DUTh
 * @author Dean Holzworth, CSIRO
 * @since Jul 13, 2012
 */
public class Planting extends Event{

    // AgMIP crop code (crid); mapped to an APSIM crop name via LookupCodes
    @JsonProperty("crid")
    private String cropID = "?";

    // APSIM-specific cultivar id; preferred over cul_id when present
    @JsonProperty("apsim_cul_id")
    private String cultivar = "?";

    // generic cultivar id; fallback when no APSIM-specific cultivar is given
    @JsonProperty("cul_id")
    private String cultivarDef = "?";

    // fertile tiller number (plftn); only meaningful for sorghum
    @JsonProperty("plftn")
    private double ftn = Util.missingValue;

    // planting depth. units=mm
    @JsonProperty("pldp")
    private double depth = Util.missingValue;
    public double depth() { return depth; }

    // plants per hill (plph); used for transplanted rice
    @JsonProperty("plph")
    private double plantsPerHill = Util.missingValue;
    public double plantsPerHill() { return plantsPerHill; }

    // age of transplant in days (page); Integer.MAX_VALUE acts as the "missing" sentinel
    @JsonProperty("page")
    private int ageOfTransplant = Integer.MAX_VALUE;
    public int ageOfTransplant() { return ageOfTransplant; }

    // row spacing. units=cm
    @JsonProperty("plrs")
    double rowSpacing;
    // Converts cm -> mm, propagating the missing-value sentinel unchanged.
    // NOTE(review): '==' on double works here only because the sentinel is copied exactly.
    public double rowSpacingAsMM() {
        if (rowSpacing == Util.missingValue)
            return Util.missingValue;
        else
            return rowSpacing * 10.0;
    }

    // Plant population at planting. units=number/m2
    @JsonProperty("plpop")
    private double population = Util.missingValue;

    // Plant population at emergence (plpoe). units=number/m2; used as a
    // fallback for plpop in initialise()
    @JsonProperty("plpoe")
    private double population2 = Util.missingValue;

    // Number of plants in seedbed. units=number
    @JsonProperty("nplsb")
    private double numPlantsInSeedbed = Util.missingValue;
    public double numPlantsInSeedbed() { return numPlantsInSeedbed; }

    // Plant material
    // S=Dry seed,
    // T=Transplant,
    // N=Nursery,
    // P=Pregerminated seed,
    // R=Ratoon,
    @JsonProperty("plma")
    private String plantMaterial = "S";

    // original (unshifted) planting date, captured in initialise() before any
    // transplant-age adjustment moves the event date
    private String pdate;

    /**
     * Parses the captured planting date with the APSIM date format.
     * Returns null when no date was captured; an unparseable date is also
     * reported as null (the error is logged separately during initialise()).
     */
    public Date getPDate() {
        try {
            if (pdate == null)
                return null;
            else
                return Util.apsim.parse(pdate);
        } catch (ParseException ex) {
            return null;
        }
    }

    // Crop Name (APSIM name resolved from the AgMIP crid code)
    public String getCropName() {
        return LookupCodes.lookupCode("crid", cropID, "apsim");
    }

    // Calculate number of hills (population per m2 divided by plants per hill)
    private double numberOfHills() {
        return population / plantsPerHill;
    }

    /**
     * Builds the APSIM sow action line by substituting the $placeholders of the
     * crop-specific template from getSowLine().
     * Note: "$row_spacing_m" is intentionally replaced before "$row_spacing",
     * since the latter is a prefix of the former.
     */
    @Override
    public String getApsimAction() {
        String actionLine = getSowLine(getCropName());
        actionLine = actionLine.replace("$cropName", getCropName());
        actionLine = actionLine.replace("$population", String.valueOf(population));
        actionLine = actionLine.replace("$depth", String.valueOf(depth));
        if ("?".equals(cultivar)) {
            actionLine = actionLine.replace("$cultivar",cultivarDef);
        } else {
            actionLine = actionLine.replace("$cultivar",cultivar);
        }
        actionLine = actionLine.replace("$row_spacing_m", String.valueOf(rowSpacingAsMM()/1000.0));
        actionLine = actionLine.replace("$row_spacing", String.valueOf(rowSpacingAsMM()));
        actionLine = actionLine.replace("$ftn", String.valueOf(ftn));
        actionLine = actionLine.replace("$plantsPerHill", String.valueOf(plantsPerHill));
        actionLine = actionLine.replace("$ageOfTransplant", String.valueOf(ageOfTransplant));
        actionLine = actionLine.replace("$numberOfHills", String.valueOf(numberOfHills()));
        actionLine = actionLine.replace("$numPlantsInSeedbed", String.valueOf(numPlantsInSeedbed()));
        return actionLine;
    }

    /**
     * Validates the planting data, appending one log line per missing or
     * inconsistent input, applies the plpoe fallback for population, and for
     * transplanted rice shifts the event date back by the transplant age.
     */
    @Override
    public void initialise(Management management) {
        pdate = getDate(); // remember the original date before any shifting below
        if ("?".equals(getDate()))
            log += " * Operation planting ERROR: Date missing. '?' has been inserted.\r\n";
        if ("?".equals(cropID))
            log += " * Operation " + getDate() + " ERROR: Planting crop missing (crid).\r\n";
        if (population == Util.missingValue) {
            if (population2 == Util.missingValue)
                log += " * Operation " + getDate() + " ERROR: Planting population missing (plpop, plpoe).\r\n";
            else
                population = population2; // fall back to population at emergence
        }
        if (depth == Util.missingValue)
            log += " * Operation " + getDate() + " ERROR: Planting depth missing (pldp).\r\n";
        if ("?".equals(cultivar) && "?".equals(cultivarDef))
            log += " * Operation " + getDate() + " ERROR: Missing planting cultivar (apsim_cul_id and cul_id).\r\n";
        if (rowSpacing == Util.missingValue)
            log += " * Operation " + getDate() + " ERROR: Planting row spacing missing (plrs).\r\n";
        if (numPlantsInSeedbed == Util.missingValue && getCropName().equals("rice") && !plantMaterial.equals("S"))
            log += " * Operation " + getDate() + " ERROR: Number of plants in seedbed missing (nplsb).\r\n";
        if (getCropName().equals("sorghum") && ftn == Util.missingValue)
            log += " * Operation " + getDate() + " ERROR: Planting fertile tiller number not specified for sorghum (plftn). \r\n";
        if (getCropName().equals("rice") && plantMaterial.equals("T")) {
            if (plantsPerHill == Util.missingValue)
                log += " * Operation " + getDate() + " ERROR: Rice planting variable 'plants per hill (plph) missing. \r\n";
            if (ageOfTransplant == Integer.MAX_VALUE)
                log += " * Operation " + getDate() + " ERROR: Rice planting variable 'age of transplant (page) missing. \r\n";
            else {
                // sowing in APSIM happens in the seedbed: move the event date
                // back by the age of the transplant
                Calendar c = Calendar.getInstance();
                c.setTime(getEventDate());
                c.add(Calendar.DATE, -ageOfTransplant);
                setDate(Util.apsim.format(c.getTime()));
            }
        }
    }

    // Return a specific crop sow line (a template with $placeholders filled in
    // by getApsimAction()).
    private String getSowLine(String cropName) {
        if (cropName.equals("maize"))
            return "$cropName sow plants = $population, sowing_depth = $depth (mm), cultivar = $cultivar, row_spacing = $row_spacing (mm)";
        else if (cropName.equals("sorghum"))
            return "$cropName sow plants = $population, sowing_depth = $depth (mm), cultivar = $cultivar, row_spacing = $row_spacing (mm), skip = solid, tiller_no_fertile = $ftn";
        else if (cropName.equals("sugar"))
            return "$cropName sow plants = $population, cultivar = $cultivar, sowing_depth = $depth";
        else if (cropName.equals("millet"))
            return "$cropName sow plants = $population, sowing_depth = $depth (mm), cultivar = $cultivar, row_spacing = $row_spacing_m (m)";
        else if (cropName.equals("rice")) {
            if (plantMaterial.equals("S"))
                return "$cropName sow cultivar = $cultivar, establishment = direct-seed, nplds = $population";
            else
                return "$cropName sow cultivar = $cultivar, establishment = transplant, nplsb = $numPlantsInSeedbed, nplh = $plantsPerHill, sbdur = $ageOfTransplant, nh = $numberOfHills";
        }
        else if (cropName.equals("cotton"))
            return "$cropName sow plants_pm = $population, cultivar = $cultivar, sowing_depth = $depth (mm), row_spacing = $row_spacing (mm), skiprow = 0";
        else // Plant based crops
            return "$cropName sow plants = $population, sowing_depth = $depth (mm), cultivar = $cultivar, row_spacing = $row_spacing (mm), crop_class = plant";
    }
}
package nl.hsac.fitnesse.junit;

import fitnesse.ContextConfigurator;
import fitnesse.FitNesseContext;
import fitnesse.components.PluginsClassLoader;
import fitnesse.wiki.WikiPage;
import nl.hsac.fitnesse.fixture.Environment;
import nl.hsac.fitnesse.fixture.slim.web.SeleniumDriverSetup;
import nl.hsac.fitnesse.fixture.util.FileUtil;
import nl.hsac.fitnesse.fixture.util.selenium.SeleniumHelper;
import nl.hsac.fitnesse.junit.patch948.PatchedFitNesseRunner;
import nl.hsac.fitnesse.junit.selenium.LocalSeleniumDriverClassFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.LocalSeleniumDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumGridDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumJsonGridDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SimpleSeleniumGridDriverFactoryFactory;
import org.apache.commons.lang3.StringUtils;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.model.InitializationError;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;

/**
 * JUnit Runner to run a FitNesse suite or page as JUnit test.
 *
 * The suite/page to run must be specified either via the Java property
 * 'fitnesseSuiteToRun', or by adding a FitNesseSuite.Name annotation to the test class.
 * If both are present the system property is used.
 *
 * The Selenium driver used for tests may be overridden (from what is configured in the wiki)
 * by specifying the property 'seleniumGridUrl' and either 'seleniumBrowser' or 'seleniumCapabilities'.
 * The default timeout (in seconds) for Selenium tests may be overridden by specifying the property
 * 'seleniumDefaultTimeout'.
 *
 * The HTML generated for each page is saved in target/fitnesse-results
 */
public class HsacFitNesseRunner extends PatchedFitNesseRunner {
    /** Output path for HTML results */
    public final static String FITNESSE_RESULTS_PATH = "target/fitnesse-results";
    /** Property to override suite to run */
    public final static String SUITE_OVERRIDE_VARIABLE_NAME = "fitnesseSuiteToRun";
    private final static String SELENIUM_DEFAULT_TIMEOUT_PROP = "seleniumDefaultTimeout";
    // Candidate driver-factory factories; getSeleniumDriverFactoryFactory() picks
    // the FIRST one (in registration order) whose willOverride() returns true.
    protected final List<SeleniumDriverFactoryFactory> factoryFactories = new ArrayList<>();

    public HsacFitNesseRunner(Class<?> suiteClass) throws InitializationError {
        super(suiteClass);
        try {
            factoryFactories.add(new SimpleSeleniumGridDriverFactoryFactory());
            factoryFactories.add(new SeleniumGridDriverFactoryFactory());
            factoryFactories.add(new SeleniumJsonGridDriverFactoryFactory());
            factoryFactories.add(new LocalSeleniumDriverFactoryFactory());
            factoryFactories.add(new LocalSeleniumDriverClassFactoryFactory());

            // we include images in output so build server will have single
            String outputDir = getOutputDir(suiteClass);
            new File(outputDir).mkdirs();
            Environment.getInstance().setFitNesseRoot(outputDir);
        } catch (Exception e) {
            throw new InitializationError(e);
        }
    }

    /**
     * Determines the suite to run: the system property wins over the @Suite annotation.
     *
     * @throws InitializationError when neither the property nor the annotation is present
     */
    @Override
    protected String getSuiteName(Class<?> klass) throws InitializationError {
        String name = System.getProperty(SUITE_OVERRIDE_VARIABLE_NAME);
        if (StringUtils.isEmpty(name)) {
            Suite nameAnnotation = klass.getAnnotation(Suite.class);
            if (nameAnnotation == null) {
                throw new InitializationError("There must be a @Suite annotation");
            }
            name = nameAnnotation.value();
        }
        return name;
    }

    @Override
    protected String getFitNesseDir(Class<?> suiteClass) {
        return "wiki";
    }

    @Override
    protected String getOutputDir(Class<?> klass) throws InitializationError {
        return FITNESSE_RESULTS_PATH;
    }

    @Override
    protected String getFitNesseRoot(Class<?> suiteClass) {
        return ContextConfigurator.DEFAULT_ROOT;
    }

    @Override
    protected FitNesseContext createContext(Class<?> suiteClass) throws Exception {
        // disable maven-classpath-plugin, we expect all jars to be loaded as part of this jUnit run
        System.setProperty("fitnesse.wikitext.widgets.MavenClasspathSymbolType.Disable", "true");
        new PluginsClassLoader(getFitNesseDir(suiteClass)).addPluginsToClassLoader();
        return super.createContext(suiteClass);
    }

    // Runs the pages, then (best-effort) shuts down the overridden Selenium
    // driver and copies the suite overview to index.html with a run summary.
    @Override
    protected void runPages(List<WikiPage> pages, RunNotifier notifier) {
        boolean seleniumConfigOverridden = configureSeleniumIfNeeded();
        try {
            super.runPages(pages, notifier);
        } finally {
            if (seleniumConfigOverridden) {
                try {
                    shutdownSelenium();
                } catch (Exception e) {
                    // best effort: a failure stopping the driver must not fail the run
                }
            }

            try {
                Class<?> suiteClass = getTestClass().getJavaClass();
                String outputDir = getOutputDir(suiteClass);
                String suiteName = getSuiteName(suiteClass);
                String filename = suiteName + ".html";
                File overviewFile = new File(outputDir, filename);
                if (overviewFile.exists()) {
                    String path = overviewFile.getAbsolutePath();
                    String overviewHtml = FileUtil.streamToString(new FileInputStream(path), path);
                    if (overviewHtml != null) {
                        String indexHtml = getIndexHtmlContent(overviewHtml);
                        FileUtil.writeFile(new File(outputDir, "index.html").getAbsolutePath(), indexHtml);
                    }
                }
            } catch (Exception e) {
                // best effort: index.html generation is optional post-processing
            }
        }
    }

    /**
     * Determines whether system properties should override Selenium configuration in wiki.
     * If so Selenium will be configured according to property values, and locked so that wiki pages
     * no longer control Selenium setup.
     * @return true if Selenium was configured.
     */
    protected boolean configureSeleniumIfNeeded() {
        setSeleniumDefaultTimeOut();
        try {
            SeleniumHelper.DriverFactory factory = null;
            SeleniumDriverFactoryFactory factoryFactory = getSeleniumDriverFactoryFactory();
            if (factoryFactory != null) {
                factory = factoryFactory.getDriverFactory();

                if (factory != null) {
                    // Lock so wiki pages can no longer change the Selenium setup.
                    SeleniumDriverSetup.lockConfig();
                    Environment.getInstance().getSeleniumHelper().setDriverFactory(factory);
                }
            }
            return factory != null;
        } catch (Exception e) {
            throw new RuntimeException("Error overriding Selenium config", e);
        }
    }

    // Applies the 'seleniumDefaultTimeout' system property (seconds), if set.
    protected void setSeleniumDefaultTimeOut() {
        String propValue = System.getProperty(SELENIUM_DEFAULT_TIMEOUT_PROP);
        if (StringUtils.isNotEmpty(propValue)) {
            try {
                int timeoutSeconds = Integer.parseInt(propValue);
                Environment.getInstance().getSeleniumHelper().setDefaultTimeoutSeconds(timeoutSeconds);
            } catch (NumberFormatException e) {
                throw new RuntimeException("Bad " + SELENIUM_DEFAULT_TIMEOUT_PROP + " system property: " + propValue, e);
            }
        }
    }

    // Returns the first registered factory that wants to override the wiki config, or null.
    protected SeleniumDriverFactoryFactory getSeleniumDriverFactoryFactory() {
        SeleniumDriverFactoryFactory result = null;
        for (SeleniumDriverFactoryFactory factory : factoryFactories) {
            if (factory.willOverride()) {
                result = factory;
                break;
            }
        }
        return result;
    }

    // Unlocks the Selenium configuration and stops the current driver.
    protected void shutdownSelenium() {
        SeleniumDriverSetup.unlockConfig();
        new SeleniumDriverSetup().stopDriver();
    }

    // Prepends the last Selenium run summary (when available) to the overview HTML.
    protected String getIndexHtmlContent(String overviewHtml) {
        String result = overviewHtml;
        String runSummary = SeleniumDriverSetup.getLastRunSummary();
        if (runSummary != null) {
            result = overviewHtml.replace("<table", runSummary + "<table");
        }
        return result;
    }
}
package no.ntnu.okse.web.controller;

import no.ntnu.okse.web.model.Log;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Array; // NOTE(review): appears unused in this file; safe to drop in a follow-up
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * REST controller exposing the contents of all files under the "logs"
 * directory (searched recursively) as {@link Log} models.
 */
@RestController
@RequestMapping(value = "/api/log")
public class LogController {

    /**
     * Reads every file below the "logs" directory and returns one Log per file.
     *
     * @return list of Log entries (file name + file lines)
     * @throws IOException if a log file cannot be read
     */
    @RequestMapping(method = RequestMethod.GET)
    public ArrayList<Log> log() throws IOException {
        ArrayList<Log> logs = new ArrayList<>();
        File dir = new File("logs");
        // TrueFileFilter.INSTANCE twice: accept every file AND recurse into every subdirectory.
        Collection<File> files = FileUtils.listFiles(dir, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE);
        for (File file : files) {
            String name = file.getName();
            // Read with an explicit charset: the single-argument overload is
            // deprecated and silently depends on the platform default encoding.
            List<String> lines = FileUtils.readLines(file, StandardCharsets.UTF_8);
            Log log = new Log(name, lines);
            logs.add(log);
        }
        return logs;
    }
}
package Alg.Kernelization;

import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.Multigraph;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Kernelization rules mainly focused on the Simple Disjoint Kernelization Problem.
 *
 * See Section 4.3.1 from the book
 */
public class SimpleDisjointKernelization extends Kernelization {

    /**
     * Remove any vertex v not in prohibited that is part of a cycle where all other vertices
     * are in prohibited. Add v to the solution.
     *
     * @param solution   running reduction solution; removed vertices are recorded here
     * @param graph      graph being reduced (mutated in place)
     * @param prohibited vertices that may not be removed
     * @return Was a change done on the graph?
     */
    public static boolean removeOnlyVertexInProhibitedCycle(
            ReductionSolution solution,
            Multigraph<Integer, DefaultEdge> graph,
            HashSet<Integer> prohibited
    ) {
        // Snapshot the vertex set because we mutate the graph while iterating.
        Integer[] vertices = (graph.vertexSet()).toArray(new Integer[graph.vertexSet().size()]);
        boolean changed = false;
        for (int v : vertices) {
            // We do not have to check prohibited vertices
            if (prohibited.contains(v)) {
                continue;
            }
            if (SimpleDisjointKernelization.inCycleWith(v, graph, prohibited)) {
                Kernelization.removeVertex(solution, v, true);
                changed = true;
            }
        }
        return changed;
    }

    /**
     * Checks if vertex v is in a cycle that, apart from v itself, uses only
     * vertices from withSet.
     *
     * @param v       vertex to test
     * @param graph   graph to search in
     * @param withSet vertices allowed on the cycle besides v
     */
    public static boolean inCycleWith(Integer v, Multigraph<Integer, DefaultEdge> graph, Set<Integer> withSet) {
        // BUGFIX: this used to be `new HashSet<>(v)`, which resolves to the
        // HashSet(int initialCapacity) constructor — the vertex id was silently
        // used as a capacity (and a negative id would throw
        // IllegalArgumentException). The visited set must simply start empty.
        return SimpleDisjointKernelization.inCycleWithRecursive(v, graph, withSet, v, v, new HashSet<>());
    }

    /**
     * Recursive function to check if vertex v is in a cycle with only edges from set withSet.
     *
     * @param v             target vertex we have to search
     * @param graph         base graph
     * @param withSet       set of other vertices we can use
     * @param currentVertex current vertex we are searching at
     * @param lastVertex    last vertex, needed to ensure we do not go back the same way
     * @param done          vertices already visited during this search
     */
    protected static boolean inCycleWithRecursive(
            Integer v,
            Multigraph<Integer, DefaultEdge> graph,
            Set<Integer> withSet,
            Integer currentVertex,
            Integer lastVertex,
            Set<Integer> done
    ) {
        Collection<Integer> neighbours = SimpleDisjointKernelization.getNeighbours(graph, currentVertex)
                .collect(Collectors.toCollection(ArrayList<Integer>::new));
        boolean secondBack = false;
        for (int vertex : neighbours) {
            // Do not go back the same way
            // unless the lastVertex is the vertex we are looking for (v) and this is the second time
            // we can go back (meaning two edges going back, one already visited to get here, the
            // other one completing the cycle)
            if (vertex == lastVertex && !(secondBack && lastVertex.equals(v))) {
                secondBack = true;
                continue;
            }

            // We found a cycle
            if (vertex == v) {
                return true;
            }

            // We have already checked this vertex.
            if (done.contains(vertex)) {
                continue;
            }

            if (withSet.contains(vertex)) {
                done.add(vertex);
                if (SimpleDisjointKernelization.inCycleWithRecursive(v, graph, withSet, vertex, currentVertex, done)) {
                    return true;
                }
            }
        }
        // No qualifying cycle reachable from here.
        return false;
    }

    /**
     * remove any vertex v not in prohibited with degree 2 and at least one of its
     * neighbours also not in prohibited. Connect the neighbours of v.
     *
     * @param solution   running reduction solution
     * @param graph      graph being reduced (mutated in place)
     * @param prohibited vertices that may not be removed
     * @return Was a change done on the graph?
     */
    public static boolean removeNonProhibitedVertexWithDegree2(
            ReductionSolution solution,
            Multigraph<Integer, DefaultEdge> graph,
            HashSet<Integer> prohibited
    ) {
        // Snapshot the vertex set because we mutate the graph while iterating.
        Integer[] vertices = (graph.vertexSet()).toArray(new Integer[graph.vertexSet().size()]);
        boolean changed = false;
        for (int v : vertices) {
            // Skip prohibited vertices
            if (prohibited.contains(v)) {
                continue;
            }

            // Make sure that the vertex is of degree 2
            if (graph.degreeOf(v) == 2) {
                ArrayList<Integer> neighbours = SimpleDisjointKernelization.getNeighbours(graph, v)
                        .collect(Collectors.toCollection(ArrayList<Integer>::new));

                // Both neighbours are prohibited, so we can not do anything about it
                if (prohibited.containsAll(neighbours)) {
                    continue;
                }
                changed = true;

                // Now we can remove the vertex, and join the neighbours
                graph.addEdge(neighbours.get(0), neighbours.get(1));
                Kernelization.removeVertex(solution, v, false);
            }
        }
        return changed;
    }

    /**
     * Get the neighbours of vertex v in graph graph. Each neighbour appears
     * once per connecting edge, so parallel edges yield duplicates.
     *
     * @param graph graph to inspect
     * @param v     vertex whose neighbours are wanted
     */
    public static Stream<Integer> getNeighbours(Multigraph<Integer, DefaultEdge> graph, Integer v) {
        // Autoboxing replaces the deprecated `new Integer(...)` constructor call.
        return graph.edgesOf(v)
                .stream()
                .map((DefaultEdge e) -> graph.getEdgeSource(e).equals(v) ? graph.getEdgeTarget(e) : graph.getEdgeSource(e));
    }

    /**
     * Returns all the edges that have either the source or target in v
     *
     * @param graph graph to inspect
     * @param v     vertex whose incident edges are wanted
     */
    public static Set<DefaultEdge> getEdgesOf(Multigraph<Integer, DefaultEdge> graph, Integer v) {
        return graph.edgesOf(v);
    }
}
package org.broad.igv.ui.commandbar;

import com.jidesoft.swing.JideBoxLayout;
import com.jidesoft.swing.JideButton;
import com.jidesoft.swing.JideToggleButton;
import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.event.*;
import org.broad.igv.feature.genome.Genome;
import org.broad.igv.feature.genome.GenomeListItem;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.prefs.Constants;
import org.broad.igv.prefs.PreferencesManager;
import org.broad.igv.session.History;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.ShowDetailsBehavior;
import org.broad.igv.ui.UIConstants;
import org.broad.igv.ui.action.FitDataToWindowMenuAction;
import org.broad.igv.ui.action.ReloadTracksMenuAction;
import org.broad.igv.ui.panel.FrameManager;
import org.broad.igv.ui.panel.IGVPopupMenu;
import org.broad.igv.ui.panel.ReferenceFrame;
import org.broad.igv.ui.panel.ZoomSliderPanel;
import org.broad.igv.ui.util.IconFactory;
import org.broad.igv.ui.util.MessageUtils;
import org.broad.igv.ui.util.UIUtilities;

import javax.swing.*;
import javax.swing.border.LineBorder;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.io.IOException;

/**
 * The top command bar of the IGV window: genome/chromosome selectors, locus
 * search box, navigation buttons, tool buttons and the zoom slider. Listens
 * on the event bus for view and genome changes and keeps its widgets in sync.
 *
 * @author jrobinso
 */
public class IGVCommandBar extends javax.swing.JPanel implements IGVEventObserver {

    private static Logger log = Logger.getLogger(IGVCommandBar.class);

    final static String MODIFY_DETAILS_TOOLTIP = "Modify popup text behavior in data panels";
    final static int DEFAULT_CHROMOSOME_DROPDOWN_WIDTH = 120;

    private ChromosomeComboBox chromosomeComboBox;
    private GenomeComboBox genomeComboBox;
    private JideButton goButton;
    private JideButton homeButton;
    private JPanel locationPanel;
    private JideButton refreshButton;
    private JideToggleButton roiToggleButton;
    private JideButton detailsBehaviorButton;
    private JideToggleButton rulerLineButton;
    private SearchTextField searchTextField;
    private JPanel toolPanel;
    private JPanel zoomControl;
    private JideButton backButton;
    private JideButton forwardButton;
    private JideButton fitToWindowButton;

    // Current data-panel popup behavior; persisted via PreferencesManager.
    private ShowDetailsBehavior detailsBehavior;

    public IGVCommandBar() {
        initComponents();

        // Post creation widget setup.
        refreshGenomeListComboBox();

        String currentChr = FrameManager.getDefaultFrame().getChrName();
        boolean isWholeGenome = currentChr.equals(Globals.CHR_ALL);

        chromosomeComboBox.setSelectedItem(currentChr);
        // ROI definition and zooming make no sense in whole-genome view.
        roiToggleButton.setEnabled(!isWholeGenome);
        zoomControl.setEnabled(!isWholeGenome);

        detailsBehaviorButton.addMouseListener(new MouseAdapter() {
            public void mousePressed(MouseEvent e) {
                getPopupMenuToolTipBehavior().show(e.getComponent(), e.getX(), e.getY());
            }
        });

        IGVEventBus.getInstance().subscribe(ViewChange.class, this);
        IGVEventBus.getInstance().subscribe(GenomeChangeEvent.class, this);
        IGVEventBus.getInstance().subscribe(GenomeResetEvent.class, this);
    }

    // Builds the popup for choosing the data-panel details (tooltip) behavior.
    private JPopupMenu getPopupMenuToolTipBehavior() {
        final JPopupMenu popup = new IGVPopupMenu();
        for (final ShowDetailsBehavior behavior : ShowDetailsBehavior.values()) {
            JCheckBoxMenuItem menuItem = new JCheckBoxMenuItem(behavior.getLabel());
            menuItem.setSelected(detailsBehavior == behavior);
            menuItem.addActionListener(new AbstractAction() {
                public void actionPerformed(ActionEvent e) {
                    detailsBehavior = behavior;
                    // Persist the selection so it survives restarts.
                    PreferencesManager.getPreferences().put(Constants.DETAILS_BEHAVIOR_KEY, behavior.name());
                }
            });
            popup.add(menuItem);
        }
        return popup;
    }

    public ShowDetailsBehavior getDetailsBehavior() {
        return detailsBehavior;
    }

    // Disables single-locus widgets while a gene list is displayed.
    public void setGeneListMode(boolean geneListMode) {
        chromosomeComboBox.setEnabled(!geneListMode);
        if (geneListMode) searchTextField.setText("");
        zoomControl.setEnabled(!geneListMode);
        roiToggleButton.setEnabled(!geneListMode);
    }

    /**
     * Selects the first genome from the list which matches this genomeId.
     * If not found, checks genomes from the server/user-defined list
     *
     * @param genomeId
     */
    public void selectGenome(String genomeId) {
        GenomeListItem selectedItem = GenomeListManager.getInstance().getGenomeListItem(genomeId);
        if (selectedItem == null || !genomeComboBox.hasItem(selectedItem)) {
            try {
                GenomeManager.getInstance().loadGenomeById(genomeId);
            } catch (IOException e) {
                MessageUtils.showErrorMessage("Error loading genome: " + genomeId, e);
                log.error("Error loading genome: " + genomeId, e);
            }
        }

        if (selectedItem != null) {
            UIUtilities.invokeAndWaitOnEventThread(() -> genomeComboBox.setSelectedItem(selectedItem));
        }
    }

    // Refreshes the locus text box and nav/ROI/zoom button state from the
    // default frame and session history (on the EDT).
    public void updateCurrentCoordinates() {
        if (IGV.hasInstance()) {
            String p = "";

            ReferenceFrame defaultFrame = FrameManager.getDefaultFrame();
            final String chrName = defaultFrame.getChrName();
            if (!Globals.CHR_ALL.equals(chrName) && !FrameManager.isGeneListMode()) {
                p = defaultFrame.getFormattedLocusString();
            }
            final String position = p;
            final History history = IGV.getInstance().getSession().getHistory();

            UIUtilities.invokeOnEventThread(new Runnable() {
                public void run() {
                    searchTextField.setText(position);
                    forwardButton.setEnabled(history.canGoForward());
                    backButton.setEnabled(history.canGoBack());
                    roiToggleButton.setEnabled(!Globals.CHR_ALL.equals(chrName));
                    zoomControl.setEnabled(!Globals.CHR_ALL.equals(chrName));
                }
            });
        }
    }

    public void refreshGenomeListComboBox() {
        UIUtilities.invokeAndWaitOnEventThread(() -> {
            genomeComboBox.refreshGenomeListComboBox();
        });
    }

    /**
     * Adjust the popup for the combobox to be at least as wide as
     * the widest item.
     *
     * @param box
     */
    private void adjustPopupWidth(JComboBox box) {
        if (box.getItemCount() == 0) return;
        Object comp = box.getUI().getAccessibleChild(box, 0);
        if (!(comp instanceof JPopupMenu)) {
            return;
        }
        JPopupMenu popup = (JPopupMenu) comp;
        JScrollPane scrollPane = null;
        for (Component scomp : popup.getComponents()) {
            if (scomp instanceof JScrollPane) {
                scrollPane = (JScrollPane) scomp;
            }
        }
        if (scrollPane == null) return;

        // Loop through and set width to widest component, plus some padding
        int rendererWidth = box.getWidth();
        for (int index = 0; index < box.getItemCount(); index++) {
            Object value = box.getItemAt(index);
            Component rendererComp = box.getRenderer().getListCellRendererComponent(null, value, index, false, false);
            // BUGFIX: the renderer width was previously computed but never used,
            // so the popup never widened. Track the maximum preferred width.
            rendererWidth = Math.max(rendererWidth, rendererComp.getPreferredSize().width);
        }

        Dimension size = scrollPane.getPreferredSize();
        size.width = Math.max(size.width, rendererWidth);
        scrollPane.setPreferredSize(size);
        scrollPane.setMaximumSize(size);
        scrollPane.revalidate();
    }

    //<editor-fold desc="Action methods">

    // Jump back to the genome's home chromosome (leaving gene-list mode first).
    private void homeButtonActionPerformed(java.awt.event.ActionEvent evt) {
        Genome genome = GenomeManager.getInstance().getCurrentGenome();
        if (FrameManager.isGeneListMode()) {
            IGV.getInstance().setGeneList(null);
        }
        if (genome != null) {
            String chrName = genome.getHomeChromosome();
            if (chrName != null && !chrName.equals(chromosomeComboBox.getSelectedItem())) {
                FrameManager.getDefaultFrame().changeChromosome(chrName, false);
            }
        }
    }

    private void refreshButtonActionPerformed(java.awt.event.ActionEvent evt) {
        IGVEventBus.getInstance().post(new org.broad.igv.event.RefreshEvent());
        (new ReloadTracksMenuAction("", -1, IGV.getInstance())).actionPerformed(evt);
    }

    private void goButtonActionPerformed(java.awt.event.ActionEvent evt) {
        String searchText = searchTextField.getText();
        searchByLocus(searchText);
    }

    private void roiToggleButtonActionPerformed(java.awt.event.ActionEvent evt) {
        if (roiToggleButton.isSelected()) {
            IGV.getInstance().beginROI(roiToggleButton);
        } else {
            IGV.getInstance().endROI();
        }
    }

    //</editor-fold>

    // Event-bus callback: sync widgets with view / genome changes.
    public void receiveEvent(Object e) {

        if (e instanceof ViewChange) {
            ViewChange event = (ViewChange) e;
            if (event.type == ViewChange.Type.ChromosomeChange || event.type == ViewChange.Type.LocusChange) {
                String chrName = FrameManager.getDefaultFrame().getChrName();
                roiToggleButton.setEnabled(!Globals.CHR_ALL.equals(chrName));
                zoomControl.setEnabled(!Globals.CHR_ALL.equals(chrName));
                if (!chrName.equals(chromosomeComboBox.getSelectedItem())) {
                    chromosomeComboBox.setSelectedItem(chrName);
                }
            }
            updateCurrentCoordinates();
            repaint(); // TODO Is this neccessary?
        } else if (e instanceof GenomeChangeEvent) {
            GenomeChangeEvent event = (GenomeChangeEvent) e;
            Genome genome = event.genome;
            refreshGenomeListComboBox();
            chromosomeComboBox.updateChromosFromGenome(genome);
            String chrName = FrameManager.getDefaultFrame().getChrName();
            zoomControl.setEnabled(!Globals.CHR_ALL.equals(chrName));
        } else if (e instanceof GenomeResetEvent) {
            refreshGenomeListComboBox();
        } else {
            log.info("Unknown event class: " + e.getClass());
        }
    }

    //<editor-fold desc="Search box">

    // Set the focus in the search box
    public void focusSearchBox() {
        searchTextField.requestFocusInWindow();
        searchTextField.selectAll();
    }

    // Navigates to the given locus; "home" (or the home chromosome's name)
    // behaves like the home button.
    public void searchByLocus(final String searchText) {
        if ((searchText != null) && (searchText.length() > 0)) {
            String homeChr = IGV.getInstance().getGenomeManager().getCurrentGenome().getHomeChromosome();
            if (searchText.equalsIgnoreCase("home") || searchText.equalsIgnoreCase(homeChr)) {
                homeButtonActionPerformed(null);
            } else {
                searchTextField.setText(searchText);
                searchTextField.searchByLocus(searchText);
            }
        }
    }

    /**
     * This method is called from within the constructor
     */
    private void initComponents() {

        setMinimumSize(new Dimension(200, 32));

        JideBoxLayout layout = new JideBoxLayout(this, JideBoxLayout.X_AXIS);

        setLayout(layout);

        final String detailsPreference = PreferencesManager.getPreferences().get(Constants.DETAILS_BEHAVIOR_KEY);
        detailsBehavior = ShowDetailsBehavior.valueOf((detailsPreference.toUpperCase()));

        // This controls the vertical height of the command bar
        locationPanel = new javax.swing.JPanel();
        locationPanel.setBorder(new LineBorder(Color.lightGray, 1, true));
        locationPanel.setPreferredSize(new java.awt.Dimension(150, 20));
        locationPanel.setLayout(new JideBoxLayout(locationPanel, JideBoxLayout.X_AXIS));
        locationPanel.setAlignmentY(CENTER_ALIGNMENT);
        locationPanel.add(Box.createRigidArea(new Dimension(10, 36)), JideBoxLayout.FIX);

        genomeComboBox = new GenomeComboBox();
        genomeComboBox.setMinimumSize(new Dimension(180, 27));
        genomeComboBox.setPreferredSize(new Dimension(180, 27));
        genomeComboBox.setToolTipText(UIConstants.CHANGE_GENOME_TOOLTIP);
        genomeComboBox.addPopupMenuListener(new PopupMenuListener() {
            @Override
            public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
                try {
                    adjustPopupWidth(genomeComboBox);
                } catch (Exception e1) {
                    log.warn(e1.getMessage(), e1);
                }
            }

            @Override
            public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
                // no-op
            }

            @Override
            public void popupMenuCanceled(PopupMenuEvent e) {
                // no-op
            }
        });
        locationPanel.add(genomeComboBox, JideBoxLayout.FIX);
        locationPanel.add(Box.createHorizontalStrut(5), JideBoxLayout.FIX);

        chromosomeComboBox = new ChromosomeComboBox();
        chromosomeComboBox.setToolTipText("Select a chromosome to view");
        chromosomeComboBox.setMaximumSize(new java.awt.Dimension(DEFAULT_CHROMOSOME_DROPDOWN_WIDTH, 30));
        chromosomeComboBox.setMinimumSize(new java.awt.Dimension(DEFAULT_CHROMOSOME_DROPDOWN_WIDTH, 30));
        chromosomeComboBox.setPreferredSize(new java.awt.Dimension(DEFAULT_CHROMOSOME_DROPDOWN_WIDTH, 30));
        locationPanel.add(chromosomeComboBox, JideBoxLayout.FIX);
        locationPanel.add(Box.createHorizontalStrut(5), JideBoxLayout.FIX);

        searchTextField = new SearchTextField();
        searchTextField.setMaximumSize(new java.awt.Dimension(250, 15));
        searchTextField.setMinimumSize(new java.awt.Dimension(100, 28));
        searchTextField.setPreferredSize(new java.awt.Dimension(230, 28));
        searchTextField.setAlignmentY(CENTER_ALIGNMENT);
        locationPanel.add(searchTextField, JideBoxLayout.FIX);

        goButton = new JideButton("Go");
        goButton.setToolTipText("Jump to gene or locus");
        goButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                goButtonActionPerformed(evt);
            }
        });
        locationPanel.add(goButton, JideBoxLayout.FIX);

        add(locationPanel, JideBoxLayout.FIX);
        add(Box.createHorizontalStrut(10), JideBoxLayout.FIX);

        toolPanel = new javax.swing.JPanel();
        toolPanel.setAlignmentX(RIGHT_ALIGNMENT);
        toolPanel.setLayout(new JideBoxLayout(toolPanel, JideBoxLayout.X_AXIS));

        homeButton = new com.jidesoft.swing.JideButton();
        homeButton.setAlignmentX(RIGHT_ALIGNMENT);
        homeButton.setIcon(new javax.swing.ImageIcon(
                getClass().getResource("/toolbarButtonGraphics/navigation/Home24.gif")));
        homeButton.setMaximumSize(new java.awt.Dimension(32, 32));
        homeButton.setMinimumSize(new java.awt.Dimension(32, 32));
        homeButton.setPreferredSize(new java.awt.Dimension(32, 32));
        homeButton.setToolTipText("Jump to whole genome view");
        homeButton.addActionListener(evt -> homeButtonActionPerformed(evt));
        toolPanel.add(homeButton, JideBoxLayout.FIX);

        backButton = new JideButton();
        backButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/left-arrow.gif")));
        backButton.setToolTipText("Go back");
        backButton.setMaximumSize(new java.awt.Dimension(32, 32));
        backButton.setMinimumSize(new java.awt.Dimension(32, 32));
        backButton.setPreferredSize(new java.awt.Dimension(32, 32));
        backButton.addActionListener(evt -> {
            final History history = IGV.getInstance().getSession().getHistory();
            history.back();
        });
        backButton.setEnabled(false);
        toolPanel.add(backButton, JideBoxLayout.FIX);

        forwardButton = new JideButton();
        forwardButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/right-arrow.gif")));
        forwardButton.setToolTipText("Go forward");
        forwardButton.setMaximumSize(new java.awt.Dimension(32, 32));
        forwardButton.setMinimumSize(new java.awt.Dimension(32, 32));
        forwardButton.setPreferredSize(new java.awt.Dimension(32, 32));
        forwardButton.addActionListener(evt -> {
            final History history = IGV.getInstance().getSession().getHistory();
            history.forward();
        });
        forwardButton.setEnabled(false);
        toolPanel.add(forwardButton, JideBoxLayout.FIX);

        refreshButton = new com.jidesoft.swing.JideButton();
        refreshButton.setAlignmentX(RIGHT_ALIGNMENT);
        refreshButton.setIcon(new javax.swing.ImageIcon(
                getClass().getResource("/toolbarButtonGraphics/general/Refresh24.gif"))); // NOI18N
        refreshButton.setMaximumSize(new java.awt.Dimension(32, 32));
        refreshButton.setMinimumSize(new java.awt.Dimension(32, 32));
        refreshButton.setPreferredSize(new java.awt.Dimension(32, 32));
        refreshButton.setToolTipText("Reload tracks and refresh the screen");
        refreshButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                refreshButtonActionPerformed(evt);
            }
        });
        toolPanel.add(refreshButton, JideBoxLayout.FIX);

        Icon regionOfInterestIcon = IconFactory.getInstance().getIcon(IconFactory.IconID.REGION_OF_INTEREST);
        roiToggleButton = new JideToggleButton(regionOfInterestIcon);
        roiToggleButton.setAlignmentX(RIGHT_ALIGNMENT);
        roiToggleButton.setToolTipText("Define a region of interest.");
        roiToggleButton.setMaximumSize(new java.awt.Dimension(32, 32));
        roiToggleButton.setMinimumSize(new java.awt.Dimension(32, 32));
        roiToggleButton.setPreferredSize(new java.awt.Dimension(32, 32));
        roiToggleButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                roiToggleButtonActionPerformed(evt);
            }
        });
        toolPanel.add(roiToggleButton, JideBoxLayout.FIX);

        fitToWindowButton = new JideButton();
        fitToWindowButton.setAlignmentX(RIGHT_ALIGNMENT);
        fitToWindowButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/collapseall.gif")));
        fitToWindowButton.setMaximumSize(new java.awt.Dimension(32, 32));
        fitToWindowButton.setMinimumSize(new java.awt.Dimension(32, 32));
        fitToWindowButton.setPreferredSize(new java.awt.Dimension(32, 32));
        fitToWindowButton.setToolTipText("Resize tracks to fit in window.");
        fitToWindowButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                (new FitDataToWindowMenuAction(null, 0, IGV.getInstance())).actionPerformed(evt);
            }
        });
        toolPanel.add(fitToWindowButton, JideBoxLayout.FIX);

        final Icon noTooltipIcon = IconFactory.getInstance().getIcon(IconFactory.IconID.NO_TOOLTIP);
        detailsBehaviorButton = new JideButton(noTooltipIcon);
        detailsBehaviorButton.setAlignmentX(RIGHT_ALIGNMENT);
        detailsBehaviorButton.setToolTipText(MODIFY_DETAILS_TOOLTIP);
        detailsBehaviorButton.setMaximumSize(new java.awt.Dimension(32, 32));
        detailsBehaviorButton.setMinimumSize(new java.awt.Dimension(32, 32));
        detailsBehaviorButton.setPreferredSize(new java.awt.Dimension(32, 32));
        toolPanel.add(detailsBehaviorButton, JideBoxLayout.FIX);

        rulerLineButton = new JideToggleButton();
        rulerLineButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/vertical-line.gif")));
        rulerLineButton.setAlignmentX(RIGHT_ALIGNMENT);
        rulerLineButton.setToolTipText("Enable ruler line in data panels");
        rulerLineButton.setMaximumSize(new java.awt.Dimension(32, 32));
        rulerLineButton.setMinimumSize(new java.awt.Dimension(32, 32));
        rulerLineButton.setPreferredSize(new java.awt.Dimension(32, 32));
        rulerLineButton.addActionListener(evt -> {
            IGV.getInstance().setRulerEnabled(rulerLineButton.isSelected());
            IGV.getInstance().repaint();
        });
        toolPanel.add(rulerLineButton, JideBoxLayout.FIX);

        this.add(toolPanel);

        this.add(Box.createHorizontalGlue(), JideBoxLayout.VARY);

        zoomControl = new ZoomSliderPanel();
        Dimension dimSize = new Dimension(200, 30);
        zoomControl.setPreferredSize(dimSize);
        zoomControl.setMinimumSize(dimSize);
        zoomControl.setMaximumSize(dimSize);
        zoomControl.setToolTipText("Click + to zoom in, - to zoom out");
        zoomControl.setOpaque(false);
        this.add(zoomControl, JideBoxLayout.FIX);

        this.add(Box.createHorizontalStrut(20), JideBoxLayout.FIX);
    }
}
package org.cytoscape.hybrid.internal.ui;

import static org.cytoscape.hybrid.internal.ui.UiTheme.DEF_FONT;

import java.awt.Color;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.swing.BorderFactory;
import javax.swing.JEditorPane;
import javax.swing.border.Border;

import org.cytoscape.hybrid.events.InterAppMessage;
import org.cytoscape.hybrid.internal.ws.ExternalAppManager;
import org.cytoscape.hybrid.internal.ws.WSClient;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Query input box for the external search application.
 *
 * <p>Shows placeholder text until first clicked, and reacts to two property
 * change events: {@link #ACTION_CLEAR} empties the box, {@link #ACTION_SEARCH}
 * disables the box and launches (or focuses) the external app with the current
 * text as the query.</p>
 */
public class SearchPane extends JEditorPane implements PropertyChangeListener {

    private static final long serialVersionUID = -6181889059129429760L;

    protected static final String ACTION_CLEAR = "clear";
    protected static final String ACTION_SEARCH = "search";

    private static final String PLACEHOLDER = "Enter search terms here...";

    // WS Client used to talk to the external application.
    private final WSClient client;
    private final ObjectMapper mapper;

    // True once the user has clicked the box and the placeholder was cleared.
    private Boolean searchBoxClicked = false;

    // For child process management: pm tracks the external app process,
    // command is the shell command used to launch it.
    private final ExternalAppManager pm;
    private final String command;

    /**
     * @param client  websocket client for messaging the external app
     * @param pm      manager holding the query and the child process handle
     * @param command command line used to spawn the external application
     */
    public SearchPane(final WSClient client, final ExternalAppManager pm, String command) {
        this.mapper = new ObjectMapper();
        this.client = client;
        this.pm = pm;
        this.command = command;

        setFont(DEF_FONT);
        setBackground(Color.decode("#FFFFFF"));
        setForeground(Color.decode("#555555"));
        final Border paddingBorder3 = BorderFactory.createEmptyBorder(7, 7, 7, 7);
        setBorder(paddingBorder3);
        setText(PLACEHOLDER);

        addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                if (!isEnabled()) {
                    // Box is disabled while a search is running: clicking it just
                    // brings the external app window to the front.
                    // NOTE(review): "reauest" looks like a typo for "request" in
                    // MessageUtil's API — confirm and fix at the declaration site.
                    MessageUtil.reauestExternalAppFocus(client);
                    return;
                }
                // First click clears the placeholder text exactly once.
                if (!searchBoxClicked) {
                    setText("");
                    searchBoxClicked = true;
                }
            }
        });
    }

    /**
     * Starts a search: stores the query, connects the websocket, and either
     * focuses the already-running external app or spawns a new one.
     *
     * @param query raw text from the box; the placeholder maps to ""
     * @throws Exception if the websocket connection cannot be established
     */
    private void search(String query) throws Exception {
        // Use empty String if default text is used.
        if (query.equals(PLACEHOLDER)) {
            query = "";
        }
        pm.setQuery(query);

        final String dest = "ws://localhost:8025/ws/echo";
        client.start(dest);

        if (pm.isActive()) {
            // External app already running: just ask it to take focus.
            final InterAppMessage focus = InterAppMessage.create()
                    .setFrom(InterAppMessage.FROM_CY3)
                    .setType(InterAppMessage.TYPE_FOCUS);
            this.client.getSocket().sendMessage(mapper.writeValueAsString(focus));
            return;
        }

        // Launch the external application off the EDT.
        final ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(() -> {
            try {
                // Set application type:
                this.client.getSocket().setApplication("ndex");
                pm.setProcess(Runtime.getRuntime().exec(command));
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
        // FIX: the executor is single-use; without shutdown() each search leaked
        // a live non-daemon worker thread. shutdown() lets the submitted task
        // finish and then terminates the thread.
        executor.shutdown();
    }

    /**
     * Dispatches UI actions: {@code ACTION_CLEAR} empties the box,
     * {@code ACTION_SEARCH} disables the box and runs the search.
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (evt.getPropertyName().equals(ACTION_CLEAR)) {
            this.setText("");
        } else if (evt.getPropertyName().equals(ACTION_SEARCH)) {
            setEnabled(false);
            try {
                search(getText());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
package org.embulk.input.zendesk; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import org.embulk.config.Config; import org.embulk.config.ConfigDefault; import org.embulk.config.ConfigDiff; import org.embulk.config.ConfigException; import org.embulk.config.ConfigSource; import org.embulk.config.Task; import org.embulk.config.TaskReport; import org.embulk.config.TaskSource; import org.embulk.exec.GuessExecutor; import org.embulk.input.zendesk.models.AuthenticationMethod; import org.embulk.input.zendesk.models.Target; import org.embulk.input.zendesk.services.ZendeskCustomObjectService; import org.embulk.input.zendesk.services.ZendeskNPSService; import org.embulk.input.zendesk.services.ZendeskService; import org.embulk.input.zendesk.services.ZendeskSupportAPIService; import org.embulk.input.zendesk.services.ZendeskUserEventService; import org.embulk.input.zendesk.utils.ZendeskConstants; import org.embulk.input.zendesk.utils.ZendeskDateUtils; import org.embulk.input.zendesk.utils.ZendeskUtils; import org.embulk.spi.Buffer; import org.embulk.spi.Exec; import org.embulk.spi.InputPlugin; import org.embulk.spi.PageBuilder; import org.embulk.spi.PageOutput; import org.embulk.spi.Schema; import org.embulk.spi.SchemaConfig; import org.embulk.spi.type.Types; import org.slf4j.Logger; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import java.nio.charset.StandardCharsets; import java.time.Instant; import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; import 
java.util.stream.StreamSupport; public class ZendeskInputPlugin implements InputPlugin { public interface PluginTask extends Task { @Config("login_url") String getLoginUrl(); @Config("auth_method") @ConfigDefault("\"basic\"") AuthenticationMethod getAuthenticationMethod(); @Config("target") Target getTarget(); @Config("username") @ConfigDefault("null") Optional<String> getUsername(); @Config("password") @ConfigDefault("null") Optional<String> getPassword(); @Config("token") @ConfigDefault("null") Optional<String> getToken(); @Config("access_token") @ConfigDefault("null") Optional<String> getAccessToken(); @Config("start_time") @ConfigDefault("null") Optional<String> getStartTime(); @Min(1) @Max(30) @Config("retry_limit") @ConfigDefault("5") int getRetryLimit(); @Min(1) @Max(3600) @Config("retry_initial_wait_sec") @ConfigDefault("4") int getRetryInitialWaitSec(); @Min(30) @Max(3600) @Config("max_retry_wait_sec") @ConfigDefault("60") int getMaxRetryWaitSec(); @Config("incremental") @ConfigDefault("true") boolean getIncremental(); @Config("includes") @ConfigDefault("[]") List<String> getIncludes(); @Config("dedup") @ConfigDefault("true") boolean getDedup(); @Config("app_marketplace_integration_name") @ConfigDefault("null") Optional<String> getAppMarketPlaceIntegrationName(); @Config("app_marketplace_org_id") @ConfigDefault("null") Optional<String> getAppMarketPlaceOrgId(); @Config("app_marketplace_app_id") @ConfigDefault("null") Optional<String> getAppMarketPlaceAppId(); @Config("object_types") @ConfigDefault("[]") List<String> getObjectTypes(); @Config("relationship_types") @ConfigDefault("[]") List<String> getRelationshipTypes(); @Config("profile_source") @ConfigDefault("null") Optional<String> getProfileSource(); @Config("end_time") @ConfigDefault("null") Optional<String> getEndTime(); @Config("user_event_type") @ConfigDefault("null") Optional<String> getUserEventType(); @Config("user_event_source") @ConfigDefault("null") Optional<String> getUserEventSource(); 
@Config("columns") SchemaConfig getColumns(); } private ZendeskService zendeskService; private RecordImporter recordImporter; private static final Logger logger = Exec.getLogger(ZendeskInputPlugin.class); @Override public ConfigDiff transaction(final ConfigSource config, final Control control) { final PluginTask task = config.loadConfig(PluginTask.class); validateInputTask(task); final Schema schema = task.getColumns().toSchema(); int taskCount = 1; // For non-incremental target, we will split records based on number of pages. 100 records per page // In preview, run with taskCount = 1 if (!Exec.isPreview() && !getZendeskService(task).isSupportIncremental() && getZendeskService(task) instanceof ZendeskSupportAPIService) { final JsonNode result = getZendeskService(task).getDataFromPath("", 0, false, 0); if (result.has(ZendeskConstants.Field.COUNT) && !result.get(ZendeskConstants.Field.COUNT).isNull() && result.get(ZendeskConstants.Field.COUNT).isInt()) { taskCount = ZendeskUtils.numberToSplitWithHintingInTask(result.get(ZendeskConstants.Field.COUNT).asInt()); } } return resume(task.dump(), schema, taskCount, control); } @Override public ConfigDiff resume(final TaskSource taskSource, final Schema schema, final int taskCount, final Control control) { final PluginTask task = taskSource.loadTask(PluginTask.class); final List<TaskReport> taskReports = control.run(taskSource, schema, taskCount); return this.buildConfigDiff(task, taskReports); } @Override public void cleanup(final TaskSource taskSource, final Schema schema, final int taskCount, final List<TaskReport> successTaskReports) { } @Override public TaskReport run(final TaskSource taskSource, final Schema schema, final int taskIndex, final PageOutput output) { final PluginTask task = taskSource.loadTask(PluginTask.class); if (getZendeskService(task).isSupportIncremental() && !isValidTimeRange(task)) { if (Exec.isPreview()) { throw new ConfigException("Invalid End time. 
End time is greater than current time"); } logger.warn("The end time, '" + task.getEndTime().get() + "', is greater than the current time. No records will be imported"); // we just need to store config_diff when incremental_mode is enable if (task.getIncremental()) { return buildTaskReportKeepOldStartAndEndTime(task); } return Exec.newTaskReport(); } try (final PageBuilder pageBuilder = getPageBuilder(schema, output)) { final TaskReport taskReport = getZendeskService(task).addRecordToImporter(taskIndex, getRecordImporter(schema, pageBuilder)); pageBuilder.finish(); return taskReport; } } @Override public ConfigDiff guess(final ConfigSource config) { config.set("columns", new ObjectMapper().createArrayNode()); final PluginTask task = config.loadConfig(PluginTask.class); validateInputTask(task); if (!isValidTimeRange(task)) { throw new ConfigException("Invalid End time. End time is greater than current time"); } return Exec.newConfigDiff().set("columns", buildColumns(task)); } @VisibleForTesting protected PageBuilder getPageBuilder(final Schema schema, final PageOutput output) { return new PageBuilder(Exec.getBufferAllocator(), schema, output); } private ConfigDiff buildConfigDiff(final PluginTask task, final List<TaskReport> taskReports) { final ConfigDiff configDiff = Exec.newConfigDiff(); if (!taskReports.isEmpty() && task.getIncremental()) { final TaskReport taskReport = taskReports.get(0); if (taskReport.has(ZendeskConstants.Field.START_TIME)) { final OffsetDateTime offsetDateTime = OffsetDateTime.ofInstant(Instant.ofEpochSecond( taskReport.get(JsonNode.class, ZendeskConstants.Field.START_TIME).asLong()), ZoneOffset.UTC); configDiff.set(ZendeskConstants.Field.START_TIME, offsetDateTime.format(DateTimeFormatter.ofPattern(ZendeskConstants.Misc.RUBY_TIMESTAMP_FORMAT_INPUT))); } if (taskReport.has(ZendeskConstants.Field.END_TIME)) { final OffsetDateTime offsetDateTime = OffsetDateTime.ofInstant(Instant.ofEpochSecond( taskReport.get(JsonNode.class, 
ZendeskConstants.Field.END_TIME).asLong()), ZoneOffset.UTC); configDiff.set(ZendeskConstants.Field.END_TIME, offsetDateTime.format(DateTimeFormatter.ofPattern(ZendeskConstants.Misc.RUBY_TIMESTAMP_FORMAT_INPUT))); } } return configDiff; } private JsonNode buildColumns(final PluginTask task) { JsonNode jsonNode = getZendeskService(task).getDataFromPath("", 0, true, 0); String targetName = task.getTarget().getJsonName(); if (jsonNode.has(targetName) && !jsonNode.get(targetName).isNull() && jsonNode.get(targetName).isArray() && jsonNode.get(targetName).size() > 0) { return addAllColumnsToSchema(jsonNode, task.getTarget(), task.getIncludes()); } throw new ConfigException("Could not guess schema due to empty data set"); } private final Pattern idPattern = Pattern.compile(ZendeskConstants.Regex.ID); private JsonNode addAllColumnsToSchema(final JsonNode jsonNode, final Target target, final List<String> includes) { final JsonNode sample = new ObjectMapper().valueToTree(StreamSupport.stream( jsonNode.get(target.getJsonName()).spliterator(), false).limit(10).collect(Collectors.toList())); final Buffer bufferSample = Buffer.copyOf(sample.toString().getBytes(StandardCharsets.UTF_8)); final JsonNode columns = Exec.getInjector().getInstance(GuessExecutor.class) .guessParserConfig(bufferSample, Exec.newConfigSource(), createGuessConfig()) .getObjectNode().get("columns"); final Iterator<JsonNode> ite = columns.elements(); while (ite.hasNext()) { final ObjectNode entry = (ObjectNode) ite.next(); final String name = entry.get("name").asText(); final String type = entry.get("type").asText(); if (type.equals(Types.TIMESTAMP.getName())) { entry.put("format", ZendeskConstants.Misc.RUBY_TIMESTAMP_FORMAT); } if (name.equals("id")) { if (!type.equals(Types.LONG.getName())) { if (type.equals(Types.TIMESTAMP.getName())) { entry.remove("format"); } entry.put("type", Types.LONG.getName()); } // Id of User Events target is more suitable for String if (target.equals(Target.USER_EVENTS)) { 
entry.put("type", Types.STRING.getName()); } } else if (idPattern.matcher(name).find()) { if (type.equals(Types.TIMESTAMP.getName())) { entry.remove("format"); } entry.put("type", Types.STRING.getName()); } } addIncludedObjectsToSchema((ArrayNode) columns, includes); return columns; } private void addIncludedObjectsToSchema(final ArrayNode arrayNode, final List<String> includes) { final ObjectMapper mapper = new ObjectMapper(); includes.stream() .map((include) -> mapper.createObjectNode() .put("name", include) .put("type", Types.JSON.getName())) .forEach(arrayNode::add); } private ConfigSource createGuessConfig() { return Exec.newConfigSource() .set("guess_plugins", ImmutableList.of("zendesk")) .set("guess_sample_buffer_bytes", ZendeskConstants.Misc.GUESS_BUFFER_SIZE); } private ZendeskService getZendeskService(PluginTask task) { if (zendeskService == null) { zendeskService = dispatchPerTarget(task); } return zendeskService; } @VisibleForTesting protected ZendeskService dispatchPerTarget(ZendeskInputPlugin.PluginTask task) { switch (task.getTarget()) { case TICKETS: case USERS: case ORGANIZATIONS: case TICKET_METRICS: case TICKET_EVENTS: case TICKET_FORMS: case TICKET_FIELDS: return new ZendeskSupportAPIService(task); case RECIPIENTS: case SCORES: return new ZendeskNPSService(task); case OBJECT_RECORDS: case RELATIONSHIP_RECORDS: return new ZendeskCustomObjectService(task); case USER_EVENTS: return new ZendeskUserEventService(task); default: throw new ConfigException("Unsupported " + task.getTarget() + ", supported values: '" + Arrays.toString(Target.values()) + "'"); } } private RecordImporter getRecordImporter(Schema schema, PageBuilder pageBuilder) { if (recordImporter == null) { recordImporter = new RecordImporter(schema, pageBuilder); } return recordImporter; } private void validateInputTask(PluginTask task) { validateAppMarketPlace(task.getAppMarketPlaceIntegrationName().isPresent(), task.getAppMarketPlaceAppId().isPresent(), 
task.getAppMarketPlaceOrgId().isPresent()); validateCredentials(task); validateIncremental(task); validateCustomObject(task); validateUserEvent(task); validateTime(task); } private void validateCredentials(PluginTask task) { switch (task.getAuthenticationMethod()) { case OAUTH: if (!task.getAccessToken().isPresent()) { throw new ConfigException(String.format("access_token is required for authentication method '%s'", task.getAuthenticationMethod().name().toLowerCase())); } break; case TOKEN: if (!task.getUsername().isPresent() || !task.getToken().isPresent()) { throw new ConfigException(String.format("username and token are required for authentication method '%s'", task.getAuthenticationMethod().name().toLowerCase())); } break; case BASIC: if (!task.getUsername().isPresent() || !task.getPassword().isPresent()) { throw new ConfigException(String.format("username and password are required for authentication method '%s'", task.getAuthenticationMethod().name().toLowerCase())); } break; default: throw new ConfigException("Unknown authentication method"); } } private void validateAppMarketPlace(final boolean isAppMarketIntegrationNamePresent, final boolean isAppMarketAppIdPresent, final boolean isAppMarketOrgIdPresent) { final boolean isAllAvailable = isAppMarketIntegrationNamePresent && isAppMarketAppIdPresent && isAppMarketOrgIdPresent; final boolean isAllUnAvailable = !isAppMarketIntegrationNamePresent && !isAppMarketAppIdPresent && !isAppMarketOrgIdPresent; // All or nothing needed if (!(isAllAvailable || isAllUnAvailable)) { throw new ConfigException("All of app_marketplace_integration_name, app_marketplace_org_id, " + "app_marketplace_app_id " + "are required to fill out for Apps Marketplace API header"); } } private void validateIncremental(PluginTask task) { if (task.getIncremental() && getZendeskService(task).isSupportIncremental()) { if (!task.getDedup()) { logger.warn("You've selected to skip de-duplicating records, result may contain duplicated data"); } if 
(!getZendeskService(task).isSupportIncremental() && task.getStartTime().isPresent()) { logger.warn(String.format("Target: '%s' doesn't support incremental export API. Will be ignored start_time option", task.getTarget())); } } } private void validateCustomObject(PluginTask task) { if (task.getTarget().equals(Target.OBJECT_RECORDS) && task.getObjectTypes().isEmpty()) { throw new ConfigException("Should have at least one Object Type"); } if (task.getTarget().equals(Target.RELATIONSHIP_RECORDS) && task.getRelationshipTypes().isEmpty()) { throw new ConfigException("Should have at least one Relationship Type"); } } private void validateUserEvent(PluginTask task) { if (task.getTarget().equals(Target.USER_EVENTS)) { if (!task.getProfileSource().isPresent()) { throw new ConfigException("Profile Source is required for User Event Target"); } } } private void validateTime(PluginTask task) { if (getZendeskService(task).isSupportIncremental()) { // Can't set end_time to 0, so it should be valid task.getEndTime().ifPresent(time -> { if (!ZendeskDateUtils.supportedTimeFormat(task.getEndTime().get()).isPresent()) { throw new ConfigException("End Time should follow these format " + ZendeskConstants.Misc.SUPPORT_DATE_TIME_FORMAT.toString()); } }); if (task.getStartTime().isPresent() && task.getEndTime().isPresent() && ZendeskDateUtils.getStartTime(task.getStartTime().get()) > ZendeskDateUtils.isoToEpochSecond(task.getEndTime().get())) { throw new ConfigException("End Time should be later or equal than Start Time"); } } } private boolean isValidTimeRange(PluginTask task) { return !task.getEndTime().isPresent() || ZendeskDateUtils.isoToEpochSecond(task.getEndTime().get()) <= Instant.now().getEpochSecond(); } private TaskReport buildTaskReportKeepOldStartAndEndTime(PluginTask task) { final TaskReport taskReport = Exec.newTaskReport(); if (task.getStartTime().isPresent()) { taskReport.set(ZendeskConstants.Field.START_TIME, ZendeskDateUtils.isoToEpochSecond(task.getStartTime().get())); } 
if (task.getEndTime().isPresent()) { taskReport.set(ZendeskConstants.Field.END_TIME, ZendeskDateUtils.isoToEpochSecond(task.getEndTime().get())); } return taskReport; } }
package org.irmacard.api.common;

import org.irmacard.credentials.info.IssuerIdentifier;

import java.math.BigInteger;
import java.util.HashMap;

/**
 * Session request carried as a signed JWT, together with the nonce and context
 * for the session and, optionally, the issuer public-key versions to use.
 */
public class JwtSessionRequest {
    private String jwt;
    private BigInteger nonce;
    private BigInteger context;
    private HashMap<IssuerIdentifier, Integer> keys;

    /**
     * Full constructor.
     *
     * @param jwt     the serialized JWT for this session
     * @param nonce   session nonce
     * @param context session context
     * @param keys    public-key version per issuer (may be null)
     */
    public JwtSessionRequest(String jwt, BigInteger nonce, BigInteger context,
                             HashMap<IssuerIdentifier, Integer> keys) {
        this.jwt = jwt;
        this.nonce = nonce;
        this.context = context;
        this.keys = keys;
    }

    /** Convenience constructor without issuer keys (leaves them null). */
    public JwtSessionRequest(String jwt, BigInteger nonce, BigInteger context) {
        this(jwt, nonce, context, null);
    }

    /** @return the serialized JWT */
    public String getJwt() {
        return jwt;
    }

    /** @return the session nonce */
    public BigInteger getNonce() {
        return nonce;
    }

    /** @return the session context */
    public BigInteger getContext() {
        return context;
    }

    /** @return public-key version per issuer, or null if none were given */
    public HashMap<IssuerIdentifier, Integer> getPublicKeys() {
        return keys;
    }
}
package org.imirsel.nema.flowservice;

import java.util.logging.Level;
import java.util.logging.Logger;

import org.springframework.beans.BeansException;
import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Main class for the NEMA Flow Service.
 *
 * @author shirk
 * @since 0.4.0
 */
public class FlowServiceApp {

    private static final Logger logger = Logger.getLogger(FlowServiceApp.class
            .getName());

    /**
     * Main method for executing the application: starts an RMI registry on
     * port 1099 (best effort) and then boots the Spring application context.
     *
     * @param args Arguments for the application.
     */
    public static void main(String[] args) {
        String title = FlowServiceApp.class.getPackage().getImplementationTitle();
        String version = FlowServiceApp.class.getPackage()
                .getImplementationVersion();

        // Title/version come from the jar manifest and may be absent when run
        // from an IDE or exploded classpath.
        if (!(title == null || version == null)) {
            logger.config("Starting: " + title + " Implementation Version: "
                    + version);
        }

        try {
            try {
                java.rmi.registry.LocateRegistry.createRegistry(1099);
                logger.config("RMI registry ready.");
            } catch (Exception e) {
                // Best effort: a registry may already be running on this port.
                // FIX: route through the logger with the stack trace attached
                // instead of e.printStackTrace().
                logger.log(Level.CONFIG, "Exception starting RMI registry:", e);
            }
            new ClassPathXmlApplicationContext("applicationContext.xml");
            logger.info("NEMA Flow Service successfully started...");
        } catch (BeansException e) {
            // FIX: startup failures were only printed to stderr; log them at
            // SEVERE so they reach the configured log handlers.
            logger.log(Level.SEVERE, "Failed to load the Spring application context.", e);
        } catch (Exception e) {
            logger.log(Level.SEVERE, "Failed to start the NEMA Flow Service.", e);
        }
    }
}
package org.java_websocket.server; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.ByteChannel; import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedByInterruptException; import java.nio.channels.SelectableChannel; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.java_websocket.SocketChannelIOHelper; import org.java_websocket.WebSocket; import org.java_websocket.WebSocketAdapter; import org.java_websocket.WebSocketFactory; import org.java_websocket.WebSocketImpl; import org.java_websocket.WrappedByteChannel; import org.java_websocket.drafts.Draft; import org.java_websocket.exceptions.InvalidDataException; import org.java_websocket.framing.CloseFrame; import org.java_websocket.framing.Framedata; import org.java_websocket.handshake.ClientHandshake; import org.java_websocket.handshake.Handshakedata; import org.java_websocket.handshake.ServerHandshakeBuilder; /** * <tt>WebSocketServer</tt> is an abstract class that only takes care of the * HTTP handshake portion of WebSockets. It's up to a subclass to add * functionality/purpose to the server. 
* */ public abstract class WebSocketServer extends WebSocketAdapter implements Runnable { public static int DECODERS = Runtime.getRuntime().availableProcessors(); /** * Holds the list of active WebSocket connections. "Active" means WebSocket * handshake is complete and socket can be written to, or read from. */ private final Collection<WebSocket> connections; /** * The port number that this WebSocket server should listen on. Default is * WebSocket.DEFAULT_PORT. */ private final InetSocketAddress address; /** * The socket channel for this WebSocket server. */ private ServerSocketChannel server; /** * The 'Selector' used to get event keys from the underlying socket. */ private Selector selector; /** * The Draft of the WebSocket protocol the Server is adhering to. */ private List<Draft> drafts; private Thread selectorthread; private volatile AtomicBoolean isclosed = new AtomicBoolean( false ); private List<WebSocketWorker> decoders; private List<WebSocketImpl> iqueue; private BlockingQueue<ByteBuffer> buffers; private int queueinvokes = 0; private AtomicInteger queuesize = new AtomicInteger( 0 ); private WebSocketServerFactory wsf = new DefaultWebSocketServerFactory(); /** * Creates a WebSocketServer that will attempt to * listen on port <var>WebSocket.DEFAULT_PORT</var>. * * @see #WebSocketServer(InetSocketAddress, int, List, Collection) more details here */ public WebSocketServer() throws UnknownHostException { this( new InetSocketAddress( WebSocket.DEFAULT_PORT ), DECODERS, null ); } /** * Creates a WebSocketServer that will attempt to bind/listen on the given <var>address</var>. 
* * @see #WebSocketServer(InetSocketAddress, int, List, Collection) more details here */ public WebSocketServer( InetSocketAddress address ) { this( address, DECODERS, null ); } /** * @see #WebSocketServer(InetSocketAddress, int, List, Collection) more details here */ public WebSocketServer( InetSocketAddress address , int decoders ) { this( address, decoders, null ); } /** * @see #WebSocketServer(InetSocketAddress, int, List, Collection) more details here */ public WebSocketServer( InetSocketAddress address , List<Draft> drafts ) { this( address, DECODERS, drafts ); } /** * @see #WebSocketServer(InetSocketAddress, int, List, Collection) more details here */ public WebSocketServer( InetSocketAddress address , int decodercount , List<Draft> drafts ) { this( address, decodercount, drafts, new HashSet<WebSocket>() ); } public WebSocketServer( InetSocketAddress address , int decodercount , List<Draft> drafts , Collection<WebSocket> connectionscontainer ) { if( address == null || decodercount < 1 || connectionscontainer == null ) { throw new IllegalArgumentException( "address and connectionscontainer must not be null and you need at least 1 decoder" ); } if( drafts == null ) this.drafts = Collections.emptyList(); else this.drafts = drafts; this.address = address; this.connections = connectionscontainer; iqueue = new LinkedList<WebSocketImpl>(); decoders = new ArrayList<WebSocketWorker>( decodercount ); buffers = new LinkedBlockingQueue<ByteBuffer>(); for( int i = 0 ; i < decodercount ; i++ ) { WebSocketWorker ex = new WebSocketWorker(); decoders.add( ex ); ex.start(); } } public void start() { if( selectorthread != null ) throw new IllegalStateException( getClass().getName() + " can only be started once." ); new Thread( this ).start(); } /** * Closes all connected clients sockets, then closes the underlying * ServerSocketChannel, effectively killing the server socket selectorthread, * freeing the port the server was bound to and stops all internal workerthreads. 
* * If this method is called before the server is started it will never start. * * @param timeout * Specifies how many milliseconds the overall close handshaking may take altogether before the connections are closed without proper close handshaking.<br> * * @throws IOException * When {@link ServerSocketChannel}.close throws an IOException * @throws InterruptedException */ public void stop( int timeout ) throws InterruptedException { if( !isclosed.compareAndSet( false, true ) ) { // this also makes sure that no further connections will be added to this.connections return; } List<WebSocket> socketsToClose = null; // copy the connections in a list (prevent callback deadlocks) synchronized ( connections ) { socketsToClose = new ArrayList<WebSocket>( connections ); } for( WebSocket ws : socketsToClose ) { ws.close( CloseFrame.GOING_AWAY ); } synchronized ( this ) { if( selectorthread != null ) { if( Thread.currentThread() != selectorthread ) { } if( selectorthread != Thread.currentThread() ) { if( socketsToClose.size() > 0 ) selectorthread.join( timeout );// isclosed will tell the selectorthread to go down after the last connection was closed selectorthread.interrupt();// in case the selectorthread did not terminate in time we send the interrupt selectorthread.join(); } } } } public void stop() throws IOException , InterruptedException { stop( 0 ); } /** * Returns a WebSocket[] of currently connected clients. * Its iterators will be failfast and its not judicious * to modify it. * * @return The currently connected clients. */ public Collection<WebSocket> connections() { return this.connections; } public InetSocketAddress getAddress() { return this.address; } /** * Gets the port number that this server listens on. * * @return The port number. 
*/ public int getPort() { int port = getAddress().getPort(); if( port == 0 && server != null ) { port = server.socket().getLocalPort(); } return port; } public List<Draft> getDraft() { return Collections.unmodifiableList( drafts ); } // Runnable IMPLEMENTATION ///////////////////////////////////////////////// public void run() { synchronized ( this ) { if( selectorthread != null ) throw new IllegalStateException( getClass().getName() + " can only be started once." ); selectorthread = Thread.currentThread(); if( isclosed.get() ) { return; } } selectorthread.setName( "WebsocketSelector" + selectorthread.getId() ); try { server = ServerSocketChannel.open(); server.configureBlocking( false ); ServerSocket socket = server.socket(); socket.setReceiveBufferSize( WebSocketImpl.RCVBUF ); socket.bind( address ); selector = Selector.open(); server.register( selector, server.validOps() ); } catch ( IOException ex ) { handleFatal( null, ex ); return; } try { while ( !selectorthread.isInterrupted() ) { SelectionKey key = null; WebSocketImpl conn = null; try { selector.select(); Set<SelectionKey> keys = selector.selectedKeys(); Iterator<SelectionKey> i = keys.iterator(); while ( i.hasNext() ) { key = i.next(); if( !key.isValid() ) { // Object o = key.attachment(); continue; } if( key.isAcceptable() ) { if( !onConnect( key ) ) { key.cancel(); continue; } SocketChannel channel = server.accept(); channel.configureBlocking( false ); WebSocketImpl w = wsf.createWebSocket( this, drafts, channel.socket() ); w.key = channel.register( selector, SelectionKey.OP_READ, w ); w.channel = wsf.wrapChannel( channel, w.key ); i.remove(); allocateBuffers( w ); continue; } if( key.isReadable() ) { conn = (WebSocketImpl) key.attachment(); ByteBuffer buf = takeBuffer(); try { if( SocketChannelIOHelper.read( buf, conn, conn.channel ) ) { if( buf.hasRemaining() ) { conn.inQueue.put( buf ); queue( conn ); i.remove(); if( conn.channel instanceof WrappedByteChannel ) { if( ( (WrappedByteChannel) 
conn.channel ).isNeedRead() ) { iqueue.add( conn ); } } } else pushBuffer( buf ); } else { pushBuffer( buf ); } } catch ( IOException e ) { pushBuffer( buf ); throw e; } } if( key.isWritable() ) { conn = (WebSocketImpl) key.attachment(); if( SocketChannelIOHelper.batch( conn, conn.channel ) ) { if( key.isValid() ) key.interestOps( SelectionKey.OP_READ ); } } } while ( !iqueue.isEmpty() ) { conn = iqueue.remove( 0 ); WrappedByteChannel c = ( (WrappedByteChannel) conn.channel ); ByteBuffer buf = takeBuffer(); try { if( SocketChannelIOHelper.readMore( buf, conn, c ) ) iqueue.add( conn ); if( buf.hasRemaining() ) { conn.inQueue.put( buf ); queue( conn ); } else { pushBuffer( buf ); } } catch ( IOException e ) { pushBuffer( buf ); throw e; } } } catch ( CancelledKeyException e ) { // an other thread may cancel the key } catch ( ClosedByInterruptException e ) { return; // do the same stuff as when InterruptedException is thrown } catch ( IOException ex ) { if( key != null ) key.cancel(); handleIOException( key, conn, ex ); } catch ( InterruptedException e ) { return;// FIXME controlled shutdown (e.g. 
take care of buffermanagement) } } } catch ( RuntimeException e ) { // should hopefully never occur handleFatal( null, e ); } finally { if( decoders != null ) { for( WebSocketWorker w : decoders ) { w.interrupt(); } } if( server != null ) { try { server.close(); } catch ( IOException e ) { onError( null, e ); } } } } protected void allocateBuffers( WebSocket c ) throws InterruptedException { if( queuesize.get() >= 2 * decoders.size() + 1 ) { return; } queuesize.incrementAndGet(); buffers.put( createBuffer() ); } protected void releaseBuffers( WebSocket c ) throws InterruptedException { // queuesize.decrementAndGet(); // takeBuffer(); } public ByteBuffer createBuffer() { return ByteBuffer.allocate( WebSocketImpl.RCVBUF ); } private void queue( WebSocketImpl ws ) throws InterruptedException { if( ws.workerThread == null ) { ws.workerThread = decoders.get( queueinvokes % decoders.size() ); queueinvokes++; } ws.workerThread.put( ws ); } private ByteBuffer takeBuffer() throws InterruptedException { return buffers.take(); } private void pushBuffer( ByteBuffer buf ) throws InterruptedException { if( buffers.size() > queuesize.intValue() ) return; buffers.put( buf ); } private void handleIOException( SelectionKey key, WebSocket conn, IOException ex ) { // onWebsocketError( conn, ex );// conn may be null here if( conn != null ) { conn.closeConnection( CloseFrame.ABNORMAL_CLOSE, ex.getMessage() ); } else if( key != null ) { SelectableChannel channel = key.channel(); if( channel != null && channel.isOpen() ) { // this could be the case if the IOException ex is a SSLException try { channel.close(); } catch ( IOException e ) { // there is nothing that must be done here } if( WebSocketImpl.DEBUG ) System.out.println( "Connection closed because of" + ex ); } } } private void handleFatal( WebSocket conn, Exception e ) { onError( conn, e ); try { stop(); } catch ( IOException e1 ) { onError( null, e1 ); } catch ( InterruptedException e1 ) { Thread.currentThread().interrupt(); 
onError( null, e1 ); } } protected String getFlashSecurityPolicy() { return "<cross-domain-policy><allow-access-from domain=\"*\" to-ports=\"" + getPort() + "\" /></cross-domain-policy>"; } @Override public final void onWebsocketMessage( WebSocket conn, String message ) { onMessage( conn, message ); } @Override @Deprecated public/*final*/void onWebsocketMessageFragment( WebSocket conn, Framedata frame ) {// onFragment should be overloaded instead onFragment( conn, frame ); } @Override public final void onWebsocketMessage( WebSocket conn, ByteBuffer blob ) { onMessage( conn, blob ); } @Override public final void onWebsocketOpen( WebSocket conn, Handshakedata handshake ) { if( addConnection( conn ) ) { onOpen( conn, (ClientHandshake) handshake ); } } @Override public final void onWebsocketClose( WebSocket conn, int code, String reason, boolean remote ) { selector.wakeup(); try { if( removeConnection( conn ) ) { onClose( conn, code, reason, remote ); } } finally { try { releaseBuffers( conn ); } catch ( InterruptedException e ) { Thread.currentThread().interrupt(); } } } /** * This method performs remove operations on the connection and therefore also gives control over whether the operation shall be synchronized * <p> * {@link #WebSocketServer(InetSocketAddress, int, List, Collection)} allows to specify a collection which will be used to store current connections in.<br> * Depending on the type on the connection, modifications of that collection may have to be synchronized. 
**/
    protected boolean removeConnection( WebSocket ws ) {
        boolean removed;
        synchronized ( connections ) {
            removed = this.connections.remove( ws );
            assert ( removed );
        }
        // If the server is shutting down and this was the last tracked connection,
        // wake the selector thread so the shutdown sequence can complete.
        if( isclosed.get() && connections.size() == 0 ) {
            selectorthread.interrupt();
        }
        return removed;
    }

    @Override
    public ServerHandshakeBuilder onWebsocketHandshakeReceivedAsServer( WebSocket conn, Draft draft, ClientHandshake request ) throws InvalidDataException {
        // Delegates straight to the superclass; override in subclasses to veto or amend handshakes.
        return super.onWebsocketHandshakeReceivedAsServer( conn, draft, request );
    }

    /**
     * Adds a connection to the managed connection collection.
     *
     * @see #removeConnection(WebSocket)
     */
    protected boolean addConnection( WebSocket ws ) {
        if( !isclosed.get() ) {
            synchronized ( connections ) {
                boolean succ = this.connections.add( ws );
                assert ( succ );
                return succ;
            }
        } else {
            // This case will happen when a new connection gets ready while the server is already stopping.
            ws.close( CloseFrame.GOING_AWAY );
            return true;// for consistency sake we will make sure that both onOpen will be called
        }
    }

    /**
     * @param conn
     *            may be null if the error does not belong to a single connection
     */
    @Override
    public final void onWebsocketError( WebSocket conn, Exception ex ) {
        onError( conn, ex );
    }

    @Override
    public final void onWriteDemand( WebSocket w ) {
        WebSocketImpl conn = (WebSocketImpl) w;
        try {
            // Register interest in writing so the selector loop flushes the out queue.
            conn.key.interestOps( SelectionKey.OP_READ | SelectionKey.OP_WRITE );
        } catch ( CancelledKeyException e ) {
            // the thread which cancels key is responsible for possible cleanup
            conn.outQueue.clear();
        }
        selector.wakeup();
    }

    @Override
    public void onWebsocketCloseInitiated( WebSocket conn, int code, String reason ) {
        onCloseInitiated( conn, code, reason );
    }

    @Override
    public void onWebsocketClosing( WebSocket conn, int code, String reason, boolean remote ) {
        onClosing( conn, code, reason, remote );
    }

    /** Hook invoked when this side starts closing the connection. Default: no-op. */
    public void onCloseInitiated( WebSocket conn, int code, String reason ) {
    }

    /** Hook invoked while a connection is closing. Default: no-op. */
    public void onClosing( WebSocket conn, int code, String reason, boolean remote ) {
    }

    public final void setWebSocketFactory( WebSocketServerFactory wsf ) {
        this.wsf = wsf;
    }

    public final WebSocketFactory getWebSocketFactory() {
        return wsf;
    }

    /**
     * Returns whether a new connection shall be accepted or not.<br>
     * Therefore this method is well suited to implement some kind of connection limitation.<br>
     *
     * @see #onOpen(WebSocket, ClientHandshake)
     * @see #onWebsocketHandshakeReceivedAsServer(WebSocket, Draft, ClientHandshake)
     **/
    protected boolean onConnect( SelectionKey key ) {
        return true;
    }

    // Extracts the underlying Socket from a connection's selection key.
    private Socket getSocket( WebSocket conn ) {
        WebSocketImpl impl = (WebSocketImpl) conn;
        return ( (SocketChannel) impl.key.channel() ).socket();
    }

    @Override
    public InetSocketAddress getLocalSocketAddress( WebSocket conn ) {
        return (InetSocketAddress) getSocket( conn ).getLocalSocketAddress();
    }

    @Override
    public InetSocketAddress getRemoteSocketAddress( WebSocket conn ) {
        return (InetSocketAddress) getSocket( conn ).getRemoteSocketAddress();
    }

    /** Called after an opening handshake has been performed and the given websocket is ready to be written on. */
    public abstract void onOpen( WebSocket conn, ClientHandshake handshake );

    /**
     * Called after the websocket connection has been closed.
     *
     * @param code
     *            The codes can be looked up here: {@link CloseFrame}
     * @param reason
     *            Additional information string
     * @param remote
     *            Returns whether or not the closing of the connection was initiated by the remote host.
     **/
    public abstract void onClose( WebSocket conn, int code, String reason, boolean remote );

    /**
     * Callback for string messages received from the remote host
     *
     * @see #onMessage(WebSocket, ByteBuffer)
     **/
    public abstract void onMessage( WebSocket conn, String message );

    /**
     * Called when an error occurs. If an error causes the websocket connection to fail,
     * {@link #onClose(WebSocket, int, String, boolean)} will be called additionally.<br>
     * This method will be called primarily because of IO or protocol errors.<br>
     * If the given exception is a RuntimeException, that probably means that you encountered a bug.<br>
     *
     * @param conn
     *            Can be null if the error does not belong to one specific websocket. For example if the server's port could not be bound.
     **/
    public abstract void onError( WebSocket conn, Exception ex );

    /**
     * Callback for binary messages received from the remote host
     *
     * @see #onMessage(WebSocket, String)
     **/
    public void onMessage( WebSocket conn, ByteBuffer message ) {
    }

    /**
     * @see WebSocket#sendFragmentedFrame(org.java_websocket.framing.Framedata.Opcode, ByteBuffer, boolean)
     */
    public void onFragment( WebSocket conn, Framedata fragment ) {
    }

    /**
     * Worker thread that decodes incoming data for the connections assigned to it.
     * Connections are handed over via {@link #put(WebSocketImpl)}.
     */
    public class WebSocketWorker extends Thread {

        private BlockingQueue<WebSocketImpl> iqueue;

        public WebSocketWorker() {
            iqueue = new LinkedBlockingQueue<WebSocketImpl>();
            setName( "WebSocketWorker-" + getId() );
            // Forward uncaught exceptions to the default handler rather than dying silently.
            setUncaughtExceptionHandler( new UncaughtExceptionHandler() {
                @Override
                public void uncaughtException( Thread t, Throwable e ) {
                    getDefaultUncaughtExceptionHandler().uncaughtException( t, e );
                }
            } );
        }

        public void put( WebSocketImpl ws ) throws InterruptedException {
            iqueue.put( ws );
        }

        @Override
        public void run() {
            WebSocketImpl ws = null;
            try {
                while ( true ) {
                    ByteBuffer buf = null;
                    ws = iqueue.take();
                    // The connection is expected to have a buffer queued whenever it is enqueued here.
                    buf = ws.inQueue.poll();
                    assert ( buf != null );
                    try {
                        ws.decode( buf );
                    } finally {
                        // Always return the buffer to the pool, even if decode() throws.
                        pushBuffer( buf );
                    }
                }
            } catch ( InterruptedException e ) {
                // Interruption is the normal shutdown signal for workers; exit quietly.
            } catch ( RuntimeException e ) {
                handleFatal( ws, e );
            }
        }
    }

    /** Factory for the {@link WebSocketImpl} instances and channels used by this server. */
    public interface WebSocketServerFactory extends WebSocketFactory {
        @Override
        public WebSocketImpl createWebSocket( WebSocketAdapter a, Draft d, Socket s );

        public WebSocketImpl createWebSocket( WebSocketAdapter a, List<Draft> drafts, Socket s );

        /**
         * Allows to wrap the SocketChannel ( key.channel() ) to insert a protocol layer
         * ( like ssl or proxy authentication ) beyond the ws layer.
         *
         * @param channel
         *            the open SocketChannel to wrap.
         * @param key
         *            a SelectionKey of an open SocketChannel.
         * @return The channel on which the read and write operations will be performed.<br>
         */
        public ByteChannel wrapChannel( SocketChannel channel, SelectionKey key ) throws IOException;
    }
}
package org.jsoftware.command;

import org.jsoftware.config.AbstractPatch;
import org.jsoftware.config.Patch;
import org.jsoftware.config.RollbackPatch;
import org.jsoftware.impl.CloseUtil;
import org.jsoftware.impl.DuplicatePatchNameException;
import org.jsoftware.impl.PatchParser;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Command: lists patches (newest first) together with the availability of a
 * rollback script for each one, and reports patches whose rollback is missing or empty.
 *
 * @author szalik
 */
public class RollbackListCommand extends AbstractListCommand<RollbackPatch> {
    // When true, the generated listing (and any missing-rollback warning) is written to the log.
    protected boolean output = true;

    @Override
    protected List<RollbackPatch> generateList(List<Patch> inListIn) throws IOException, SQLException, DuplicatePatchNameException {
        List<Patch> missingRollback = new LinkedList<Patch>();
        List<RollbackPatch> rollbacks = new LinkedList<RollbackPatch>();
        StringBuilder sb = new StringBuilder("Patch list:\n");
        // Work on a reversed copy so patches are processed newest-first without mutating the input.
        List<Patch> inList = new LinkedList<Patch>(inListIn);
        Collections.reverse(inList);
        for (Patch p : inList) {
            // Result intentionally ignored: parse() presumably updates the patch's state — TODO confirm.
            getConfigurationEntry().getPatchParser().parse(p, getConfigurationEntry());
            sb.append('\t');
            // One-character DB-state marker: '*' committed, 'P' in progress, ' ' not available.
            if (p.getDbState() == AbstractPatch.DbState.COMMITTED) sb.append('*');
            if (p.getDbState() == AbstractPatch.DbState.IN_PROGRESS) sb.append('P');
            if (p.getDbState() == AbstractPatch.DbState.NOT_AVAILABLE) sb.append(' ');
            sb.append(' ').append(p.getName());
            // Pad the name column to SPACES characters (constant inherited from the superclass).
            for (int a = p.getName().length(); a < SPACES; a++) {
                sb.append(' ');
            }
            sb.append(" rollback: ");
            RollbackPatch rollbackPatch = findRollback(p);
            if (rollbackPatch.isMissing()) {
                missingRollback.add(p);
                sb.append("MISSING OR EMPTY");
            } else {
                sb.append("FOUND");
            }
            rollbacks.add(rollbackPatch);
            sb.append('\n');
        }
        if (output) {
            log.info(sb.toString().trim());
            if (!missingRollback.isEmpty()) {
                // Reuse the builder for the warning line listing all patches without a rollback.
                sb = new StringBuilder();
                for (Patch mp : missingRollback) {
                    sb.append(' ').append(mp.getName());
                }
                log.warn("Missing rollback patches: \n" + sb);
            }
        }
        return rollbacks;
    }

    /**
     * Locates and parses the rollback file for the given patch.
     * Returns a "missing" RollbackPatch when no file exists or the file contains no executable statements.
     */
    private RollbackPatch findRollback(Patch patch) throws IOException, DuplicatePatchNameException {
        File rf = getConfigurationEntry().getPatchScanner().findRollbackFile(directory, configurationEntry.getRollbackDirs().split(","), patch);
        if (rf == null) {
            return new RollbackPatch(patch);
        } else {
            FileInputStream fis = null;
            try {
                fis = new FileInputStream(rf);
                PatchParser.ParseResult pr = configurationEntry.getPatchParser().parse(fis, configurationEntry);
                int sc = pr.executableCount();
                if (sc == 0) {
                    // A rollback file that parses to zero statements is treated the same as a missing one.
                    log.warn("Rollback file patch found (" + rf.getAbsolutePath() + "), but contains zero statements!");
                    return new RollbackPatch(patch);
                }
                return new RollbackPatch(patch, rf, sc);
            } finally {
                // Quiet close; pre-Java-7 style resource handling used throughout this codebase.
                CloseUtil.close(fis);
            }
        }
    }

    @Override
    protected void executeInternal() throws Exception {
        // The command's only effect is producing the listing built by generateList().
        getList();
    }
}
package org.jtrfp.trcl.game; import org.jtrfp.trcl.conf.ConfigRootFeature; import org.jtrfp.trcl.core.Feature; import org.jtrfp.trcl.core.FeatureFactory; import org.jtrfp.trcl.core.Features; import org.jtrfp.trcl.core.FeaturesImpl.FeatureNotFoundException; import org.jtrfp.trcl.core.LoadOrderAware; import org.jtrfp.trcl.core.SavestateSaveLoadConfigurationFactory.SavestateSaveLoadConfiguration; import org.jtrfp.trcl.core.TRFactory.TR; import org.springframework.stereotype.Component; @Component public class GameConfigRootFactory implements FeatureFactory<TVF3Game>, LoadOrderAware{ public static String SAVE_URI_SUFFIX = ".sav.trcl.xml"; public static String DEFAULT_SAVE_URI = "game"+SAVE_URI_SUFFIX; public static class GameConfigRootFeature extends ConfigRootFeature<TVF3Game>{ private SavestateSaveLoadConfiguration savestateSaveLoadConf; @Override public void apply(TVF3Game target){ super.apply(target); getSavestateSaveLoadConf();//Kludge to ensure Feature is extracted in non-display() thread. 
} @Override public void destruct(TVF3Game target) { } @Override protected String getDefaultSaveURI() { return null; } @Override public String getConfigSaveURI(){ SavestateSaveLoadConfiguration conf = getSavestateSaveLoadConf(); if(conf != null){ final String uri = conf.getDefaultSavestateURI(); System.out.println("getConfigSaveURI returning "+uri); return uri; } return super.getConfigSaveURI(); }//end getConfigSaveURI() @Override public void setConfigSaveURI(String uri){ SavestateSaveLoadConfiguration conf = getSavestateSaveLoadConf(); if(conf != null) conf.setDefaultSavestateURI(uri); else super.setConfigSaveURI(uri); } protected SavestateSaveLoadConfiguration getSavestateSaveLoadConf() { if(savestateSaveLoadConf == null){ final TR tr = getTarget().getTr(); try{ final SavestateSaveLoadConfiguration conf = Features.get(tr, SavestateSaveLoadConfiguration.class); setSavestateSaveLoadConf(conf); }catch(FeatureNotFoundException e){ System.out.println("Warning: GameConfigRootFeature failed to find SavestateSaveLoadConfiguration feature in TR."); } }//end if(null) return savestateSaveLoadConf; } protected void setSavestateSaveLoadConf( SavestateSaveLoadConfiguration savestateSaveLoadConf) { this.savestateSaveLoadConf = savestateSaveLoadConf; } }//end GameConfigRootFeature @Override public Feature<TVF3Game> newInstance(TVF3Game target) { final GameConfigRootFeature result = new GameConfigRootFeature(); return result; } @Override public Class<TVF3Game> getTargetClass() { return TVF3Game.class; } @Override public Class<? extends Feature> getFeatureClass() { return GameConfigRootFeature.class; } @Override public int getFeatureLoadPriority() { return LoadOrderAware.LAST; } }//end GameConfigRoot
package org.jenkinsci.plugins.ghprb;

import hudson.Extension;
import hudson.model.AbstractProject;
import hudson.model.UnprotectedRootAction;
import hudson.security.ACL;
import hudson.security.csrf.CrumbExclusion;
import jenkins.model.Jenkins;
import org.acegisecurity.Authentication;
import org.acegisecurity.context.SecurityContextHolder;
import org.apache.commons.io.IOUtils;
import org.kohsuke.github.GHEventPayload.IssueComment;
import org.kohsuke.github.GHEventPayload.PullRequest;
import org.kohsuke.github.GHIssueState;
import org.kohsuke.github.GitHub;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletRequest;

/**
 * Unprotected root action receiving GitHub webhook deliveries at /ghprbhook.
 * Parses "issue_comment" and "pull_request" event payloads and dispatches them
 * to every registered {@link GhprbWebHook} whose repository and signature match.
 */
@Extension
public class GhprbRootAction implements UnprotectedRootAction {
    static final String URL = "ghprbhook";
    private static final Logger logger = Logger.getLogger(GhprbRootAction.class.getName());

    public String getIconFileName() {
        return null;
    }

    public String getDisplayName() {
        return null;
    }

    public String getUrlName() {
        return URL;
    }

    /**
     * Webhook endpoint. Accepts application/json and application/x-www-form-urlencoded
     * payloads; anything else (including a missing Content-Type) is rejected with 415.
     */
    public void doIndex(StaplerRequest req, StaplerResponse resp) {
        String event = req.getHeader("X-GitHub-Event");
        String signature = req.getHeader("X-Hub-Signature");
        String type = req.getContentType();

        // Fix: getContentType() returns null when the request carries no Content-Type
        // header; the startsWith() calls below would otherwise throw an NPE.
        if (type == null) {
            logger.log(Level.SEVERE, "Request has no content type. "
                    + "Please use 'application/json' or 'application/x-www-form-urlencoded'");
            resp.setStatus(StaplerResponse.SC_UNSUPPORTED_MEDIA_TYPE);
            return;
        }

        String payload = null;
        String body = null;

        if (type.startsWith("application/json")) {
            body = extractRequestBody(req);
            if (body == null) {
                logger.log(Level.SEVERE, "Can't get request body for application/json.");
                resp.setStatus(StaplerResponse.SC_BAD_REQUEST);
                return;
            }
            payload = body;
        } else if (type.startsWith("application/x-www-form-urlencoded")) {
            body = extractRequestBody(req);
            // A form-encoded hook must at least contain the "payload=" prefix (8 chars).
            if (body == null || body.length() <= 8) {
                logger.log(Level.SEVERE, "Request doesn't contain payload. "
                        + "You're sending url encoded request, so you should pass github payload through 'payload' request parameter");
                resp.setStatus(StaplerResponse.SC_BAD_REQUEST);
                return;
            }
            try {
                String encoding = req.getCharacterEncoding();
                payload = URLDecoder.decode(body.substring(8), encoding != null ? encoding : "UTF-8");
            } catch (UnsupportedEncodingException e) {
                logger.log(Level.SEVERE, "Error while trying to decode the payload");
                resp.setStatus(StaplerResponse.SC_BAD_REQUEST);
                return;
            }
        }

        if (payload == null) {
            logger.log(Level.SEVERE, "Payload is null, maybe content type ''{0}'' is not supported by this plugin. "
                    + "Please use 'application/json' or 'application/x-www-form-urlencoded'",
                    new Object[] { type });
            resp.setStatus(StaplerResponse.SC_UNSUPPORTED_MEDIA_TYPE);
            return;
        }

        logger.log(Level.FINE, "Got payload event: {0}", event);

        try {
            // Anonymous connection is only used to pre-parse the payload; per-hook
            // authenticated re-parsing happens below before any action is taken.
            GitHub gh = GitHub.connectAnonymously();

            if ("issue_comment".equals(event)) {
                IssueComment issueComment = getIssueComment(payload, gh);
                GHIssueState state = issueComment.getIssue().getState();
                if (state == GHIssueState.CLOSED) {
                    logger.log(Level.INFO, "Skip comment on closed PR");
                    return;
                }
                String repoName = issueComment.getRepository().getFullName();

                logger.log(Level.INFO, "Checking issue comment ''{0}'' for repo {1}",
                        new Object[] { issueComment.getComment(), repoName });

                for (GhprbWebHook webHook : getWebHooks()) {
                    try {
                        // Signature is verified against the raw request body, not the decoded payload.
                        if (webHook.matchRepo(repoName) && webHook.checkSignature(body, signature)) {
                            IssueComment authedComment = getIssueComment(payload, webHook.getGitHub());
                            webHook.handleComment(authedComment);
                        }
                    } catch (Exception e) {
                        // One failing hook must not prevent delivery to the others.
                        logger.log(Level.SEVERE, "Unable to process web hook for: " + webHook.getProjectName(), e);
                    }
                }
            } else if ("pull_request".equals(event)) {
                PullRequest pr = getPullRequest(payload, gh);
                String repoName = pr.getRepository().getFullName();

                logger.log(Level.INFO, "Checking PR #{1} for {0}", new Object[] { repoName, pr.getNumber() });

                for (GhprbWebHook webHook : getWebHooks()) {
                    try {
                        if (webHook.matchRepo(repoName) && webHook.checkSignature(body, signature)) {
                            PullRequest authedPr = getPullRequest(payload, webHook.getGitHub());
                            webHook.handlePR(authedPr);
                        }
                    } catch (Exception e) {
                        logger.log(Level.SEVERE, "Unable to process web hook for: " + webHook.getProjectName(), e);
                    }
                }
            } else {
                logger.log(Level.WARNING, "Request not known");
            }
        } catch (IOException e) {
            logger.log(Level.SEVERE, "Unable to connect to GitHub anonymously", e);
        }
    }

    /** Parses the payload as a pull_request event using the given GitHub client. */
    private PullRequest getPullRequest(String payload, GitHub gh) throws IOException {
        PullRequest pr = gh.parseEventPayload(new StringReader(payload), PullRequest.class);
        return pr;
    }

    /** Parses the payload as an issue_comment event using the given GitHub client. */
    private IssueComment getIssueComment(String payload, GitHub gh) throws IOException {
        IssueComment issueComment = gh.parseEventPayload(new StringReader(payload), IssueComment.class);
        return issueComment;
    }

    /** Reads the full request body; returns null on I/O failure. */
    private String extractRequestBody(StaplerRequest req) {
        String body = null;
        BufferedReader br = null;
        try {
            br = req.getReader();
            body = IOUtils.toString(br);
        } catch (IOException e) {
            body = null;
        } finally {
            IOUtils.closeQuietly(br);
        }
        return body;
    }

    /**
     * Collects the web hooks of all projects using the GitHub pull request trigger.
     * Runs with SYSTEM authentication to be able to enumerate all jobs.
     */
    private Set<GhprbWebHook> getWebHooks() {
        final Set<GhprbWebHook> webHooks = new HashSet<GhprbWebHook>();

        // Fix: Jenkins.getInstance() may return null (e.g. during startup/shutdown).
        Jenkins jenkins = Jenkins.getInstance();
        if (jenkins == null) {
            logger.log(Level.WARNING, "Jenkins instance is not available; no web hooks collected");
            return webHooks;
        }

        // We need this to get access to list of repositories
        Authentication old = SecurityContextHolder.getContext().getAuthentication();
        SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM);
        try {
            for (AbstractProject<?, ?> job : jenkins.getAllItems(AbstractProject.class)) {
                GhprbTrigger trigger = job.getTrigger(GhprbTrigger.class);
                if (trigger == null || trigger.getWebHook() == null) {
                    continue;
                }
                webHooks.add(trigger.getWebHook());
            }
        } finally {
            // Always restore the caller's authentication.
            SecurityContextHolder.getContext().setAuthentication(old);
        }
        if (webHooks.size() == 0) {
            logger.log(Level.WARNING, "No projects found using GitHub pull request trigger");
        }
        return webHooks;
    }

    /** Exempts the webhook URL from Jenkins CSRF crumb validation. */
    @Extension
    public static class GhprbRootActionCrumbExclusion extends CrumbExclusion {

        @Override
        public boolean process(HttpServletRequest req, HttpServletResponse resp, FilterChain chain)
                throws IOException, ServletException {
            String pathInfo = req.getPathInfo();
            if (pathInfo != null && pathInfo.equals(getExclusionPath())) {
                chain.doFilter(req, resp);
                return true;
            }
            return false;
        }

        public String getExclusionPath() {
            return "/" + URL + "/";
        }
    }
}
package org.kairosdb.core.http.rest;

import com.google.common.collect.SetMultimap;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import org.kairosdb.core.datastore.QueryMetric;
import org.kairosdb.core.datastore.QueryQueuingManager;
import org.kairosdb.core.http.rest.json.ErrorResponse;
import org.kairosdb.plugin.Aggregator;
import org.kairosdb.plugin.GroupBy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.agileclick.genorm.runtime.Pair;

import javax.inject.Inject;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;

import static com.google.common.base.Preconditions.checkNotNull;
import static org.kairosdb.core.http.rest.MetricsResource.setHeaders;

/**
 * Administrative REST endpoints. Currently exposes a read-only view of the
 * queries tracked by the {@link QueryQueuingManager}.
 */
@Path("api/v1/admin")
public class AdminResource {
    private static final Logger logger = LoggerFactory.getLogger(AdminResource.class);

    private final QueryQueuingManager m_queuingManager;

    @Inject
    public AdminResource(QueryQueuingManager queuingManager) {
        this.m_queuingManager = checkNotNull(queuingManager, "queuingManager cannot be null.");
    }

    /**
     * Returns a JSON document describing every running query (hash, metric name,
     * original query JSON) plus the number of queries currently waiting.
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON + "; charset=UTF-8")
    @Path("/runningqueries")
    public Response listRunningQueries() {
        try {
            // Snapshot the waiting count first, then the running set, mirroring
            // the order in which the queuing manager is consulted.
            int waitingCount = m_queuingManager.getQueryWaitingCount();
            ArrayList<Pair<String, QueryMetric>> activeQueries = m_queuingManager.getRunningQueries();

            JsonArray queryArray = new JsonArray();
            for (Pair<String, QueryMetric> active : activeQueries) {
                QueryMetric metric = active.getSecond();

                JsonObject entry = new JsonObject();
                entry.addProperty("query hash", active.getFirst());
                entry.addProperty("metric name", metric.getName());
                entry.add("query JSON", metric.getJsonObj());
                queryArray.add(entry);
            }

            JsonObject responseBody = new JsonObject();
            responseBody.add("queries", queryArray);
            responseBody.addProperty("queries waiting", waitingCount);

            Response.ResponseBuilder ok = Response.status(Response.Status.OK).entity(responseBody.toString());
            setHeaders(ok);
            return ok.build();
        } catch (Exception e) {
            logger.error("Failed to get running queries.", e);
            return setHeaders(Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new ErrorResponse(e.getMessage()))).build();
        }
    }
}
package org.kitteh.irc.client.library.element;

/**
 * Represents an entity on an IRC server which can perform actions.
 */
public interface Actor {
    /**
     * Gets the Actor's name.
     *
     * @return the Actor's name
     */
    String getName();
}
package org.lightmare.jpa.jta;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Stack;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.transaction.HeuristicMixedException;
import javax.transaction.HeuristicRollbackException;
import javax.transaction.NotSupportedException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;

import org.lightmare.ejb.handlers.BeanHandler;
import org.lightmare.jpa.JpaManager;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.ObjectUtils;

/**
 * Implementation of {@link UserTransaction} interface for JNDI and EJB beans.
 * Wraps one or more JPA {@link EntityTransaction}s and their associated
 * {@link EntityManager}s, with separate tracking for REQUIRES_NEW transactions.
 *
 * @author levan
 *
 */
public class UserTransactionImpl implements UserTransaction {

    // Caches EntityTransaction instances for immediate commit or join with
    // other transactions
    private Stack<EntityTransaction> transactions;

    // Caches EntityManager instances for clear up
    private Stack<EntityManager> ems;

    // Caches EntityTransaction instances for immediate commit
    private Stack<EntityTransaction> requareNews;

    // Caches EntityManager instances for immediate clean up
    private Stack<EntityManager> requareNewEms;

    // Object which first called this (UserTransaction) instance
    private Object caller;

    // Declared final: constant contributions of one transaction to getStatus().
    // Denotes active transaction
    private static final int ACTIVE = 1;

    // Denotes inactive transaction
    private static final int INACTIVE = 0;

    private static final String TIMEOUT_NOT_SUPPORTED_ERROR = "Timeouts are not supported yet";

    protected UserTransactionImpl(EntityTransaction... transactions) {
        this.transactions = new Stack<EntityTransaction>();
        if (CollectionUtils.valid(transactions)) {
            addTransactions(transactions);
        }
    }

    /**
     * Pops and closes every {@link EntityManager} on the passed stack.
     * Shared by both the regular and the REQUIRES_NEW cleanup paths.
     */
    private void closeEms(Stack<EntityManager> entityManagers) {
        if (CollectionUtils.valid(entityManagers)) {
            EntityManager em;
            while (CollectionUtils.notEmpty(entityManagers)) {
                em = entityManagers.pop();
                JpaManager.closeEntityManager(em);
            }
        }
    }

    private void beginAll() throws NotSupportedException, SystemException {
        for (EntityTransaction transaction : transactions) {
            transaction.begin();
        }
    }

    @Override
    public void begin() throws NotSupportedException, SystemException {
        if (CollectionUtils.valid(transactions)) {
            beginAll();
        }
    }

    // Commits a single transaction only if it is still active.
    private void commit(EntityTransaction transaction) throws RollbackException,
            HeuristicMixedException, HeuristicRollbackException,
            SecurityException, IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.commit();
        }
    }

    private void commitAll() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            commit(transaction);
        }
    }

    @Override
    public void commit() throws RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SecurityException,
            IllegalStateException, SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                commitAll();
            }
        } finally {
            // Entity managers must be released even when the commit fails.
            closeEntityManagers();
        }
    }

    /**
     * NOTE(review): returns the number of active cached transactions rather
     * than a {@code javax.transaction.Status} constant as the
     * {@link UserTransaction} contract specifies — confirm callers expect a
     * count before changing this.
     */
    @Override
    public int getStatus() throws SystemException {
        int active = INACTIVE;

        if (CollectionUtils.valid(transactions)) {
            for (EntityTransaction transaction : transactions) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }
        if (CollectionUtils.valid(requareNews)) {
            for (EntityTransaction transaction : requareNews) {
                boolean isActive = transaction.isActive();
                active += isActive ? ACTIVE : INACTIVE;
            }
        }
        return active;
    }

    /**
     * Rollbacks passed {@link EntityTransaction} if it is active
     *
     * @param transaction
     */
    private void rollback(EntityTransaction transaction) {
        if (transaction.isActive()) {
            transaction.rollback();
        }
    }

    /**
     * Rollbacks new {@link EntityTransaction} at the end of
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} annotated bean
     * methods
     */
    public void rollbackReqNews() throws IllegalStateException,
            SecurityException, SystemException {
        try {
            if (checkNews()) {
                while (CollectionUtils.notEmpty(requareNews)) {
                    EntityTransaction entityTransaction = requareNews.pop();
                    rollback(entityTransaction);
                }
            }
        } finally {
            closeReqNew();
        }
    }

    private void rollbackAll() throws IllegalStateException, SecurityException,
            SystemException {
        EntityTransaction transaction;
        while (CollectionUtils.notEmpty(transactions)) {
            transaction = transactions.pop();
            rollback(transaction);
        }
    }

    @Override
    public void rollback() throws IllegalStateException, SecurityException,
            SystemException {
        try {
            if (CollectionUtils.valid(transactions)) {
                rollbackAll();
            }
        } finally {
            closeEntityManagers();
        }
    }

    private void setRollbackOnly(EntityTransaction transaction)
            throws IllegalStateException, SystemException {
        if (transaction.isActive()) {
            transaction.setRollbackOnly();
        }
    }

    private void setRollbackOnlyAll() throws IllegalStateException,
            SystemException {
        for (EntityTransaction transaction : transactions) {
            setRollbackOnly(transaction);
        }
    }

    @Override
    public void setRollbackOnly() throws IllegalStateException,
            SystemException {
        if (CollectionUtils.valid(transactions)) {
            setRollbackOnlyAll();
        }
    }

    @Override
    public void setTransactionTimeout(int time) throws SystemException {
        throw new UnsupportedOperationException(TIMEOUT_NOT_SUPPORTED_ERROR);
    }

    // Lazily initializes the REQUIRES_NEW transaction stack.
    private Stack<EntityTransaction> getNews() {
        if (requareNews == null) {
            requareNews = new Stack<EntityTransaction>();
        }
        return requareNews;
    }

    // Lazily initializes the REQUIRES_NEW entity manager stack.
    private Stack<EntityManager> getNewEms() {
        if (requareNewEms == null) {
            requareNewEms = new Stack<EntityManager>();
        }
        return requareNewEms;
    }

    /**
     * Check if {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type
     * transactions stack is empty
     *
     * @return <code>boolean</code>
     */
    private boolean checkNews() {
        boolean notEmpty = CollectionUtils.valid(requareNews);
        return notEmpty;
    }

    /**
     * Adds new {@link EntityTransaction} for
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} annotated bean
     * methods
     *
     * @param entityTransaction
     */
    public void pushReqNew(EntityTransaction entityTransaction) {
        getNews().push(entityTransaction);
    }

    /**
     * Adds {@link EntityManager} to collection to close after
     * {@link javax.ejb.TransactionAttributeType#REQUIRES_NEW} type transactions
     * processing
     *
     * @param em
     */
    public void pushReqNewEm(EntityManager em) {
        getNewEms().push(em);
    }

    /**
     * Commits all cached REQUIRES_NEW transactions and closes their
     * {@link EntityManager}s.
     */
    public void commitReqNew() throws SecurityException, IllegalStateException,
            RollbackException, HeuristicMixedException,
            HeuristicRollbackException, SystemException {
        try {
            if (checkNews()) {
                while (CollectionUtils.notEmpty(requareNews)) {
                    EntityTransaction entityTransaction = requareNews.pop();
                    commit(entityTransaction);
                }
            }
        } finally {
            closeReqNew();
        }
    }

    /**
     * Closes all cached immediate {@link EntityManager} instances
     */
    private void closeReqNew() {
        // Deduplicated: delegate to the shared stack-draining helper.
        closeEms(requareNewEms);
    }

    /**
     * Adds {@link EntityTransaction} to transactions {@link List} for further
     * processing
     *
     * @param transaction
     */
    public void addTransaction(EntityTransaction transaction) {
        transactions.add(transaction);
    }

    /**
     * Adds {@link EntityTransaction}s to transactions {@link List} for further
     * processing
     *
     * @param transactions
     */
    public void addTransactions(EntityTransaction... transactions) {
        Collections.addAll(this.transactions, transactions);
    }

    /**
     * Adds {@link EntityManager} to collection to close after transactions
     * processing
     *
     * @param em
     */
    public void addEntityManager(EntityManager em) {
        if (ObjectUtils.notNull(em)) {
            if (ems == null) {
                ems = new Stack<EntityManager>();
            }
            ems.push(em);
        }
    }

    /**
     * Adds {@link EntityManager}'s to collection to close after transactions
     * processing
     *
     * @param ems
     */
    public void addEntityManagers(Collection<EntityManager> ems) {
        // Note: parameter shadows the field of the same name; each element is
        // forwarded to addEntityManager which targets the field.
        if (CollectionUtils.valid(ems)) {
            for (EntityManager em : ems) {
                addEntityManager(em);
            }
        }
    }

    /**
     * Closes all contained {@link EntityManager}s
     */
    public void closeEntityManagers() {
        // Deduplicated: delegate to the shared stack-draining helper.
        closeEms(ems);
    }

    /**
     * Checks whether the passed handler's bean is the object that first
     * obtained this transaction.
     */
    public boolean checkCaller(BeanHandler handler) {
        boolean check = ObjectUtils.notNull(caller);
        if (check) {
            check = caller.equals(handler.getBean());
        }
        return check;
    }

    public void setCaller(BeanHandler handler) {
        caller = handler.getBean();
    }

    public Object getCaller() {
        return caller;
    }

    /** Releases every cached {@link EntityManager} (regular and REQUIRES_NEW). */
    public void close() {
        closeEntityManagers();
        closeReqNew();
    }
}
package org.mahjong4j.yaku.yakuman;

import org.mahjong4j.hands.Kotsu;
import org.mahjong4j.hands.MentsuComp;

import java.util.List;

import static org.mahjong4j.yaku.yakuman.MahjongYakumanEnum.SUANKO;

/**
 * Resolver for the SUANKO yakuman: requires at least four triplet/quad sets,
 * none of which may be open.
 *
 * @author yu1ro
 */
public class SuankoResolver implements YakumanResolver {
    private final int count;
    private final List<Kotsu> kotsuList;

    public SuankoResolver(MentsuComp comp) {
        this.kotsuList = comp.getKotsuKantsu();
        this.count = comp.getKotsuCount() + comp.getKantsuCount();
    }

    public MahjongYakumanEnum getYakuman() {
        return SUANKO;
    }

    public boolean isMatch() {
        // Fewer than four triplet/quad sets can never form this yakuman.
        boolean enoughSets = count >= 4;
        if (!enoughSets) {
            return false;
        }
        // Every set must be concealed; a single open one disqualifies the hand.
        for (Kotsu candidate : kotsuList) {
            if (candidate.getIsOpen()) {
                return false;
            }
        }
        return true;
    }
}