code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
*
*/
package net.community.chest.swing.text;
import java.lang.reflect.Method;
import java.util.Map;
import javax.swing.Icon;
import javax.swing.text.MutableAttributeSet;
import javax.swing.text.SimpleAttributeSet;
import net.community.chest.awt.dom.UIReflectiveAttributesProxy;
import net.community.chest.reflect.AttributeAccessor;
/**
* <P>Copyright 2009 as per GPLv2</P>
*
* @param <V> Type of {@link MutableAttributeSet} being proxy-ed
* @author Lyor G.
* @since Jul 30, 2009 9:20:48 AM
*/
public class MutableAttributeSetXmlProxy<V extends MutableAttributeSet>
        extends UIReflectiveAttributesProxy<V> {
    protected MutableAttributeSetXmlProxy (Class<V> objClass, boolean registerAsDefault)
        throws IllegalArgumentException, IllegalStateException
    {
        super(objClass, registerAsDefault);
    }

    public MutableAttributeSetXmlProxy (Class<V> vc)
    {
        this(vc, false);
    }
    /*
     * @see net.community.chest.dom.proxy.ReflectiveAttributesProxy#updateObjectResourceAttribute(java.lang.Object, java.lang.String, java.lang.String, java.lang.Class, java.lang.reflect.Method)
     */
    @Override
    protected V updateObjectResourceAttribute (final V src, final String aName, final String aValue, final Class<?> t, final Method setter) throws Exception
    {
        // Resolve the resource value, then apply it through the setter.
        // The setter is invoked with a null target since StyleConstants-style
        // setters are static and take the attribute set as 1st argument.
        final Object resValue=loadObjectResourceAttribute(src, aName, aValue, t);
        setter.invoke(null, src, resValue);
        return src;
    }

    /**
     * Applies a single style attribute value via its (static) setter.
     * {@link Icon}-valued attributes are treated as resources and loaded
     * through {@code updateObjectResourceAttribute}.
     */
    public V updateStyleAttribute (final V src, final String aName, final String aValue,
                                   final Class<?> aType, final Method setter) throws Exception
    {
        if ((aType != null) && Icon.class.isAssignableFrom(aType))
            return updateObjectResourceAttribute(src, aName, aValue, Icon.class, setter);

        final Object attrValue=getObjectAttributeValue(src, aName, aValue, aType);
        setter.invoke(null, src, attrValue);
        return src;
    }

    /**
     * Looks up the named attribute in the default style-constants setters
     * map and applies it - attributes without a registered accessor are
     * delegated to the base class' unknown-attribute handling.
     */
    public V updateStyleAttribute (final V src, final String aName, final String aValue, final Map<String,? extends Method> accsMap) throws Exception
    {
        AttributeAccessor accessor=null;
        if ((aName != null) && (aName.length() > 0))
        {
            final Map<String,? extends AttributeAccessor> settersMap=
                StyleConstantsUtils.getDefaultStyleConstantsSetters();
            if ((settersMap != null) && (settersMap.size() > 0))
                accessor = settersMap.get(aName);
        }

        final Class<?> attrType=(accessor == null) ? null : accessor.getType();
        final Method attrSetter=(accessor == null) ? null : accessor.getSetter();
        if ((attrType == null) || (attrSetter == null))
            return super.handleUnknownAttribute(src, aName, aValue, accsMap);

        return updateStyleAttribute(src, aName, aValue, attrType, attrSetter);
    }
    /*
     * @see net.community.chest.dom.proxy.ReflectiveAttributesProxy#handleUnknownAttribute(java.lang.Object, java.lang.String, java.lang.String, java.util.Map)
     */
    @Override
    protected V handleUnknownAttribute (V src, String name, String value, Map<String,? extends Method> accsMap) throws Exception
    {
        // The NAME attribute is proxy-framework metadata, not a style key
        if (NAME_ATTR.equalsIgnoreCase(name))
            return src;

        return updateStyleAttribute(src, name, value, accsMap);
    }

    public static final MutableAttributeSetXmlProxy<SimpleAttributeSet> SIMPLESET=
        new MutableAttributeSetXmlProxy<SimpleAttributeSet>(SimpleAttributeSet.class, true);
}
| lgoldstein/communitychest | chest/gui/swing/src/main/java/net/community/chest/swing/text/MutableAttributeSetXmlProxy.java | Java | apache-2.0 | 3,524 |
package com.lhl.zk.interceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.handler.HandlerInterceptorAdapter;
import com.lhl.zk.util.AuthUtils;
public class AuthInterceptor extends HandlerInterceptorAdapter {

    private static final Logger log = LoggerFactory.getLogger(AuthInterceptor.class);

    /**
     * Pre-handle authentication gate. The actual login check is currently
     * disabled (commented out below), so every request is allowed through.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        // if (!AuthUtils.isLogin()) {
        // throw new RuntimeException("没有登录");
        //
        // }
        return true;
    }

    /**
     * Exposes the servlet context path and the login state to the view
     * model so templates can build absolute links / toggle login UI.
     */
    @Override
    public void postHandle(HttpServletRequest request,
            HttpServletResponse response, Object handler,
            ModelAndView modelAndView) throws Exception {
        // Spring passes a null ModelAndView for handlers that render no
        // view (e.g. @ResponseBody / direct response writes) - the original
        // code would NPE here, so guard before adding model attributes.
        if (modelAndView != null) {
            modelAndView.addObject("host", request.getContextPath());
            modelAndView.addObject("isLogin", AuthUtils.isLogin());
        }
        log.info("=========contextPath:{}", request.getContextPath());
        super.postHandle(request, response, handler, modelAndView);
    }
}
| liuhailin/zookeeper-console | src/main/java/com/lhl/zk/interceptor/AuthInterceptor.java | Java | apache-2.0 | 1,148 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Win32;
namespace IISUtil.Core
{
public class UtilFactory
{
    /// <summary>
    /// Returns the IIS utility implementation that matches the locally
    /// installed IIS version, or null when IIS is absent or older than 6.
    /// </summary>
    public static IUtil GetUtil()
    {
        int majorVersion = GetIISMajorVersion();
        if (majorVersion == 6)
            return new UtilForIIS6();
        // IIS 7, 7.5, 8, 8.5 and 10 all share the same management API, so
        // anything from major version 7 upwards uses the IIS7 implementation.
        // (The original switch only matched 7 and 8, silently returning
        // null on IIS 10 / Windows 10+.)
        if (majorVersion >= 7)
            return new UtilForIIS7();
        return null;
    }

    /// <summary>
    /// Reads the IIS major version from the registry; returns 0 when the
    /// key is missing or unreadable (i.e. IIS not installed).
    /// </summary>
    private static int GetIISMajorVersion()
    {
        int majorVersion = 0;
        try
        {
            // RegistryKey is IDisposable - release the handle when done
            // (the original leaked it).
            using (var key = Registry.LocalMachine.OpenSubKey(@"SOFTWARE\Microsoft\INetStp"))
            {
                if (key != null)
                    majorVersion = Convert.ToInt32(key.GetValue("MajorVersion"));
            }
        }
        catch
        {
            // Best-effort: access problems simply mean "version unknown" (0).
        }
        return majorVersion;
    }
}
}
| BoyTNT/iisutil | Core/UtilFactory.cs | C# | apache-2.0 | 764 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Validate
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: $
*/
/**
* @see Zend_Validate_Abstract
*/
require_once 'Zend/Validate/Abstract.php';
/**
* Validator which checks if the file already exists in the directory
*
* @category Zend
* @package Zend_Validate
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Validate_File_Exists extends Zend_Validate_Abstract
{
    /**
     * @const string Error constants
     */
    const DOES_NOT_EXIST = 'fileExistsDoesNotExist';

    /**
     * @var array Error message templates
     */
    protected $_messageTemplates = array(
        self::DOES_NOT_EXIST => "The file '%value%' does not exist"
    );

    /**
     * Internal list of directories, stored as a single comma-separated
     * string (so individual directory names must not contain commas)
     * @var string
     */
    protected $_directory = '';

    /**
     * @var array Error message template variables
     */
    protected $_messageVariables = array(
        'directory' => '_directory'
    );

    /**
     * Sets validator options
     *
     * Accepts a Zend_Config object, a comma-separated string, or an
     * array of directory names.
     *
     * @param string|array $directory
     * @throws Zend_Validate_Exception When $directory is neither config, string nor array
     * @return void
     */
    public function __construct($directory = array())
    {
        if ($directory instanceof Zend_Config) {
            $directory = $directory->toArray();
        } else if (is_string($directory)) {
            $directory = explode(',', $directory);
        } else if (!is_array($directory)) {
            require_once 'Zend/Validate/Exception.php';
            throw new Zend_Validate_Exception ('Invalid options to validator provided');
        }

        $this->setDirectory($directory);
    }

    /**
     * Returns the set file directories which are checked
     *
     * @param boolean $asArray Returns the values as array, when false an concated string is returned
     * @return string|array Comma-separated string by default, array when $asArray is true
     */
    public function getDirectory($asArray = false)
    {
        $asArray   = (bool) $asArray;
        $directory = (string) $this->_directory;
        if ($asArray) {
            $directory = explode(',', $directory);
        }

        return $directory;
    }

    /**
     * Sets the file directory which will be checked
     *
     * Replaces any previously configured directories.
     *
     * @param string|array $directory The directories to validate
     * @return Zend_Validate_File_Extension Provides a fluent interface
     */
    public function setDirectory($directory)
    {
        $this->_directory = null;
        $this->addDirectory($directory);
        return $this;
    }

    /**
     * Adds the file directory which will be checked
     *
     * Empty and non-string entries are silently skipped; entries are
     * trimmed and de-duplicated before being re-joined with commas.
     *
     * @param string|array $directory The directory to add for validation
     * @throws Zend_Validate_Exception When $directory is neither string nor array
     * @return Zend_Validate_File_Extension Provides a fluent interface
     */
    public function addDirectory($directory)
    {
        $directories = $this->getDirectory(true);

        if (is_string($directory)) {
            $directory = explode(',', $directory);
        } else if (!is_array($directory)) {
            require_once 'Zend/Validate/Exception.php';
            throw new Zend_Validate_Exception ('Invalid options to validator provided');
        }

        foreach ($directory as $content) {
            if (empty($content) || !is_string($content)) {
                continue;
            }

            $directories[] = trim($content);
        }
        $directories = array_unique($directories);

        // Sanity check to ensure no empty values
        foreach ($directories as $key => $dir) {
            if (empty($dir)) {
                unset($directories[$key]);
            }
        }

        $this->_directory = implode(',', $directories);
        return $this;
    }

    /**
     * Defined by Zend_Validate_Interface
     *
     * Returns true if and only if the file already exists in the set directories
     *
     * Also fails (DOES_NOT_EXIST) when no non-empty directory is
     * configured at all, since the file could not be checked anywhere.
     *
     * @param  string  $value Real file to check for existance
     * @param  array   $file  File data from Zend_File_Transfer
     * @return boolean
     */
    public function isValid($value, $file = null)
    {
        $directories = $this->getDirectory(true);
        if (($file !== null) and (!empty($file['destination']))) {
            $directories[] = $file['destination'];
        } else if (!isset($file['name'])) {
            $file['name'] = $value;
        }

        // $check tracks whether at least one directory was actually probed
        $check = false;
        foreach ($directories as $directory) {
            if (empty($directory)) {
                continue;
            }

            $check = true;
            if (!file_exists($directory . DIRECTORY_SEPARATOR . $file['name'])) {
                return $this->_throw($file, self::DOES_NOT_EXIST);
            }
        }

        if (!$check) {
            return $this->_throw($file, self::DOES_NOT_EXIST);
        }

        return true;
    }

    /**
     * Throws an error of the given type
     *
     * Records the offending file name as the message '%value%' before
     * raising the error.
     *
     * @param  string $file
     * @param  string $errorType
     * @return false
     */
    protected function _throw($file, $errorType)
    {
        if ($file !== null) {
            $this->_value = $file['name'];
        }

        $this->_error($errorType);
        return false;
    }
}
| ankuradhey/dealtrip | library/Zend/Validate/File/Exists.php | PHP | apache-2.0 | 5,927 |
//===--- GenCall.cpp - Swift IR Generation for Function Calls -------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file implements IR generation for function signature lowering
// in Swift. This includes creating the IR type, collecting IR attributes,
// performing calls, and supporting prologue and epilogue emission.
//
//===----------------------------------------------------------------------===//
#include "GenCall.h"
#include "Signature.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/CodeGen/CodeGenABITypes.h"
#include "clang/CodeGen/ModuleBuilder.h"
#include "swift/AST/GenericEnvironment.h"
#include "swift/SIL/SILType.h"
#include "swift/ABI/MetadataValues.h"
#include "swift/Runtime/Config.h"
#include "llvm/IR/CallSite.h"
#include "llvm/Support/Compiler.h"
#include "CallEmission.h"
#include "Explosion.h"
#include "GenObjC.h"
#include "GenPoly.h"
#include "GenProto.h"
#include "GenType.h"
#include "IRGenFunction.h"
#include "IRGenModule.h"
#include "LoadableTypeInfo.h"
#include "NativeConventionSchema.h"
using namespace swift;
using namespace irgen;
/// Fixed size of the opaque buffer for a yield-once coroutine:
/// NumWords_YieldOnceBuffer pointer-sized words (an ABI constant).
static Size getYieldOnceCoroutineBufferSize(IRGenModule &IGM) {
  return NumWords_YieldOnceBuffer * IGM.getPointerSize();
}

/// Alignment of the yield-once coroutine buffer (pointer-aligned).
static Alignment getYieldOnceCoroutineBufferAlignment(IRGenModule &IGM) {
  return IGM.getPointerAlignment();
}

/// Fixed size of the opaque buffer for a yield-many coroutine:
/// NumWords_YieldManyBuffer pointer-sized words (an ABI constant).
static Size getYieldManyCoroutineBufferSize(IRGenModule &IGM) {
  return NumWords_YieldManyBuffer * IGM.getPointerSize();
}

/// Alignment of the yield-many coroutine buffer (pointer-aligned).
static Alignment getYieldManyCoroutineBufferAlignment(IRGenModule &IGM) {
  return IGM.getPointerAlignment();
}
/// Return the fixed context-buffer size for a coroutine function type.
/// Must only be called for actual coroutines (yield-once / yield-many).
static Size getCoroutineContextSize(IRGenModule &IGM,
                                    CanSILFunctionType fnType) {
  switch (fnType->getCoroutineKind()) {
  case SILCoroutineKind::None:
    llvm_unreachable("expand a coroutine");
  case SILCoroutineKind::YieldOnce:
    return getYieldOnceCoroutineBufferSize(IGM);
  case SILCoroutineKind::YieldMany:
    return getYieldManyCoroutineBufferSize(IGM);
  }
  llvm_unreachable("bad kind");
}
/// Lower this schema to a scalar LLVM result type: void for an empty
/// schema, the bare scalar for a single element, and an anonymous
/// struct of the scalar types otherwise.
llvm::Type *ExplosionSchema::getScalarResultType(IRGenModule &IGM) const {
  if (size() == 0)
    return IGM.VoidTy;

  if (size() == 1)
    return begin()->getScalarType();

  SmallVector<llvm::Type*, 16> elementTypes;
  for (auto &element : *this)
    elementTypes.push_back(element.getScalarType());
  return llvm::StructType::get(IGM.getLLVMContext(), elementTypes);
}
static void addDereferenceableAttributeToBuilder(IRGenModule &IGM,
llvm::AttrBuilder &b,
const TypeInfo &ti) {
// The addresses of empty values are undefined, so we can't safely mark them
// dereferenceable.
if (ti.isKnownEmpty(ResilienceExpansion::Maximal))
return;
// If we know the type to have a fixed nonempty size, then the pointer is
// dereferenceable to at least that size.
// TODO: Would be nice to have a "getMinimumKnownSize" on TypeInfo for
// dynamic-layout aggregates.
if (auto fixedTI = dyn_cast<FixedTypeInfo>(&ti)) {
b.addAttribute(
llvm::Attribute::getWithDereferenceableBytes(IGM.LLVMContext,
fixedTI->getFixedSize().getValue()));
}
}
// Attach the standard attributes for an indirect value parameter: the
// pointer cannot alias or be captured, and it must reference valid
// memory of the type's (fixed, nonempty) size.
static void addIndirectValueParameterAttributes(IRGenModule &IGM,
                                                llvm::AttributeList &attrs,
                                                const TypeInfo &ti,
                                                unsigned argIndex) {
  llvm::AttrBuilder builder;
  builder.addAttribute(llvm::Attribute::NoAlias);
  builder.addAttribute(llvm::Attribute::NoCapture);
  addDereferenceableAttributeToBuilder(IGM, builder, ti);

  attrs = attrs.addAttributes(IGM.LLVMContext,
                              argIndex + llvm::AttributeList::FirstArgIndex,
                              builder);
}
// Attach attributes for an @inout parameter. Aliasing inouts is
// unspecified but must remain memory-safe, so (unlike indirect value
// parameters) NoAlias is NOT added; the pointer still cannot escape and
// must reference dereferenceable memory of the type.
// NOTE(review): 'aliasable' is unused in this body - kept for interface
// stability with callers.
static void addInoutParameterAttributes(IRGenModule &IGM,
                                        llvm::AttributeList &attrs,
                                        const TypeInfo &ti, unsigned argIndex,
                                        bool aliasable) {
  llvm::AttrBuilder builder;
  builder.addAttribute(llvm::Attribute::NoCapture);
  addDereferenceableAttributeToBuilder(IGM, builder, ti);

  attrs = attrs.addAttributes(IGM.LLVMContext,
                              argIndex + llvm::AttributeList::FirstArgIndex,
                              builder);
}
/// The LLVM calling convention used for all native (non-C) Swift
/// function representations.
static llvm::CallingConv::ID getFreestandingConvention(IRGenModule &IGM) {
  // TODO: use a custom CC that returns three scalars efficiently
  return IGM.SwiftCC;
}
/// Expand the requirements of the given abstract calling convention
/// into a "physical" calling convention.
///
/// C-compatible representations (C function pointers, ObjC methods,
/// blocks) lower to the platform C convention; all native Swift
/// representations share the freestanding Swift convention.
llvm::CallingConv::ID irgen::expandCallingConv(IRGenModule &IGM,
                                    SILFunctionTypeRepresentation convention) {
  switch (convention) {
  case SILFunctionTypeRepresentation::CFunctionPointer:
  case SILFunctionTypeRepresentation::ObjCMethod:
  case SILFunctionTypeRepresentation::Block:
    return llvm::CallingConv::C;

  case SILFunctionTypeRepresentation::Method:
  case SILFunctionTypeRepresentation::WitnessMethod:
  case SILFunctionTypeRepresentation::Closure:
  case SILFunctionTypeRepresentation::Thin:
  case SILFunctionTypeRepresentation::Thick:
    return getFreestandingConvention(IGM);
  }
  llvm_unreachable("bad calling convention!");
}
// Attach the attributes for an indirect-result pointer: never aliased or
// captured, plus StructRet when this result is allowed to claim the
// (at most one) sret slot.
static void addIndirectResultAttributes(IRGenModule &IGM,
                                        llvm::AttributeList &attrs,
                                        unsigned paramIndex, bool allowSRet) {
  llvm::AttrBuilder builder;
  builder.addAttribute(llvm::Attribute::NoAlias);
  builder.addAttribute(llvm::Attribute::NoCapture);
  if (allowSRet)
    builder.addAttribute(llvm::Attribute::StructRet);

  attrs = attrs.addAttributes(IGM.LLVMContext,
                              paramIndex + llvm::AttributeList::FirstArgIndex,
                              builder);
}
// Mark the parameter at the given index as the Swift 'self' argument.
void IRGenModule::addSwiftSelfAttributes(llvm::AttributeList &attrs,
                                         unsigned argIndex) {
  llvm::AttrBuilder builder;
  builder.addAttribute(llvm::Attribute::SwiftSelf);
  attrs = attrs.addAttributes(this->LLVMContext,
                              argIndex + llvm::AttributeList::FirstArgIndex,
                              builder);
}
/// Mark the parameter at the given index as the Swift error-result
/// pointer. On ABIs without a dedicated swifterror register the
/// attribute itself is omitted, but the pointer is still known to be
/// unaliased, uncaptured and dereferenceable for one pointer's worth.
void IRGenModule::addSwiftErrorAttributes(llvm::AttributeList &attrs,
                                          unsigned argIndex) {
  llvm::AttrBuilder b;
  // Don't add the swifterror attribute on ABIs that don't pass it in a register.
  // We create a shadow stack location of the swifterror parameter for the
  // debugger on such platforms and so we can't mark the parameter with a
  // swifterror attribute.
  if (IsSwiftErrorInRegister)
    b.addAttribute(llvm::Attribute::SwiftError);

  // The error result should not be aliased, captured, or pointed at invalid
  // addresses regardless.
  b.addAttribute(llvm::Attribute::NoAlias);
  b.addAttribute(llvm::Attribute::NoCapture);
  b.addDereferenceableAttr(getPointerSize().getValue());

  auto attrIndex = argIndex + llvm::AttributeList::FirstArgIndex;
  attrs = attrs.addAttributes(this->LLVMContext, attrIndex, b);
}
// Mark the parameter at the given index as a by-value argument with the
// given alignment.
void irgen::addByvalArgumentAttributes(IRGenModule &IGM,
                                       llvm::AttributeList &attrs,
                                       unsigned argIndex, Alignment align) {
  llvm::AttrBuilder builder;
  builder.addAttribute(llvm::Attribute::ByVal);
  builder.addAttribute(
      llvm::Attribute::getWithAlignment(IGM.LLVMContext, align.getValue()));
  attrs = attrs.addAttributes(IGM.LLVMContext,
                              argIndex + llvm::AttributeList::FirstArgIndex,
                              builder);
}
// Attach a sign- or zero-extension attribute at the given attribute
// index (used for integer values narrower than a register).
void irgen::addExtendAttribute(IRGenModule &IGM, llvm::AttributeList &attrs,
                               unsigned index, bool signExtend) {
  llvm::AttrBuilder builder;
  builder.addAttribute(signExtend ? llvm::Attribute::SExt
                                  : llvm::Attribute::ZExt);
  attrs = attrs.addAttributes(IGM.LLVMContext, index, builder);
}
namespace swift {
namespace irgen {

namespace {
  /// Accumulates the pieces of an LLVM function signature while lowering
  /// a CanSILFunctionType: parameter IR types, result IR type, attribute
  /// list, foreign-function info and coroutine info.
  class SignatureExpansion {
    IRGenModule &IGM;
    CanSILFunctionType FnType;
  public:
    // Lowered parameter types, accumulated in ABI order.
    SmallVector<llvm::Type*, 8> ParamIRTypes;
    llvm::Type *ResultIRType = nullptr;
    llvm::AttributeList Attrs;
    ForeignFunctionInfo ForeignInfo;
    CoroutineInfo CoroInfo;

    // Each of these ABI resources may be claimed at most once per
    // signature; the claim* helpers below enforce / consume them.
    bool CanUseSRet = true;
    bool CanUseError = true;
    bool CanUseSelf = true;

    SignatureExpansion(IRGenModule &IGM, CanSILFunctionType fnType)
      : IGM(IGM), FnType(fnType) {}

    /// Expand the components of the primary entrypoint of the function type.
    void expandFunctionType();

    /// Expand the components of the continuation entrypoint of the
    /// function type.
    void expandCoroutineContinuationType();

    Signature getSignature();

  private:
    void expand(SILParameterInfo param);
    llvm::Type *addIndirectResult();

    SILFunctionConventions getSILFuncConventions() const {
      return SILFunctionConventions(FnType, IGM.getSILModule());
    }

    /// Index the next-added parameter will occupy.
    unsigned getCurParamIndex() {
      return ParamIRTypes.size();
    }

    /// Consume the sret slot; returns whether it was still available.
    bool claimSRet() {
      bool result = CanUseSRet;
      CanUseSRet = false;
      return result;
    }

    /// Consume the swiftself slot (asserts it was not already claimed).
    bool claimSelf() {
      auto Ret = CanUseSelf;
      assert(CanUseSelf && "Multiple self parameters?!");
      CanUseSelf = false;
      return Ret;
    }

    /// Consume the swifterror slot (asserts it was not already claimed).
    bool claimError() {
      auto Ret = CanUseError;
      assert(CanUseError && "Multiple error parameters?!");
      CanUseError = false;
      return Ret;
    }

    /// Add a pointer to the given type as the next parameter.
    void addPointerParameter(llvm::Type *storageType) {
      ParamIRTypes.push_back(storageType->getPointerTo());
    }

    void addCoroutineContextParameter();

    void expandResult();
    llvm::Type *expandDirectResult();
    void expandParameters();
    void expandExternalSignatureTypes();
    void expandCoroutineResult(bool forContinuation);
    void expandCoroutineContinuationParameters();
  };
} // end anonymous namespace
} // end namespace irgen
} // end namespace swift
/// Lower the result indirectly: the value is returned through an
/// out-pointer parameter (claiming sret if still available), and the
/// function itself returns void.
llvm::Type *SignatureExpansion::addIndirectResult() {
  auto resultType = getSILFuncConventions().getSILResultType();
  auto &resultTI = IGM.getTypeInfo(resultType);
  addIndirectResultAttributes(IGM, Attrs, ParamIRTypes.size(), claimSRet());
  addPointerParameter(resultTI.getStorageType());
  return IGM.VoidTy;
}
/// Expand all of the direct and indirect result types.
void SignatureExpansion::expandResult() {
  if (FnType->isCoroutine()) {
    // This should be easy enough to support if we need to: use the
    // same algorithm but add the direct results to the results as if
    // they were unioned in.
    return expandCoroutineResult(/*for continuation*/ false);
  }

  auto fnConv = getSILFuncConventions();

  // Disable the use of sret if we have multiple indirect results.
  if (fnConv.getNumIndirectSILResults() > 1)
    CanUseSRet = false;

  // Expand the direct result.
  ResultIRType = expandDirectResult();

  // Expand the indirect results. At most the first one can claim the
  // sret slot (and only if the direct result did not consume it above).
  for (auto indirectResultType : fnConv.getIndirectSILResultTypes()) {
    addIndirectResultAttributes(IGM, Attrs, ParamIRTypes.size(), claimSRet());
    addPointerParameter(IGM.getStorageType(indirectResultType));
  }
}
namespace {
  /// Describes how one yielded value of a coroutine is conveyed: either
  /// indirectly (by pointer) or directly through the scalar components
  /// of its native convention schema.
  class YieldSchema {
    SILType YieldTy;
    const TypeInfo &YieldTI;
    // Only populated when the yield is not formally indirect.
    Optional<NativeConventionSchema> NativeSchema;
    bool IsIndirect;
  public:
    YieldSchema(IRGenModule &IGM, SILFunctionConventions fnConv,
                SILYieldInfo yield)
        : YieldTy(fnConv.getSILType(yield)),
          YieldTI(IGM.getTypeInfo(YieldTy)) {
      if (isFormalIndirect()) {
        IsIndirect = true;
      } else {
        // A formally-direct yield may still demand an indirect convention.
        NativeSchema.emplace(IGM, &YieldTI, /*result*/ true);
        IsIndirect = NativeSchema->requiresIndirect();
      }
    }

    SILType getSILType() const {
      return YieldTy;
    }

    const TypeInfo &getTypeInfo() const {
      return YieldTI;
    }

    /// Should the yielded value be yielded as a pointer?
    bool isIndirect() const { return IsIndirect; }

    /// Is the yielded value formally indirect?
    bool isFormalIndirect() const { return YieldTy.isAddress(); }

    llvm::PointerType *getIndirectPointerType() const {
      assert(isIndirect());
      return YieldTI.getStorageType()->getPointerTo();
    }

    const NativeConventionSchema &getDirectSchema() const {
      assert(!isIndirect());
      return *NativeSchema;
    }

    /// Invoke fn once for each scalar component of the direct schema.
    void enumerateDirectComponents(llvm::function_ref<void(llvm::Type*)> fn) {
      getDirectSchema().enumerateComponents([&](clang::CharUnits begin,
                                                clang::CharUnits end,
                                                llvm::Type *componentTy) {
        fn(componentTy);
      });
    }
  };
}
/// Compute the lowered result type of a coroutine entrypoint: the
/// continuation pointer plus the yielded values, with trailing
/// components spilled to a pointed-to overflow struct when the ABI
/// refuses to return them all directly.
void SignatureExpansion::expandCoroutineResult(bool forContinuation) {
  assert(FnType->getNumResults() == 0 &&
         "having both normal and yield results is currently unsupported");

  // The return type may be different for the ramp function vs. the
  // continuations.
  if (forContinuation) {
    switch (FnType->getCoroutineKind()) {
    case SILCoroutineKind::None:
      llvm_unreachable("should have been filtered out before here");

    // Yield-once coroutines just return void from the continuation.
    case SILCoroutineKind::YieldOnce:
      ResultIRType = IGM.VoidTy;
      return;

    // Yield-many coroutines yield the same types from the continuation
    // as they do from the ramp function.
    case SILCoroutineKind::YieldMany:
      break;
    }
  }

  SmallVector<llvm::Type*, 8> components;

  // The continuation pointer.
  components.push_back(IGM.Int8PtrTy);

  auto fnConv = getSILFuncConventions();
  for (auto yield : FnType->getYields()) {
    YieldSchema schema(IGM, fnConv, yield);

    // If the individual value must be yielded indirectly, add a pointer.
    if (schema.isIndirect()) {
      components.push_back(schema.getIndirectPointerType());
      continue;
    }

    // Otherwise, collect all the component types.
    schema.enumerateDirectComponents([&](llvm::Type *type) {
      components.push_back(type);
    });
  }

  // Find the maximal sequence of the component types that we can
  // convince the ABI to pass directly.
  // When counting components, ignore the continuation pointer.
  unsigned numDirectComponents = components.size() - 1;
  SmallVector<llvm::Type*, 8> overflowTypes;
  while (clang::CodeGen::swiftcall::
           shouldPassIndirectly(IGM.ClangCodeGen->CGM(), components,
                                /*asReturnValue*/ true)) {
    // If we added a pointer to the end of components, remove it.
    if (!overflowTypes.empty()) components.pop_back();

    // Remove the last component and add it as an overflow type.
    overflowTypes.push_back(components.pop_back_val());
    numDirectComponents--;

    // Add a pointer to the end of components.
    components.push_back(IGM.Int8PtrTy);
  }

  // We'd better have been able to pass at least two pointers.
  assert(components.size() >= 2 || overflowTypes.empty());
  CoroInfo.NumDirectYieldComponents = numDirectComponents;

  // Replace the pointer type we added to components with the real
  // pointer-to-overflow type.
  if (!overflowTypes.empty()) {
    // overflowTypes was filled back-to-front; restore source order.
    std::reverse(overflowTypes.begin(), overflowTypes.end());

    // TODO: should we use some sort of real layout here instead of
    // trusting LLVM's?
    components.back() =
      llvm::StructType::get(IGM.getLLVMContext(), overflowTypes)
        ->getPointerTo();
  }

  ResultIRType = components.size() == 1
                   ? components.front()
                   : llvm::StructType::get(IGM.getLLVMContext(), components);
}
/// Expand the parameters of a coroutine continuation entrypoint:
/// the context pointer plus an i1 "is this an unwind" flag.
void SignatureExpansion::expandCoroutineContinuationParameters() {
  // The coroutine context.
  addCoroutineContextParameter();

  // Whether this is an unwind resumption.
  ParamIRTypes.push_back(IGM.Int1Ty);
}
// Add the coroutine context parameter: an i8* known to be unaliased and
// dereferenceable for the full fixed-size coroutine buffer.
void SignatureExpansion::addCoroutineContextParameter() {
  auto paramIndex = getCurParamIndex();
  auto bufferSize = getCoroutineContextSize(IGM, FnType);
  Attrs = Attrs.addDereferenceableParamAttr(IGM.getLLVMContext(), paramIndex,
                                            bufferSize.getValue());
  Attrs = Attrs.addParamAttribute(IGM.getLLVMContext(), paramIndex,
                                  llvm::Attribute::NoAlias);
  ParamIRTypes.push_back(IGM.Int8PtrTy);
}
/// Compute the native (Swift ABI) lowering for a type. Loadable types
/// are lowered through Clang's swiftcall aggregate-lowering machinery;
/// non-loadable (address-only) types are always passed indirectly.
NativeConventionSchema::NativeConventionSchema(IRGenModule &IGM,
                                               const TypeInfo *ti,
                                               bool IsResult)
    : Lowering(IGM.ClangCodeGen->CGM()) {
  if (auto *loadable = dyn_cast<LoadableTypeInfo>(ti)) {
    // Lower the type according to the Swift ABI.
    loadable->addToAggLowering(IGM, Lowering, Size(0));
    Lowering.finish();
    // Should we pass indirectly according to the ABI?
    RequiresIndirect = Lowering.shouldPassIndirectly(IsResult);
  } else {
    Lowering.finish();
    RequiresIndirect = true;
  }
}
/// Expand this schema into a single LLVM type: void when empty, the bare
/// component type for one component, otherwise an unpacked struct of all
/// component types.
llvm::Type *NativeConventionSchema::getExpandedType(IRGenModule &IGM) const {
  if (empty())
    return IGM.VoidTy;

  SmallVector<llvm::Type *, 8> componentTypes;
  Lowering.enumerateComponents(
      [&](clang::CharUnits begin, clang::CharUnits end, llvm::Type *type) {
        componentTypes.push_back(type);
      });

  if (componentTypes.size() == 1)
    return componentTypes.front();

  return llvm::StructType::get(IGM.getLLVMContext(), componentTypes,
                               /*packed*/ false);
}
/// Build the pair of coercion structs used to pass this schema:
/// the first struct holds all non-overlapped components plus the integer
/// portions of overlapped ones; the second holds only the non-integer
/// overlapped components. For every element appended to either struct
/// (padding excluded), expandedTyIndicesMap records the index of the
/// originating schema component.
std::pair<llvm::StructType *, llvm::StructType *>
NativeConventionSchema::getCoercionTypes(
    IRGenModule &IGM, SmallVectorImpl<unsigned> &expandedTyIndicesMap) const {
  auto &ctx = IGM.getLLVMContext();

  if (empty()) {
    auto type = llvm::StructType::get(ctx);
    return {type, type};
  }

  clang::CharUnits lastEnd = clang::CharUnits::Zero();
  llvm::SmallSet<unsigned, 8> overlappedWithSuccessor;
  unsigned idx = 0;

  // Mark overlapping ranges: a component overlaps its successor when the
  // successor starts before this component's end.
  Lowering.enumerateComponents(
      [&](clang::CharUnits offset, clang::CharUnits end, llvm::Type *type) {
        if (offset < lastEnd) {
          overlappedWithSuccessor.insert(idx);
        }
        lastEnd = end;
        ++idx;
      });

  // Create the coercion struct with only the integer portion of overlapped
  // components and non-overlapped components.
  idx = 0;
  lastEnd = clang::CharUnits::Zero();
  SmallVector<llvm::Type *, 8> elts;
  bool packed = false;
  Lowering.enumerateComponents(
      [&](clang::CharUnits begin, clang::CharUnits end, llvm::Type *type) {
        // A component counts as overlapped if it overlaps either its
        // predecessor or its successor.
        bool overlapped = overlappedWithSuccessor.count(idx) ||
                          (idx && overlappedWithSuccessor.count(idx - 1));
        ++idx;
        if (overlapped && !isa<llvm::IntegerType>(type)) {
          // keep the old lastEnd for padding.
          return;
        }
        // Add padding (which may include padding for overlapped non-integer
        // components).
        if (begin != lastEnd) {
          auto paddingSize = begin - lastEnd;
          assert(!paddingSize.isNegative());
          auto padding = llvm::ArrayType::get(llvm::Type::getInt8Ty(ctx),
                                              paddingSize.getQuantity());
          elts.push_back(padding);
        }
        // The struct must be packed if any component is not placed at its
        // natural ABI alignment.
        if (!packed &&
            !begin.isMultipleOf(clang::CharUnits::fromQuantity(
                IGM.DataLayout.getABITypeAlignment(type))))
          packed = true;
        elts.push_back(type);
        expandedTyIndicesMap.push_back(idx - 1);
        lastEnd = begin + clang::CharUnits::fromQuantity(
                              IGM.DataLayout.getTypeAllocSize(type));
        assert(end <= lastEnd);
      });

  auto *coercionType = llvm::StructType::get(ctx, elts, packed);
  if (overlappedWithSuccessor.empty())
    return {coercionType, llvm::StructType::get(ctx)};

  // Create the coercion struct with only the non-integer overlapped
  // components.
  idx = 0;
  lastEnd = clang::CharUnits::Zero();
  elts.clear();
  packed = false;
  Lowering.enumerateComponents(
      [&](clang::CharUnits begin, clang::CharUnits end, llvm::Type *type) {
        bool overlapped = overlappedWithSuccessor.count(idx) ||
                          (idx && overlappedWithSuccessor.count(idx - 1));
        ++idx;
        if (!overlapped || (overlapped && isa<llvm::IntegerType>(type))) {
          // Ignore and keep the old lastEnd for padding.
          return;
        }
        // Add padding.
        if (begin != lastEnd) {
          auto paddingSize = begin - lastEnd;
          assert(!paddingSize.isNegative());
          auto padding = llvm::ArrayType::get(llvm::Type::getInt8Ty(ctx),
                                              paddingSize.getQuantity());
          elts.push_back(padding);
        }
        if (!packed &&
            !begin.isMultipleOf(clang::CharUnits::fromQuantity(
                IGM.DataLayout.getABITypeAlignment(type))))
          packed = true;
        elts.push_back(type);
        expandedTyIndicesMap.push_back(idx - 1);
        lastEnd = begin + clang::CharUnits::fromQuantity(
                              IGM.DataLayout.getTypeAllocSize(type));
        assert(end <= lastEnd);
      });

  auto *overlappedCoercionType = llvm::StructType::get(ctx, elts, packed);
  return {coercionType, overlappedCoercionType};
}
// TODO: Direct to Indirect result conversion could be handled in a SIL
// AddressLowering pass.
/// Lower the direct result type, falling back to an indirect return
/// when the native convention requires it.
llvm::Type *SignatureExpansion::expandDirectResult() {
  // Handle the direct result type, checking for supposedly scalar
  // result types that we actually want to return indirectly.
  auto resultType = getSILFuncConventions().getSILResultType();

  // Fast-path the empty tuple type.
  if (auto tuple = resultType.getAs<TupleType>())
    if (tuple->getNumElements() == 0)
      return IGM.VoidTy;

  switch (FnType->getLanguage()) {
  case SILFunctionLanguage::C:
    llvm_unreachable("Expanding C/ObjC parameters in the wrong place!");
    break;
  case SILFunctionLanguage::Swift: {
    auto &ti = IGM.getTypeInfo(resultType);
    auto &native = ti.nativeReturnValueSchema(IGM);
    if (native.requiresIndirect())
      return addIndirectResult();

    // Disable the use of sret if we have a non-trivial direct result.
    if (!native.empty()) CanUseSRet = false;
    return native.getExpandedType(IGM);
  }
  }
  llvm_unreachable("Not a valid SILFunctionLanguage.");
}
/// Return the field of a C union with the largest size as computed by
/// the given Clang AST context; asserts the union is non-empty and
/// contains no bit-fields.
static const clang::FieldDecl *
getLargestUnionField(const clang::RecordDecl *record,
                     const clang::ASTContext &ctx) {
  const clang::FieldDecl *bestField = nullptr;
  clang::CharUnits bestSize = clang::CharUnits::Zero();
  for (auto field : record->fields()) {
    assert(!field->isBitField());
    auto fieldSize = ctx.getTypeSizeInChars(field->getType());
    if (fieldSize > bestSize) {
      bestSize = fieldSize;
      bestField = field;
    }
  }
  assert(bestField && "empty union?");
  return bestField;
}
namespace {
/// A CRTP class for working with Clang's ABIArgInfo::Expand
/// argument type expansions.
///
/// Subclasses supply the leaf/aggregate callbacks (visitScalar,
/// visitArrayElement, visitComplexElement, visitUnionField,
/// visitStructField, beginArrayElements, beginStructFields); this base
/// class drives the recursion over the Clang type structure.
template <class Impl, class... Args> struct ClangExpand {
  IRGenModule &IGM;
  const clang::ASTContext &Ctx;
  ClangExpand(IRGenModule &IGM) : IGM(IGM), Ctx(IGM.getClangASTContext()) {}

  /// CRTP downcast to the concrete visitor.
  Impl &asImpl() { return *static_cast<Impl*>(this); }

  /// Recursively decompose \p type, forwarding \p args... to the
  /// subclass callbacks for each aggregate element or leaf scalar.
  void visit(clang::CanQualType type, Args... args) {
    switch (type->getTypeClass()) {
    // Enumerate the non-canonical and dependent type classes via the
    // Clang type-node X-macros so this switch stays exhaustive.
#define TYPE(Class, Base)
#define NON_CANONICAL_TYPE(Class, Base) \
    case clang::Type::Class:
#define DEPENDENT_TYPE(Class, Base) \
    case clang::Type::Class:
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) \
    case clang::Type::Class:
#include "clang/AST/TypeNodes.def"
      llvm_unreachable("canonical or dependent type in ABI lowering");

    // These shouldn't occur in expandable struct types.
    case clang::Type::IncompleteArray:
    case clang::Type::VariableArray:
      llvm_unreachable("variable-sized or incomplete array in ABI lowering");

    // We should only ever get ObjC pointers, not underlying objects.
    case clang::Type::ObjCInterface:
    case clang::Type::ObjCObject:
      llvm_unreachable("ObjC object type in ABI lowering");

    // We should only ever get function pointers.
    case clang::Type::FunctionProto:
    case clang::Type::FunctionNoProto:
      llvm_unreachable("non-pointer function type in ABI lowering");

    // We currently never import C++ code, and we should be able to
    // kill Expand before we do.
    case clang::Type::LValueReference:
    case clang::Type::RValueReference:
    case clang::Type::MemberPointer:
    case clang::Type::Auto:
    case clang::Type::DeducedTemplateSpecialization:
      llvm_unreachable("C++ type in ABI lowering?");

    case clang::Type::Pipe:
      llvm_unreachable("OpenCL type in ABI lowering?");

    case clang::Type::ConstantArray: {
      // Expand a fixed-size array as N copies of its element type.
      auto array = Ctx.getAsConstantArrayType(type);
      auto elt = Ctx.getCanonicalType(array->getElementType());
      auto &&context = asImpl().beginArrayElements(elt);
      uint64_t n = array->getSize().getZExtValue();
      for (uint64_t i = 0; i != n; ++i) {
        asImpl().visitArrayElement(elt, i, context, args...);
      }
      return;
    }

    case clang::Type::Record: {
      auto record = cast<clang::RecordType>(type)->getDecl();
      if (record->isUnion()) {
        // A union expands as its largest field only.
        auto largest = getLargestUnionField(record, Ctx);
        asImpl().visitUnionField(record, largest, args...);
      } else {
        auto &&context = asImpl().beginStructFields(record);
        for (auto field : record->fields()) {
          asImpl().visitStructField(record, field, context, args...);
        }
      }
      return;
    }

    case clang::Type::Complex: {
      // _Complex T expands as two elements of T: real (0), imaginary (1).
      auto elt = type.castAs<clang::ComplexType>().getElementType();
      asImpl().visitComplexElement(elt, 0, args...);
      asImpl().visitComplexElement(elt, 1, args...);
      return;
    }

    // Just handle these types as opaque integers.
    case clang::Type::Enum:
    case clang::Type::Atomic:
      asImpl().visitScalar(convertTypeAsInteger(type), args...);
      return;

    case clang::Type::Builtin:
      asImpl().visitScalar(
                      convertBuiltinType(type.castAs<clang::BuiltinType>()),
                           args...);
      return;

    case clang::Type::Vector:
    case clang::Type::ExtVector:
      asImpl().visitScalar(
                       convertVectorType(type.castAs<clang::VectorType>()),
                           args...);
      return;

    // All object pointers lower to an opaque i8*.
    case clang::Type::Pointer:
    case clang::Type::BlockPointer:
    case clang::Type::ObjCObjectPointer:
      asImpl().visitScalar(IGM.Int8PtrTy, args...);
      return;
    }
    llvm_unreachable("bad type kind");
  }

  /// Size of \p type in bytes, per the Clang target layout.
  Size getSizeOfType(clang::QualType type) {
    auto clangSize = Ctx.getTypeSizeInChars(type);
    return Size(clangSize.getQuantity());
  }

private:
  llvm::Type *convertVectorType(clang::CanQual<clang::VectorType> type) {
    auto eltTy =
      convertBuiltinType(type->getElementType().castAs<clang::BuiltinType>());
    return llvm::VectorType::get(eltTy, type->getNumElements());
  }

  /// Lower a Clang builtin type to the LLVM type used for it in ABI
  /// expansion: integers become opaque iN, floats map by semantics.
  llvm::Type *convertBuiltinType(clang::CanQual<clang::BuiltinType> type) {
    switch (type.getTypePtr()->getKind()) {
#define BUILTIN_TYPE(Id, SingletonId)
#define PLACEHOLDER_TYPE(Id, SingletonId) \
    case clang::BuiltinType::Id:
#include "clang/AST/BuiltinTypes.def"
    case clang::BuiltinType::Dependent:
      llvm_unreachable("placeholder type in ABI lowering");

    // We should never see these unadorned.
    case clang::BuiltinType::ObjCId:
    case clang::BuiltinType::ObjCClass:
    case clang::BuiltinType::ObjCSel:
      llvm_unreachable("bare Objective-C object type in ABI lowering");

    // This should never be the type of an argument or field.
    case clang::BuiltinType::Void:
      llvm_unreachable("bare void type in ABI lowering");

    // We should never see the OpenCL builtin types at all.
    case clang::BuiltinType::OCLImage1dRO:
    case clang::BuiltinType::OCLImage1dRW:
    case clang::BuiltinType::OCLImage1dWO:
    case clang::BuiltinType::OCLImage1dArrayRO:
    case clang::BuiltinType::OCLImage1dArrayRW:
    case clang::BuiltinType::OCLImage1dArrayWO:
    case clang::BuiltinType::OCLImage1dBufferRO:
    case clang::BuiltinType::OCLImage1dBufferRW:
    case clang::BuiltinType::OCLImage1dBufferWO:
    case clang::BuiltinType::OCLImage2dRO:
    case clang::BuiltinType::OCLImage2dRW:
    case clang::BuiltinType::OCLImage2dWO:
    case clang::BuiltinType::OCLImage2dArrayRO:
    case clang::BuiltinType::OCLImage2dArrayRW:
    case clang::BuiltinType::OCLImage2dArrayWO:
    case clang::BuiltinType::OCLImage2dDepthRO:
    case clang::BuiltinType::OCLImage2dDepthRW:
    case clang::BuiltinType::OCLImage2dDepthWO:
    case clang::BuiltinType::OCLImage2dArrayDepthRO:
    case clang::BuiltinType::OCLImage2dArrayDepthRW:
    case clang::BuiltinType::OCLImage2dArrayDepthWO:
    case clang::BuiltinType::OCLImage2dMSAARO:
    case clang::BuiltinType::OCLImage2dMSAARW:
    case clang::BuiltinType::OCLImage2dMSAAWO:
    case clang::BuiltinType::OCLImage2dArrayMSAARO:
    case clang::BuiltinType::OCLImage2dArrayMSAARW:
    case clang::BuiltinType::OCLImage2dArrayMSAAWO:
    case clang::BuiltinType::OCLImage2dMSAADepthRO:
    case clang::BuiltinType::OCLImage2dMSAADepthRW:
    case clang::BuiltinType::OCLImage2dMSAADepthWO:
    case clang::BuiltinType::OCLImage2dArrayMSAADepthRO:
    case clang::BuiltinType::OCLImage2dArrayMSAADepthRW:
    case clang::BuiltinType::OCLImage2dArrayMSAADepthWO:
    case clang::BuiltinType::OCLImage3dRO:
    case clang::BuiltinType::OCLImage3dRW:
    case clang::BuiltinType::OCLImage3dWO:
    case clang::BuiltinType::OCLSampler:
    case clang::BuiltinType::OCLEvent:
    case clang::BuiltinType::OCLClkEvent:
    case clang::BuiltinType::OCLQueue:
    case clang::BuiltinType::OCLReserveID:
      llvm_unreachable("OpenCL type in ABI lowering");

    // Handle all the integer types as opaque values.
#define BUILTIN_TYPE(Id, SingletonId)
#define SIGNED_TYPE(Id, SingletonId) \
    case clang::BuiltinType::Id:
#define UNSIGNED_TYPE(Id, SingletonId) \
    case clang::BuiltinType::Id:
#include "clang/AST/BuiltinTypes.def"
      return convertTypeAsInteger(type);

    // Lower all the floating-point values by their semantics.
    case clang::BuiltinType::Half:
      return convertFloatingType(Ctx.getTargetInfo().getHalfFormat());
    case clang::BuiltinType::Float:
      return convertFloatingType(Ctx.getTargetInfo().getFloatFormat());
    case clang::BuiltinType::Double:
      return convertFloatingType(Ctx.getTargetInfo().getDoubleFormat());
    case clang::BuiltinType::LongDouble:
      return convertFloatingType(Ctx.getTargetInfo().getLongDoubleFormat());
    case clang::BuiltinType::Float16:
      llvm_unreachable("When upstream support is added for Float16 in "
                       "clang::TargetInfo, use the implementation here");
    case clang::BuiltinType::Float128:
      return convertFloatingType(Ctx.getTargetInfo().getFloat128Format());

    // nullptr_t -> void*
    case clang::BuiltinType::NullPtr:
      return IGM.Int8PtrTy;
    }
    llvm_unreachable("bad builtin type");
  }

  /// Map an llvm::fltSemantics object to the matching LLVM FP type.
  llvm::Type *convertFloatingType(const llvm::fltSemantics &format) {
    if (&format == &llvm::APFloat::IEEEhalf())
      return llvm::Type::getHalfTy(IGM.getLLVMContext());
    if (&format == &llvm::APFloat::IEEEsingle())
      return llvm::Type::getFloatTy(IGM.getLLVMContext());
    if (&format == &llvm::APFloat::IEEEdouble())
      return llvm::Type::getDoubleTy(IGM.getLLVMContext());
    if (&format == &llvm::APFloat::IEEEquad())
      return llvm::Type::getFP128Ty(IGM.getLLVMContext());
    if (&format == &llvm::APFloat::PPCDoubleDouble())
      return llvm::Type::getPPC_FP128Ty(IGM.getLLVMContext());
    if (&format == &llvm::APFloat::x87DoubleExtended())
      return llvm::Type::getX86_FP80Ty(IGM.getLLVMContext());
    llvm_unreachable("bad float format");
  }

  /// Lower a type as an opaque integer of exactly its bit-width.
  llvm::Type *convertTypeAsInteger(clang::QualType type) {
    auto size = getSizeOfType(type);
    return llvm::IntegerType::get(IGM.getLLVMContext(),
                                  size.getValueInBits());
  }
};
/// A CRTP specialization of ClangExpand which projects down to
/// various aggregate elements of an address.
///
/// Subclasses should only have to define visitScalar.
template <class Impl>
class ClangExpandProjection : public ClangExpand<Impl, Address> {
  using super = ClangExpand<Impl, Address>;
  using super::asImpl;
  using super::IGM;
  using super::Ctx;
  using super::getSizeOfType;

protected:
  IRGenFunction &IGF;
  ClangExpandProjection(IRGenFunction &IGF)
    : super(IGF.IGM), IGF(IGF) {}

public:
  /// Entry point: expand \p type rooted at \p addr, which must be i8*.
  void visit(clang::CanQualType type, Address addr) {
    assert(addr.getType() == IGM.Int8PtrTy);
    super::visit(type, addr);
  }

  /// The per-array "context" is the element size, used to step the GEP.
  Size beginArrayElements(clang::CanQualType element) {
    return getSizeOfType(element);
  }
  void visitArrayElement(clang::CanQualType element, unsigned i,
                         Size elementSize, Address arrayAddr) {
    asImpl().visit(element, createGEPAtOffset(arrayAddr, elementSize * i));
  }

  void visitComplexElement(clang::CanQualType element, unsigned i,
                           Address complexAddr) {
    Address addr = complexAddr;
    // The imaginary part (i == 1) lives one element past the real part.
    if (i) { addr = createGEPAtOffset(complexAddr, getSizeOfType(element)); }
    asImpl().visit(element, addr);
  }

  void visitUnionField(const clang::RecordDecl *record,
                       const clang::FieldDecl *field,
                       Address structAddr) {
    // All union fields start at offset 0, so no projection is needed.
    asImpl().visit(Ctx.getCanonicalType(field->getType()), structAddr);
  }

  /// The per-struct "context" is the record layout, consulted for
  /// field offsets.
  const clang::ASTRecordLayout &
  beginStructFields(const clang::RecordDecl *record) {
    return Ctx.getASTRecordLayout(record);
  }
  void visitStructField(const clang::RecordDecl *record,
                        const clang::FieldDecl *field,
                        const clang::ASTRecordLayout &layout,
                        Address structAddr) {
    auto fieldIndex = field->getFieldIndex();
    assert(!field->isBitField());
    // getFieldOffset reports bits; convert to a byte offset.
    auto fieldOffset = Size(layout.getFieldOffset(fieldIndex) / 8);
    asImpl().visit(Ctx.getCanonicalType(field->getType()),
                   createGEPAtOffset(structAddr, fieldOffset));
  }

private:
  // Skip emitting a GEP entirely when the offset is zero.
  Address createGEPAtOffset(Address addr, Size offset) {
    if (offset.isZero()) {
      return addr;
    } else {
      return IGF.Builder.CreateConstByteArrayGEP(addr, offset);
    }
  }
};
/// A class for collecting the types of a Clang ABIArgInfo::Expand
/// argument expansion.
struct ClangExpandTypeCollector : ClangExpand<ClangExpandTypeCollector> {
  // Output buffer: receives one LLVM type per expanded scalar, in order.
  SmallVectorImpl<llvm::Type*> &Types;
  ClangExpandTypeCollector(IRGenModule &IGM,
                           SmallVectorImpl<llvm::Type*> &types)
    : ClangExpand(IGM), Types(types) {}

  // Collection needs no per-aggregate context; a dummy bool fills the
  // context slot required by the ClangExpand callback protocol.
  bool beginArrayElements(clang::CanQualType element) { return true; }
  void visitArrayElement(clang::CanQualType element, unsigned i, bool _) {
    visit(element);
  }

  void visitComplexElement(clang::CanQualType element, unsigned i) {
    visit(element);
  }

  void visitUnionField(const clang::RecordDecl *record,
                       const clang::FieldDecl *field) {
    visit(Ctx.getCanonicalType(field->getType()));
  }

  bool beginStructFields(const clang::RecordDecl *record) { return true; }
  void visitStructField(const clang::RecordDecl *record,
                        const clang::FieldDecl *field,
                        bool _) {
    visit(Ctx.getCanonicalType(field->getType()));
  }

  /// Leaf case: record the lowered scalar type.
  void visitScalar(llvm::Type *type) {
    Types.push_back(type);
  }
};
} // end anonymous namespace
/// Does Clang's Expand lowering of \p type produce exactly the scalar
/// sequence described by \p schema?
static bool doesClangExpansionMatchSchema(IRGenModule &IGM,
                                          clang::CanQualType type,
                                          const ExplosionSchema &schema) {
  assert(!schema.containsAggregate());

  // Flatten the Clang type into its expanded scalar types.
  SmallVector<llvm::Type *, 4> expandedTypes;
  ClangExpandTypeCollector(IGM, expandedTypes).visit(type);

  // Match only if the expansion yields the schema's scalars, in order.
  if (expandedTypes.size() != schema.size())
    return false;

  size_t index = 0;
  for (auto *expandedTy : expandedTypes) {
    if (schema[index].getScalarType() != expandedTy)
      return false;
    ++index;
  }
  return true;
}
/// Expand the result and parameter types to the appropriate LLVM IR
/// types for C and Objective-C signatures.
void SignatureExpansion::expandExternalSignatureTypes() {
assert(FnType->getLanguage() == SILFunctionLanguage::C);
// Convert the SIL result type to a Clang type.
auto clangResultTy = IGM.getClangType(FnType->getFormalCSemanticResult());
// Now convert the parameters to Clang types.
auto params = FnType->getParameters();
SmallVector<clang::CanQualType,4> paramTys;
auto const &clangCtx = IGM.getClangASTContext();
switch (FnType->getRepresentation()) {
case SILFunctionTypeRepresentation::ObjCMethod: {
// ObjC methods take their 'self' argument first, followed by an
// implicit _cmd argument.
auto &self = params.back();
auto clangTy = IGM.getClangType(self);
paramTys.push_back(clangTy);
paramTys.push_back(clangCtx.VoidPtrTy);
params = params.drop_back();
break;
}
case SILFunctionTypeRepresentation::Block:
// Blocks take their context argument first.
paramTys.push_back(clangCtx.VoidPtrTy);
break;
case SILFunctionTypeRepresentation::CFunctionPointer:
// No implicit arguments.
break;
case SILFunctionTypeRepresentation::Thin:
case SILFunctionTypeRepresentation::Thick:
case SILFunctionTypeRepresentation::Method:
case SILFunctionTypeRepresentation::WitnessMethod:
case SILFunctionTypeRepresentation::Closure:
llvm_unreachable("not a C representation");
}
// Given an index within the clang parameters list, what do we need
// to subtract from it to get to the corresponding index within the
// Swift parameters list?
size_t clangToSwiftParamOffset = paramTys.size();
// Convert each parameter to a Clang type.
for (auto param : params) {
auto clangTy = IGM.getClangType(param);
paramTys.push_back(clangTy);
}
// Generate function info for this signature.
auto extInfo = clang::FunctionType::ExtInfo();
auto &FI = clang::CodeGen::arrangeFreeFunctionCall(IGM.ClangCodeGen->CGM(),
clangResultTy, paramTys, extInfo,
clang::CodeGen::RequiredArgs::All);
ForeignInfo.ClangInfo = &FI;
assert(FI.arg_size() == paramTys.size() &&
"Expected one ArgInfo for each parameter type!");
auto &returnInfo = FI.getReturnInfo();
// Does the result need an extension attribute?
if (returnInfo.isExtend()) {
bool signExt = clangResultTy->hasSignedIntegerRepresentation();
assert((signExt || clangResultTy->hasUnsignedIntegerRepresentation()) &&
"Invalid attempt to add extension attribute to argument!");
addExtendAttribute(IGM, Attrs, llvm::AttributeList::ReturnIndex, signExt);
}
// If we return indirectly, that is the first parameter type.
if (returnInfo.isIndirect()) {
addIndirectResult();
}
size_t firstParamToLowerNormally = 0;
// Use a special IR type for passing block pointers.
if (FnType->getRepresentation() == SILFunctionTypeRepresentation::Block) {
assert(FI.arg_begin()[0].info.isDirect() &&
"block pointer not passed directly?");
ParamIRTypes.push_back(IGM.ObjCBlockPtrTy);
firstParamToLowerNormally = 1;
}
for (auto i : indices(paramTys).slice(firstParamToLowerNormally)) {
auto &AI = FI.arg_begin()[i].info;
// Add a padding argument if required.
if (auto *padType = AI.getPaddingType())
ParamIRTypes.push_back(padType);
switch (AI.getKind()) {
case clang::CodeGen::ABIArgInfo::Extend: {
bool signExt = paramTys[i]->hasSignedIntegerRepresentation();
assert((signExt || paramTys[i]->hasUnsignedIntegerRepresentation()) &&
"Invalid attempt to add extension attribute to argument!");
addExtendAttribute(IGM, Attrs, getCurParamIndex() +
llvm::AttributeList::FirstArgIndex, signExt);
LLVM_FALLTHROUGH;
}
case clang::CodeGen::ABIArgInfo::Direct: {
switch (FI.getExtParameterInfo(i).getABI()) {
case clang::ParameterABI::Ordinary:
break;
case clang::ParameterABI::SwiftContext:
IGM.addSwiftSelfAttributes(Attrs, getCurParamIndex());
break;
case clang::ParameterABI::SwiftErrorResult:
IGM.addSwiftErrorAttributes(Attrs, getCurParamIndex());
break;
case clang::ParameterABI::SwiftIndirectResult:
addIndirectResultAttributes(IGM, Attrs, getCurParamIndex(),claimSRet());
break;
}
// If the coercion type is a struct which can be flattened, we need to
// expand it.
auto *coercedTy = AI.getCoerceToType();
if (AI.isDirect() && AI.getCanBeFlattened() &&
isa<llvm::StructType>(coercedTy)) {
const auto *ST = cast<llvm::StructType>(coercedTy);
for (unsigned EI : range(ST->getNumElements()))
ParamIRTypes.push_back(ST->getElementType(EI));
} else {
ParamIRTypes.push_back(coercedTy);
}
break;
}
case clang::CodeGen::ABIArgInfo::CoerceAndExpand: {
auto types = AI.getCoerceAndExpandTypeSequence();
ParamIRTypes.append(types.begin(), types.end());
break;
}
case clang::CodeGen::ABIArgInfo::Indirect: {
assert(i >= clangToSwiftParamOffset &&
"Unexpected index for indirect byval argument");
auto ¶m = params[i - clangToSwiftParamOffset];
auto paramTy = getSILFuncConventions().getSILType(param);
auto ¶mTI = cast<FixedTypeInfo>(IGM.getTypeInfo(paramTy));
if (AI.getIndirectByVal())
addByvalArgumentAttributes(
IGM, Attrs, getCurParamIndex(),
Alignment(AI.getIndirectAlign().getQuantity()));
addPointerParameter(paramTI.getStorageType());
break;
}
case clang::CodeGen::ABIArgInfo::Expand:
ClangExpandTypeCollector(IGM, ParamIRTypes).visit(paramTys[i]);
break;
case clang::CodeGen::ABIArgInfo::Ignore:
break;
case clang::CodeGen::ABIArgInfo::InAlloca:
llvm_unreachable("Need to handle InAlloca during signature expansion");
}
}
if (returnInfo.isIndirect() || returnInfo.isIgnore()) {
ResultIRType = IGM.VoidTy;
} else {
ResultIRType = returnInfo.getCoerceToType();
}
}
/// View \p ty as a sequence of scalar types: a struct yields its
/// element list, anything else a one-element sequence of itself.
///
/// \p ty is taken by reference so the single-element ArrayRef can
/// safely point at the caller's storage.
static ArrayRef<llvm::Type *> expandScalarOrStructTypeToArray(llvm::Type *&ty) {
  if (auto *structTy = dyn_cast<llvm::StructType>(ty)) {
    // Is there any good reason this isn't public API of llvm::StructType?
    return makeArrayRef(structTy->element_begin(), structTy->getNumElements());
  }
  return ty;
}
/// Expand a single abstract SIL parameter into the physical LLVM
/// parameter type(s) it lowers to, recording any needed attributes.
void SignatureExpansion::expand(SILParameterInfo param) {
  auto paramSILType = getSILFuncConventions().getSILType(param);
  auto &ti = IGM.getTypeInfo(paramSILType);
  switch (auto conv = param.getConvention()) {
  case ParameterConvention::Indirect_In:
  case ParameterConvention::Indirect_In_Constant:
  case ParameterConvention::Indirect_In_Guaranteed:
    // Consumed/guaranteed indirect values pass a single pointer to
    // their storage.
    addIndirectValueParameterAttributes(IGM, Attrs, ti, ParamIRTypes.size());
    addPointerParameter(
        IGM.getStorageType(getSILFuncConventions().getSILType(param)));
    return;

  case ParameterConvention::Indirect_Inout:
  case ParameterConvention::Indirect_InoutAliasable:
    // Inout parameters also pass a pointer, but with different
    // aliasing guarantees reflected in the attributes.
    addInoutParameterAttributes(IGM, Attrs, ti, ParamIRTypes.size(),
                          conv == ParameterConvention::Indirect_InoutAliasable);
    addPointerParameter(
        IGM.getStorageType(getSILFuncConventions().getSILType(param)));
    return;

  case ParameterConvention::Direct_Owned:
  case ParameterConvention::Direct_Unowned:
  case ParameterConvention::Direct_Guaranteed:
    switch (FnType->getLanguage()) {
    case SILFunctionLanguage::C: {
      // C signatures are lowered wholesale in
      // expandExternalSignatureTypes, never per-parameter here.
      llvm_unreachable("Unexpected C/ObjC method in parameter expansion!");
      return;
    }
    case SILFunctionLanguage::Swift: {
      auto &nativeSchema = ti.nativeParameterValueSchema(IGM);
      if (nativeSchema.requiresIndirect()) {
        // The native convention demands indirect passing (e.g. the
        // value is too large for registers).
        addIndirectValueParameterAttributes(IGM, Attrs, ti,
                                            ParamIRTypes.size());
        ParamIRTypes.push_back(ti.getStorageType()->getPointerTo());
        return;
      }
      if (nativeSchema.empty()) {
        // Empty types lower to no parameters at all.
        assert(ti.getSchema().empty());
        return;
      }
      // Flatten the native schema's expanded type into scalar params.
      auto expandedTy = nativeSchema.getExpandedType(IGM);
      auto expandedTysArray = expandScalarOrStructTypeToArray(expandedTy);
      for (auto *Ty : expandedTysArray)
        ParamIRTypes.push_back(Ty);
      return;
    }
    }
    llvm_unreachable("bad abstract CC");
  }
  llvm_unreachable("bad parameter convention");
}
/// Does the given function type have a self parameter that should be
/// given the special treatment for self parameters?
///
/// It's important that this only return true for things that are
/// passed as a single pointer.
bool irgen::hasSelfContextParameter(CanSILFunctionType fnType) {
  if (!fnType->hasSelfParam())
    return false;

  SILParameterInfo param = fnType->getSelfParameter();

  // All the indirect conventions pass a single pointer.
  if (param.isFormalIndirect())
    return true;

  // Direct conventions depend on the type.
  CanType type = param.getType();

  // Thick or @objc metatypes (but not existential metatypes).
  if (auto metatype = dyn_cast<MetatypeType>(type))
    return metatype->getRepresentation() != MetatypeRepresentation::Thin;

  // Classes and class-bounded archetypes or ObjC existentials.
  // No need to apply this to existentials.
  // The direct check for SubstitutableType works because only
  // class-bounded generic types can be passed directly.
  return type->mayHaveSuperclass() || isa<SubstitutableType>(type) ||
         type->isObjCExistentialType();
}
/// Expand the abstract parameters of a SIL function type into the physical
/// parameters of an LLVM function type (results have already been expanded).
///
/// Parameter order: [coroutine context] [formal params] [generic
/// signature] [self/context] [error pointer] [witness-method extras].
void SignatureExpansion::expandParameters() {
  assert(FnType->getRepresentation() != SILFunctionTypeRepresentation::Block
         && "block with non-C calling conv?!");

  // First, if this is a coroutine, add the coroutine-context parameter.
  switch (FnType->getCoroutineKind()) {
  case SILCoroutineKind::None:
    break;
  case SILCoroutineKind::YieldOnce:
  case SILCoroutineKind::YieldMany:
    addCoroutineContextParameter();
    break;
  }

  // Next, the formal parameters. But 'self' is treated as the
  // context if it has pointer representation.
  auto params = FnType->getParameters();
  bool hasSelfContext = false;
  if (hasSelfContextParameter(FnType)) {
    hasSelfContext = true;
    params = params.drop_back();
  }

  for (auto param : params) {
    expand(param);
  }

  // Next, the generic signature.
  if (hasPolymorphicParameters(FnType))
    expandPolymorphicSignature(IGM, FnType, ParamIRTypes);

  // Context is next.
  if (hasSelfContext) {
    auto curLength = ParamIRTypes.size(); (void) curLength;

    if (claimSelf())
      IGM.addSwiftSelfAttributes(Attrs, curLength);
    expand(FnType->getSelfParameter());
    // A pointer-representable self must lower to exactly one parameter.
    assert(ParamIRTypes.size() == curLength + 1 &&
           "adding 'self' added unexpected number of parameters");
  } else {
    auto needsContext = [=]() -> bool {
      switch (FnType->getRepresentation()) {
      case SILFunctionType::Representation::Block:
        llvm_unreachable("adding block parameter in Swift CC expansion?");

      // Always leave space for a context argument if we have an error result.
      case SILFunctionType::Representation::CFunctionPointer:
      case SILFunctionType::Representation::Method:
      case SILFunctionType::Representation::WitnessMethod:
      case SILFunctionType::Representation::ObjCMethod:
      case SILFunctionType::Representation::Thin:
      case SILFunctionType::Representation::Closure:
        return FnType->hasErrorResult();

      case SILFunctionType::Representation::Thick:
        return true;
      }
      llvm_unreachable("bad representation kind");
    };
    if (needsContext()) {
      if (claimSelf())
        IGM.addSwiftSelfAttributes(Attrs, ParamIRTypes.size());
      ParamIRTypes.push_back(IGM.RefCountedPtrTy);
    }
  }

  // Error results are last. We always pass them as a pointer to the
  // formal error type; LLVM will magically turn this into a non-pointer
  // if we set the right attribute.
  if (FnType->hasErrorResult()) {
    if (claimError())
      IGM.addSwiftErrorAttributes(Attrs, ParamIRTypes.size());
    llvm::Type *errorType = IGM.getStorageType(
        getSILFuncConventions().getSILType(FnType->getErrorResult()));
    ParamIRTypes.push_back(errorType->getPointerTo());
  }

  // Witness methods have some extra parameter types.
  if (FnType->getRepresentation() ==
        SILFunctionTypeRepresentation::WitnessMethod) {
    expandTrailingWitnessSignature(IGM, FnType, ParamIRTypes);
  }
}
/// Expand the result and parameter types of a SIL function into the
/// physical parameter types of an LLVM function and return the result
/// type.
void SignatureExpansion::expandFunctionType() {
  switch (FnType->getLanguage()) {
  case SILFunctionLanguage::Swift:
    // Native convention: expand results first, then parameters.
    expandResult();
    expandParameters();
    return;
  case SILFunctionLanguage::C:
    // Foreign convention: delegate the whole signature to Clang.
    expandExternalSignatureTypes();
    return;
  }
  llvm_unreachable("bad abstract calling convention");
}
/// Expand the signature of a coroutine continuation function: the
/// coroutine result shape (with the continuation flag set) plus the
/// continuation's own parameters.
void SignatureExpansion::expandCoroutineContinuationType() {
  expandCoroutineResult(/*for continuation*/ true);
  expandCoroutineContinuationParameters();
}
/// Package the accumulated expansion state into a Signature value.
Signature SignatureExpansion::getSignature() {
  // Create the appropriate LLVM type.
  llvm::FunctionType *llvmType =
    llvm::FunctionType::get(ResultIRType, ParamIRTypes, /*variadic*/ false);

  // Exactly the foreign (C) signatures must have gone through Clang's
  // lowering and recorded ClangInfo.
  assert((ForeignInfo.ClangInfo != nullptr) ==
           (FnType->getLanguage() == SILFunctionLanguage::C) &&
         "C function type without C function info");

  auto callingConv = expandCallingConv(IGM, FnType->getRepresentation());

  Signature result;
  result.Type = llvmType;
  result.CallingConv = callingConv;
  result.Attributes = Attrs;
  using ExtraData = Signature::ExtraData;
  // Attach the representation-specific side data: foreign-function
  // info for C signatures, coroutine info for coroutines, else none.
  if (FnType->getLanguage() == SILFunctionLanguage::C) {
    result.ExtraDataKind = ExtraData::kindForMember<ForeignFunctionInfo>();
    result.ExtraDataStorage.emplace<ForeignFunctionInfo>(result.ExtraDataKind,
                                                         ForeignInfo);
  } else if (FnType->isCoroutine()) {
    result.ExtraDataKind = ExtraData::kindForMember<CoroutineInfo>();
    result.ExtraDataStorage.emplace<CoroutineInfo>(result.ExtraDataKind,
                                                   CoroInfo);
  } else {
    result.ExtraDataKind = ExtraData::kindForMember<void>();
  }
  return result;
}
/// Compute a Signature for \p formalType without consulting any cache.
Signature Signature::getUncached(IRGenModule &IGM,
                                 CanSILFunctionType formalType) {
  // Expand inside the function's generic context so that dependent
  // types lower correctly.
  GenericContextScope scope(IGM, formalType->getGenericSignature());

  SignatureExpansion expander(IGM, formalType);
  expander.expandFunctionType();
  return expander.getSignature();
}
/// Compute the Signature of the continuation function of the given
/// coroutine type.
Signature Signature::forCoroutineContinuation(IRGenModule &IGM,
                                              CanSILFunctionType fnType) {
  assert(fnType->isCoroutine());

  SignatureExpansion expander(IGM, fnType);
  expander.expandCoroutineContinuationType();
  return expander.getSignature();
}
/// Break a (possibly coerced) call result down into scalar values and
/// add them to \p out.
void irgen::extractScalarResults(IRGenFunction &IGF, llvm::Type *bodyType,
                                 llvm::Value *call, Explosion &out) {
  assert(!bodyType->isVoidTy() && "Unexpected void result type!");

  // If ABI coercion gave the call a different type than the one used
  // in the body, coerce the value back before extracting elements.
  llvm::Value *returned = call;
  if (call->getType() != bodyType)
    returned = IGF.coerceValue(returned, bodyType, IGF.IGM.DataLayout);

  // Non-struct results are a single scalar.
  auto *structType = dyn_cast<llvm::StructType>(bodyType);
  if (!structType) {
    out.add(returned);
    return;
  }

  // Struct results decompose element-by-element.
  for (unsigned i = 0, e = structType->getNumElements(); i != e; ++i)
    out.add(IGF.Builder.CreateExtractValue(returned, i));
}
/// Emit the unsubstituted result of this call into the given explosion.
/// The unsubstituted result must be naturally returned directly.
void CallEmission::emitToUnmappedExplosion(Explosion &out) {
  assert(LastArgWritten == 0 && "emitting unnaturally to explosion");

  auto call = emitCallSite();

  // Bail out immediately on a void result.
  llvm::Value *result = call.getInstruction();
  if (result->getType()->isVoidTy())
    return;

  SILFunctionConventions fnConv(getCallee().getOrigFunctionType(),
                                IGF.getSILModule());

  // If the result was returned autoreleased, implicitly insert the reclaim.
  // This is only allowed on a single direct result.
  if (fnConv.getNumDirectSILResults() == 1
      && (fnConv.getDirectSILResults().begin()->getConvention()
          == ResultConvention::Autoreleased)) {
    result = emitObjCRetainAutoreleasedReturnValue(IGF, result);
  }

  // Get the natural IR type in the body of the function that makes
  // the call. This may be different than the IR type returned by the
  // call itself due to ABI type coercion.
  auto resultType = fnConv.getSILResultType();
  auto &nativeSchema = IGF.IGM.getTypeInfo(resultType).nativeReturnValueSchema(IGF.IGM);

  // For ABI reasons the result type of the call might not actually match the
  // expected result type.
  auto expectedNativeResultType = nativeSchema.getExpandedType(IGF.IGM);
  if (result->getType() != expectedNativeResultType) {
    // This should only be needed when we call C functions.
    assert(getCallee().getOrigFunctionType()->getLanguage() ==
           SILFunctionLanguage::C);
    result =
        IGF.coerceValue(result, expectedNativeResultType, IGF.IGM.DataLayout);
  }

  // Gather the values.
  Explosion nativeExplosion;
  extractScalarResults(IGF, result->getType(), result, nativeExplosion);

  // Remap from the native calling-convention representation back to
  // the explosion schema the caller expects.
  out = nativeSchema.mapFromNative(IGF.IGM, IGF, nativeExplosion, resultType);
}
/// Emit the unsubstituted result of this call to the given address.
/// The unsubstituted result must be naturally returned indirectly.
void CallEmission::emitToUnmappedMemory(Address result) {
  assert(LastArgWritten == 1 && "emitting unnaturally to indirect result");

  // The indirect result buffer always occupies the first argument slot.
  Args[0] = result.getAddress();
  SILFunctionConventions FnConv(CurCallee.getSubstFunctionType(),
                                IGF.getSILModule());
  // sret applies only when there is exactly one indirect result.
  addIndirectResultAttributes(IGF.IGM, CurCallee.getMutableAttributes(),
                              0, FnConv.getNumIndirectSILResults() <= 1);
#ifndef NDEBUG
  LastArgWritten = 0; // appease an assert
#endif

  emitCallSite();
}
/// The private routine to ultimately emit a call or invoke instruction.
///
/// Coerces argument types to the ABI-required parameter types, emits
/// the call, wraps coroutine callees in @llvm.coro.prepare.retcon, and
/// destroys any non-coroutine temporaries.
llvm::CallSite CallEmission::emitCallSite() {
  assert(LastArgWritten == 0);
  assert(!EmittedCall);
  EmittedCall = true;

  // Make the call and clear the arguments array.
  const auto &fn = getCallee().getFunctionPointer();
  auto fnTy = fn.getFunctionType();

  // Coerce argument types for those cases where the IR type required
  // by the ABI differs from the type used within the function body.
  // (Use an unsigned index: getNumParams() returns unsigned, matching
  // the idiom used elsewhere in this file.)
  assert(fnTy->getNumParams() == Args.size());
  for (unsigned i = 0, e = fnTy->getNumParams(); i != e; ++i) {
    auto *paramTy = fnTy->getParamType(i);
    auto *argTy = Args[i]->getType();
    if (paramTy != argTy)
      Args[i] = IGF.coerceValue(Args[i], paramTy, IGF.IGM.DataLayout);
  }

  // TODO: exceptions!
  auto call = IGF.Builder.CreateCall(fn, Args);

  // Make coroutine calls opaque to LLVM analysis.
  if (IsCoroutine) {
    // Go back and insert some instructions right before the call.
    // It's easier to do this than to mess around with copying and
    // modifying the FunctionPointer above.
    IGF.Builder.SetInsertPoint(call);

    // Insert a call to @llvm.coro.prepare.retcon, then bitcast to the right
    // function type.
    auto origCallee = call->getCalledValue();
    llvm::Value *opaqueCallee = origCallee;
    opaqueCallee =
      IGF.Builder.CreateBitCast(opaqueCallee, IGF.IGM.Int8PtrTy);
    opaqueCallee =
      IGF.Builder.CreateIntrinsicCall(llvm::Intrinsic::ID::coro_prepare_retcon,
                                      { opaqueCallee });
    opaqueCallee =
      IGF.Builder.CreateBitCast(opaqueCallee, origCallee->getType());
    call->setCalledFunction(opaqueCallee);

    // Reset the insert point to after the call.
    IGF.Builder.SetInsertPoint(call->getParent());
  }

  Args.clear();

  // Destroy any temporaries we needed.
  // We don't do this for coroutines because we need to wait until the
  // coroutine is complete.
  if (!IsCoroutine) {
    Temporaries.destroyAll(IGF);

    // Clear the temporary set so that we can assert that there are no
    // temporaries later.
    Temporaries.clear();
  }

  // Return.
  return call;
}
/// Emit a call through a FunctionPointer, propagating its recorded
/// attributes and calling convention onto the instruction.
llvm::CallInst *IRBuilder::CreateCall(const FunctionPointer &fn,
                                      ArrayRef<llvm::Value*> args) {
  assert(!isTrapIntrinsic(fn.getPointer()) && "Use CreateNonMergeableTrap");
  auto *instruction = IRBuilderBase::CreateCall(fn.getPointer(), args);
  instruction->setAttributes(fn.getAttributes());
  instruction->setCallingConv(fn.getCallingConv());
  return instruction;
}
/// Emit the result of this call to memory.
void CallEmission::emitToMemory(Address addr,
                                const LoadableTypeInfo &indirectedResultTI,
                                bool isOutlined) {
  assert(LastArgWritten <= 1);

  // If the call is naturally to an explosion, emit it that way and
  // then initialize the temporary.
  if (LastArgWritten == 0) {
    Explosion result;
    emitToExplosion(result, isOutlined);
    indirectedResultTI.initialize(IGF, result, addr, isOutlined);
    return;
  }

  // Okay, we're naturally emitting to memory.
  Address origAddr = addr;

  auto origFnType = CurCallee.getOrigFunctionType();
  auto substFnType = CurCallee.getSubstFunctionType();

  // We're never being asked to do anything with *formal*
  // indirect results here, just the possibility of a direct-in-SIL
  // result that's actually being passed indirectly.
  //
  // TODO: SIL address lowering should be able to handle such cases earlier.
  auto origResultType = origFnType->getDirectFormalResultsType().getASTType();
  auto substResultType = substFnType->getDirectFormalResultsType().getASTType();

  // Map a dependent original result type into the current generic
  // context before comparing.
  if (origResultType->hasTypeParameter())
    origResultType = IGF.IGM.getGenericEnvironment()
        ->mapTypeIntoContext(origResultType)
        ->getCanonicalType();

  // When abstraction differences make the types disagree, emit through
  // a pointer of the unsubstituted (abstract) storage type.
  if (origResultType != substResultType) {
    auto origTy = IGF.IGM.getStoragePointerTypeForLowered(origResultType);
    origAddr = IGF.Builder.CreateBitCast(origAddr, origTy);
  }

  emitToUnmappedMemory(origAddr);
}
/// Move each scalar from \p in to \p out, bitcasting wherever the IR
/// type differs from what the substituted schema expects.
static void emitCastToSubstSchema(IRGenFunction &IGF, Explosion &in,
                                  const ExplosionSchema &schema,
                                  Explosion &out) {
  assert(in.size() == schema.size());
  for (unsigned idx = 0, count = schema.size(); idx != count; ++idx) {
    llvm::Type *wantedType = schema.begin()[idx].getScalarType();
    llvm::Value *scalar = in.claimNext();
    if (scalar->getType() != wantedType)
      scalar = IGF.Builder.CreateBitCast(scalar, wantedType,
                                         scalar->getName() + ".asSubstituted");
    out.add(scalar);
  }
}
/// Collect the values yielded by a coroutine call into an explosion:
/// the continuation pointer first, then each yield remapped to its
/// substituted schema.
void CallEmission::emitYieldsToExplosion(Explosion &out) {
  // Emit the call site.
  auto call = emitCallSite();

  // Pull the raw return values out.
  Explosion rawReturnValues;
  extractScalarResults(IGF, call->getType(), call.getInstruction(),
                       rawReturnValues);

  auto coroInfo = getCallee().getSignature().getCoroutineInfo();

  // Go ahead and forward the continuation pointer as an opaque pointer.
  auto continuation = rawReturnValues.claimNext();
  out.add(continuation);

  // Collect the raw value components.
  Explosion rawYieldComponents;

  // Add all the direct yield components.
  rawYieldComponents.add(
      rawReturnValues.claim(coroInfo.NumDirectYieldComponents));

  // Add all the indirect yield components.
  // At most one raw value remains: the pointer to the indirect buffer.
  assert(rawReturnValues.size() <= 1);
  if (!rawReturnValues.empty()) {
    // Extract the indirect yield buffer.
    auto indirectPointer = rawReturnValues.claimNext();
    auto indirectStructTy = cast<llvm::StructType>(
        indirectPointer->getType()->getPointerElementType());
    auto layout = IGF.IGM.DataLayout.getStructLayout(indirectStructTy);
    Address indirectBuffer(indirectPointer, Alignment(layout->getAlignment()));

    // Load each non-padding element of the buffer as a yield component.
    for (auto i : indices(indirectStructTy->elements())) {
      // Skip padding.
      if (indirectStructTy->getElementType(i)->isArrayTy())
        continue;

      auto eltAddr = IGF.Builder.CreateStructGEP(indirectBuffer, i, layout);
      rawYieldComponents.add(IGF.Builder.CreateLoad(eltAddr));
    }
  }

  // Now walk the formal yields, consuming raw components per schema.
  auto substCoroType = getCallee().getSubstFunctionType();
  SILFunctionConventions fnConv(substCoroType, IGF.getSILModule());
  for (auto yield : fnConv.getYields()) {
    YieldSchema schema(IGF.IGM, fnConv, yield);

    // If the schema says it's indirect, then we expect a pointer.
    if (schema.isIndirect()) {
      auto pointer = IGF.Builder.CreateBitCast(rawYieldComponents.claimNext(),
                                               schema.getIndirectPointerType());

      // If it's formally indirect, then we should just add that pointer
      // to the output.
      if (schema.isFormalIndirect()) {
        out.add(pointer);
        continue;
      }

      // Otherwise, we need to load.
      auto &yieldTI = cast<LoadableTypeInfo>(schema.getTypeInfo());
      yieldTI.loadAsTake(IGF, yieldTI.getAddressForPointer(pointer), out);
      continue;
    }

    // Otherwise, it's direct. Remap.
    auto temp = schema.getDirectSchema().mapFromNative(IGF.IGM, IGF,
                                                       rawYieldComponents,
                                                       schema.getSILType());
    auto &yieldTI = cast<LoadableTypeInfo>(schema.getTypeInfo());
    emitCastToSubstSchema(IGF, temp, yieldTI.getSchema(), out);
  }
}
/// Emit the result of this call to an explosion.
void CallEmission::emitToExplosion(Explosion &out, bool isOutlined) {
  assert(LastArgWritten <= 1);

  // For coroutine calls, we need to collect the yields, not the results;
  // this looks very different.
  if (IsCoroutine) {
    assert(LastArgWritten == 0 && "coroutine with indirect result?");
    emitYieldsToExplosion(out);
    return;
  }

  SILFunctionConventions fnConv(getCallee().getSubstFunctionType(),
                                IGF.getSILModule());
  SILType substResultType = fnConv.getSILResultType();

  auto &substResultTI =
      cast<LoadableTypeInfo>(IGF.getTypeInfo(substResultType));

  // If the call is naturally to memory, emit it that way and then
  // explode that temporary.  (LastArgWritten == 1 means the sret slot
  // has not been written yet.)
  if (LastArgWritten == 1) {
    StackAddress ctemp = substResultTI.allocateStack(IGF, substResultType,
                                                     "call.aggresult");
    Address temp = ctemp.getAddress();
    emitToMemory(temp, substResultTI, isOutlined);

    // We can use a take: the temporary's value is consumed right here.
    substResultTI.loadAsTake(IGF, temp, out);
    substResultTI.deallocateStack(IGF, ctemp, substResultType);
    return;
  }

  // Okay, we're naturally emitting to an explosion.
  Explosion temp;
  emitToUnmappedExplosion(temp);

  // We might need to bitcast the results.
  emitCastToSubstSchema(IGF, temp, substResultTI.getSchema(), out);
}
/// Move constructor.  Transfers the in-flight call state from \p other
/// and neuters \p other so that its destructor's invariants hold.
CallEmission::CallEmission(CallEmission &&other)
  : IGF(other.IGF),
    Args(std::move(other.Args)),
    CurCallee(std::move(other.CurCallee)),
    LastArgWritten(other.LastArgWritten),
    EmittedCall(other.EmittedCall) {
  // Prevent other's destructor from asserting.  (The previous code reset
  // *this* object's members here, which both clobbered the state that was
  // just moved in and left 'other' to trip its destructor asserts.)
  other.LastArgWritten = 0;
  other.EmittedCall = true;
  // NOTE(review): Temporaries is not explicitly transferred in the
  // initializer list above — confirm its default/moved-from state
  // satisfies the destructor's hasBeenCleared() assert on 'other'.
}
/// Destructor.  By the time an emission is torn down, every argument
/// slot must have been written, the call must have been emitted, and
/// all temporaries must have been destroyed.
CallEmission::~CallEmission() {
  assert(LastArgWritten == 0);
  assert(EmittedCall);
  assert(Temporaries.hasBeenCleared());
}
/// Construct a callee from resolved callee info, the function pointer,
/// and the representation-specific data values (Swift context, block
/// object, or ObjC receiver/selector).
Callee::Callee(CalleeInfo &&info, const FunctionPointer &fn,
               llvm::Value *firstData, llvm::Value *secondData)
  : Info(std::move(info)), Fn(fn),
    FirstData(firstData), SecondData(secondData) {

#ifndef NDEBUG
  // We should have foreign info if it's a foreign call.
  assert((Fn.getForeignInfo().ClangInfo != nullptr) ==
         (Info.OrigFnType->getLanguage() == SILFunctionLanguage::C));

  // We should have the right data values for the representation.
  switch (Info.OrigFnType->getRepresentation()) {
  case SILFunctionTypeRepresentation::ObjCMethod:
    // ObjC methods carry a receiver and a selector.
    assert(FirstData && SecondData);
    break;
  case SILFunctionTypeRepresentation::Method:
  case SILFunctionTypeRepresentation::WitnessMethod:
    // Methods carry a context exactly when 'self' lives in the
    // context slot; they never carry a second value.
    assert((FirstData != nullptr) == hasSelfContextParameter(Info.OrigFnType));
    assert(!SecondData);
    break;
  case SILFunctionTypeRepresentation::Thick:
  case SILFunctionTypeRepresentation::Block:
    // Thick functions and blocks always carry a single data value.
    assert(FirstData && !SecondData);
    break;
  case SILFunctionTypeRepresentation::Thin:
  case SILFunctionTypeRepresentation::Closure:
  case SILFunctionTypeRepresentation::CFunctionPointer:
    // Thin-like representations carry no extra data at all.
    assert(!FirstData && !SecondData);
    break;
  }
#endif
}
/// Return the Swift context value for this callee, or null if the
/// callee's representation does not carry one.
llvm::Value *Callee::getSwiftContext() const {
  switch (Info.OrigFnType->getRepresentation()) {
  case SILFunctionTypeRepresentation::Block:
  case SILFunctionTypeRepresentation::ObjCMethod:
  case SILFunctionTypeRepresentation::CFunctionPointer:
  case SILFunctionTypeRepresentation::Thin:
  case SILFunctionTypeRepresentation::Closure:
    // These representations never use a Swift context value.
    return nullptr;

  case SILFunctionTypeRepresentation::WitnessMethod:
  case SILFunctionTypeRepresentation::Method:
    // This may or may not be null.
    return FirstData;

  case SILFunctionTypeRepresentation::Thick:
    // Thick functions always carry their closure context here.
    assert(FirstData && "no context value set on callee");
    return FirstData;
  }
  llvm_unreachable("bad representation");
}
/// Return the block object for a block callee.  Only valid on callees
/// with the Block representation.
llvm::Value *Callee::getBlockObject() const {
  assert(Info.OrigFnType->getRepresentation() ==
           SILFunctionTypeRepresentation::Block &&
         "not a block");
  assert(FirstData && "no block object set on callee");
  return FirstData;
}
/// Return the receiver ('self') for an ObjC method callee.  Only valid
/// on callees with the ObjCMethod representation.
llvm::Value *Callee::getObjCMethodReceiver() const {
  assert(Info.OrigFnType->getRepresentation() ==
           SILFunctionTypeRepresentation::ObjCMethod &&
         "not a method");
  assert(FirstData && "no receiver set on callee");
  return FirstData;
}
/// Return the selector ('_cmd') for an ObjC method callee.  Only valid
/// on callees with the ObjCMethod representation.
llvm::Value *Callee::getObjCMethodSelector() const {
  assert(Info.OrigFnType->getRepresentation() ==
           SILFunctionTypeRepresentation::ObjCMethod &&
         "not a method");
  assert(SecondData && "no selector set on callee");
  return SecondData;
}
/// Set up this emitter afresh from the current callee specs.
///
/// Arguments are written into Args back-to-front; LastArgWritten tracks
/// the lowest index written so far.  Trailing special arguments (witness
/// metadata slots, error slot, context pointer) are placed here; formal
/// arguments are filled in later by setArgs().
void CallEmission::setFromCallee() {
  IsCoroutine = CurCallee.getSubstFunctionType()->isCoroutine();
  EmittedCall = false;

  unsigned numArgs = CurCallee.getLLVMFunctionType()->getNumParams();

  // Set up the args array.
  assert(Args.empty());
  Args.reserve(numArgs);
  Args.set_size(numArgs);
  LastArgWritten = numArgs;

  auto fnType = CurCallee.getOrigFunctionType();

  if (fnType->getRepresentation()
        == SILFunctionTypeRepresentation::WitnessMethod) {
    // Reserve null slots for the trailing witness arguments; they are
    // filled in by setArgs() from the WitnessMetadata.
    unsigned n = getTrailingWitnessSignatureLength(IGF.IGM, fnType);
    while (n--) {
      Args[--LastArgWritten] = nullptr;
    }
  }

  llvm::Value *contextPtr = CurCallee.getSwiftContext();

  // Add the error result if we have one.
  if (fnType->hasErrorResult()) {
    // The invariant is that this is always zero-initialized, so we
    // don't need to do anything extra here.
    SILFunctionConventions fnConv(fnType, IGF.getSILModule());
    Address errorResultSlot = IGF.getErrorResultSlot(fnConv.getSILErrorType());

    assert(LastArgWritten > 0);
    Args[--LastArgWritten] = errorResultSlot.getAddress();
    // After the decrement, LastArgWritten is the error argument's index.
    addAttribute(LastArgWritten + llvm::AttributeList::FirstArgIndex,
                 llvm::Attribute::NoCapture);
    IGF.IGM.addSwiftErrorAttributes(CurCallee.getMutableAttributes(),
                                    LastArgWritten);

    // Fill in the context pointer if necessary.
    // A throwing convention still passes a context slot even when the
    // function itself takes no context; use undef in that case.
    if (!contextPtr) {
      assert(!CurCallee.getOrigFunctionType()->getExtInfo().hasContext() &&
             "Missing context?");
      contextPtr = llvm::UndefValue::get(IGF.IGM.RefCountedPtrTy);
    }
  }

  // Add the data pointer if we have one.
  // (Note that we're emitting backwards, so this correctly goes
  // *before* the error pointer.)
  if (contextPtr) {
    assert(LastArgWritten > 0);
    Args[--LastArgWritten] = contextPtr;
    IGF.IGM.addSwiftSelfAttributes(CurCallee.getMutableAttributes(),
                                   LastArgWritten);
  }
}
/// Decide whether an exploded value whose scalars follow \p schema can
/// be coerced element-by-element into \p expandedTys without staging
/// through memory.
bool irgen::canCoerceToSchema(IRGenModule &IGM,
                              ArrayRef<llvm::Type*> expandedTys,
                              const ExplosionSchema &schema) {
  // Mismatched arities always force the memory path.
  if (schema.size() != expandedTys.size())
    return false;

  // A single element can always be coerced as a scalar.
  if (expandedTys.size() == 1)
    return true;

  // With multiple elements, each corresponding pair of types must agree
  // in size for a pairwise coercion to be valid.
  for (size_t idx = 0, count = expandedTys.size(); idx != count; ++idx) {
    llvm::Type *fromTy = schema[idx].getScalarType();
    llvm::Type *toTy = expandedTys[idx];
    if (fromTy == toTy)
      continue;
    if (IGM.DataLayout.getTypeSizeInBits(fromTy) !=
        IGM.DataLayout.getTypeSizeInBits(toTy))
      return false;
  }

  // Every pair matched in size.
  return true;
}
/// Pick the translation target type for element \p index: the foreign
/// expanded type when translating to foreign, otherwise the native
/// scalar type.
static llvm::Type *getOutputType(TranslationDirection direction, unsigned index,
                                 const ExplosionSchema &nativeSchema,
                                 ArrayRef<llvm::Type*> expandedForeignTys) {
  assert(nativeSchema.size() == expandedForeignTys.size());
  if (direction == TranslationDirection::ToForeign)
    return expandedForeignTys[index];
  return nativeSchema[index].getScalarType();
}
/// Translate an explosion between Swift's exploded representation and
/// Clang's coerce-and-expand representation, in the given \p direction.
/// Uses direct scalar coercion when the schemas are pairwise compatible,
/// and otherwise stages the value through a stack temporary viewed as
/// the coercion struct type.
static void emitCoerceAndExpand(IRGenFunction &IGF, Explosion &in,
                                Explosion &out, SILType paramTy,
                                const LoadableTypeInfo &paramTI,
                                llvm::StructType *coercionTy,
                                ArrayRef<llvm::Type *> expandedTys,
                                TranslationDirection direction,
                                bool isOutlined) {
  // If we can directly coerce the scalar values, avoid going through memory.
  auto schema = paramTI.getSchema();
  if (canCoerceToSchema(IGF.IGM, expandedTys, schema)) {
    for (auto index : indices(expandedTys)) {
      llvm::Value *arg = in.claimNext();
      // The incoming value should already carry the source-side type.
      assert(arg->getType() ==
             getOutputType(reverse(direction), index, schema, expandedTys));
      auto outputTy = getOutputType(direction, index, schema, expandedTys);

      if (arg->getType() != outputTy)
        arg = IGF.coerceValue(arg, outputTy, IGF.IGM.DataLayout);
      out.add(arg);
    }
    return;
  }

  // Otherwise, materialize to a temporary.
  auto temporaryAlloc =
    paramTI.allocateStack(IGF, paramTy, "coerce-and-expand.temp");
  Address temporary = temporaryAlloc.getAddress();

  auto coercionTyLayout = IGF.IGM.DataLayout.getStructLayout(coercionTy);

  // Make the alloca at least as aligned as the coercion struct, just
  // so that the element accesses we make don't end up under-aligned.
  Alignment coercionTyAlignment = Alignment(coercionTyLayout->getAlignment());
  auto alloca = cast<llvm::AllocaInst>(temporary.getAddress());
  if (alloca->getAlignment() < coercionTyAlignment.getValue()) {
    alloca->setAlignment(coercionTyAlignment.getValue());
    temporary = Address(temporary.getAddress(), coercionTyAlignment);
  }

  // If we're translating *to* the foreign expansion, do an ordinary
  // initialization from the input explosion.
  if (direction == TranslationDirection::ToForeign) {
    paramTI.initialize(IGF, in, temporary, isOutlined);
  }

  // Reinterpret the temporary as the coercion struct for field access.
  Address coercedTemporary =
    IGF.Builder.CreateElementBitCast(temporary, coercionTy);

#ifndef NDEBUG
  size_t expandedTyIndex = 0;
#endif

  for (auto eltIndex : indices(coercionTy->elements())) {
    auto eltTy = coercionTy->getElementType(eltIndex);

    // Skip padding fields.
    if (eltTy->isArrayTy()) continue;
    assert(expandedTys[expandedTyIndex++] == eltTy);

    // Project down to the field.
    Address eltAddr =
      IGF.Builder.CreateStructGEP(coercedTemporary, eltIndex, coercionTyLayout);

    // If we're translating *to* the foreign expansion, pull the value out
    // of the field and add it to the output.
    if (direction == TranslationDirection::ToForeign) {
      llvm::Value *value = IGF.Builder.CreateLoad(eltAddr);
      out.add(value);

    // Otherwise, claim the next value from the input and store that
    // in the field.
    } else {
      llvm::Value *value = in.claimNext();
      IGF.Builder.CreateStore(value, eltAddr);
    }
  }
  assert(expandedTyIndex == expandedTys.size());

  // If we're translating *from* the foreign expansion, do an ordinary
  // load into the output explosion.
  if (direction == TranslationDirection::ToNative) {
    paramTI.loadAsTake(IGF, temporary, out);
  }

  // `deallocateStack` also ends the temporary's lifetime.
  paramTI.deallocateStack(IGF, temporaryAlloc, paramTy);
}
/// Emit a directly-passed Swift argument for a C-convention call,
/// coercing the exploded Swift scalars into the Clang-expected IR
/// types — pairwise when possible, otherwise through a memory
/// round-trip.
static void emitDirectExternalArgument(IRGenFunction &IGF, SILType argType,
                                       const clang::CodeGen::ABIArgInfo &AI,
                                       Explosion &in, Explosion &out,
                                       bool isOutlined) {
  bool IsDirectFlattened = AI.isDirect() && AI.getCanBeFlattened();
  bool IsIndirect = !AI.isDirect();

  // If we're supposed to pass directly as a struct type, that
  // really means expanding out as multiple arguments.
  llvm::Type *coercedTy = AI.getCoerceToType();
  ArrayRef<llvm::Type *> expandedTys =
    expandScalarOrStructTypeToArray(coercedTy);

  auto &argTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(argType));
  auto inputSchema = argTI.getSchema();

  // Check to see if we can pairwise coerce Swift's exploded scalars
  // to Clang's expanded elements.
  if ((IsDirectFlattened || IsIndirect) &&
      canCoerceToSchema(IGF.IGM, expandedTys, inputSchema)) {
    for (auto outputTy : expandedTys) {
      llvm::Value *arg = in.claimNext();
      if (arg->getType() != outputTy)
        arg = IGF.coerceValue(arg, outputTy, IGF.IGM.DataLayout);
      out.add(arg);
    }
    return;
  }

  // Otherwise, we need to coerce through memory.
  Address temporary;
  Size tempSize;
  std::tie(temporary, tempSize) =
    allocateForCoercion(IGF, argTI.getStorageType(), coercedTy, "coerced-arg");
  IGF.Builder.CreateLifetimeStart(temporary, tempSize);

  // Store to a temporary.
  Address tempOfArgTy = IGF.Builder.CreateBitCast(
      temporary, argTI.getStorageType()->getPointerTo());
  argTI.initializeFromParams(IGF, in, tempOfArgTy, argType, isOutlined);

  // Bitcast the temporary to the expected type.
  Address coercedAddr =
    IGF.Builder.CreateBitCast(temporary, coercedTy->getPointerTo());

  if (IsDirectFlattened && isa<llvm::StructType>(coercedTy)) {
    // Project out individual elements if necessary.
    auto *ST = cast<llvm::StructType>(coercedTy);
    const auto *layout = IGF.IGM.DataLayout.getStructLayout(ST);
    for (unsigned EI : range(ST->getNumElements())) {
      auto offset = Size(layout->getElementOffset(EI));
      auto address = IGF.Builder.CreateStructGEP(coercedAddr, EI, offset);
      out.add(IGF.Builder.CreateLoad(address));
    }
  } else {
    // Otherwise, collect the single scalar.
    out.add(IGF.Builder.CreateLoad(coercedAddr));
  }

  IGF.Builder.CreateLifetimeEnd(temporary, tempSize);
}
namespace {
  /// Load a clang argument expansion from a buffer.
  struct ClangExpandLoadEmitter :
    ClangExpandProjection<ClangExpandLoadEmitter> {

    // Destination explosion for the loaded scalars.
    Explosion &Out;
    ClangExpandLoadEmitter(IRGenFunction &IGF, Explosion &out)
      : ClangExpandProjection(IGF), Out(out) {}

    // Load one scalar leaf of the expansion from memory and add it
    // to the output explosion.
    void visitScalar(llvm::Type *scalarTy, Address addr) {
      addr = IGF.Builder.CreateBitCast(addr, scalarTy->getPointerTo());
      auto value = IGF.Builder.CreateLoad(addr);
      Out.add(value);
    }
  };

  /// Store a clang argument expansion into a buffer.
  struct ClangExpandStoreEmitter :
    ClangExpandProjection<ClangExpandStoreEmitter> {

    // Source explosion providing the scalars to store.
    Explosion &In;
    ClangExpandStoreEmitter(IRGenFunction &IGF, Explosion &in)
      : ClangExpandProjection(IGF), In(in) {}

    // Claim the next scalar from the input explosion and store it
    // into the corresponding leaf of the buffer.
    void visitScalar(llvm::Type *scalarTy, Address addr) {
      auto value = In.claimNext();
      addr = IGF.Builder.CreateBitCast(addr, scalarTy->getPointerTo());
      IGF.Builder.CreateStore(value, addr);
    }
  };
} // end anonymous namespace
/// Given a Swift value explosion in 'in', produce a Clang expansion
/// (according to ABIArgInfo::Expand) in 'out'.
static void
emitClangExpandedArgument(IRGenFunction &IGF, Explosion &in, Explosion &out,
                          clang::CanQualType clangType, SILType swiftType,
                          const LoadableTypeInfo &swiftTI, bool isOutlined) {
  // If Clang's expansion schema matches Swift's, great.
  if (doesClangExpansionMatchSchema(IGF.IGM, clangType, swiftSchema)) {
    return in.transferInto(out, swiftSchema.size());
  }

  // Otherwise, materialize to a temporary, then walk the Clang
  // expansion loading each scalar leaf from that buffer.
  auto ctemp = swiftTI.allocateStack(IGF, swiftType, "clang-expand-arg.temp");
  Address temp = ctemp.getAddress();
  swiftTI.initialize(IGF, in, temp, isOutlined);

  Address castTemp = IGF.Builder.CreateBitCast(temp, IGF.IGM.Int8PtrTy);
  ClangExpandLoadEmitter(IGF, out).visit(clangType, castTemp);

  swiftTI.deallocateStack(IGF, ctemp, swiftType);
}
/// Given a Clang-expanded (according to ABIArgInfo::Expand) parameter
/// in 'in', produce a Swift value explosion in 'out'.
void irgen::emitClangExpandedParameter(IRGenFunction &IGF,
                                       Explosion &in, Explosion &out,
                                       clang::CanQualType clangType,
                                       SILType swiftType,
                                       const LoadableTypeInfo &swiftTI) {
  // If Clang's expansion schema matches Swift's, great.
  auto swiftSchema = swiftTI.getSchema();
  if (doesClangExpansionMatchSchema(IGF.IGM, clangType, swiftSchema)) {
    return in.transferInto(out, swiftSchema.size());
  }

  // Otherwise, materialize to a temporary: store each expanded Clang
  // scalar into the buffer, then reload it as a Swift value.
  auto tempAlloc = swiftTI.allocateStack(IGF, swiftType,
                                         "clang-expand-param.temp");
  Address temp = tempAlloc.getAddress();
  Address castTemp = IGF.Builder.CreateBitCast(temp, IGF.IGM.Int8PtrTy);
  ClangExpandStoreEmitter(IGF, in).visit(clangType, castTemp);

  // Then load out.
  swiftTI.loadAsTake(IGF, temp, out);
  swiftTI.deallocateStack(IGF, tempAlloc, swiftType);
}
/// Convert a Swift argument explosion in 'in' into the foreign
/// (Clang-classified) argument sequence in 'out', consulting the
/// Clang function info for per-parameter ABI treatment.  Indirect
/// temporaries created here are registered in \p temporaries for
/// later destruction.
static void externalizeArguments(IRGenFunction &IGF, const Callee &callee,
                                 Explosion &in, Explosion &out,
                                 TemporarySet &temporaries,
                                 bool isOutlined) {
  auto silConv = IGF.IGM.silConv;
  auto fnType = callee.getOrigFunctionType();
  auto params = fnType->getParameters();

  assert(callee.getForeignInfo().ClangInfo);
  auto &FI = *callee.getForeignInfo().ClangInfo;

  // The index of the first "physical" parameter from paramTys/FI that
  // corresponds to a logical parameter from params.
  unsigned firstParam = 0;

  // Handle the ObjC prefix.
  if (callee.getRepresentation() == SILFunctionTypeRepresentation::ObjCMethod) {
    // Ignore both the logical and the physical parameters associated
    // with self and _cmd.
    firstParam += 2;
    params = params.drop_back();

  // Or the block prefix.
  } else if (fnType->getRepresentation()
               == SILFunctionTypeRepresentation::Block) {
    // Ignore the physical block-object parameter.
    firstParam += 1;
  }

  for (unsigned i = firstParam, e = FI.arg_size(); i != e; ++i) {
    auto clangParamTy = FI.arg_begin()[i].type;
    auto &AI = FI.arg_begin()[i].info;

    // We don't need to do anything to handle the Swift parameter-ABI
    // attributes here because we shouldn't be trying to round-trip
    // swiftcall function pointers through SIL as C functions anyway.
    assert(FI.getExtParameterInfo(i).getABI() == clang::ParameterABI::Ordinary);

    // Add a padding argument if required.
    if (auto *padType = AI.getPaddingType())
      out.add(llvm::UndefValue::get(padType));

    SILType paramType = silConv.getSILType(params[i - firstParam]);
    switch (AI.getKind()) {
    case clang::CodeGen::ABIArgInfo::Extend: {
      bool signExt = clangParamTy->hasSignedIntegerRepresentation();
      assert((signExt || clangParamTy->hasUnsignedIntegerRepresentation()) &&
             "Invalid attempt to add extension attribute to argument!");
      (void) signExt;
      LLVM_FALLTHROUGH;
    }
    case clang::CodeGen::ABIArgInfo::Direct: {
      auto toTy = AI.getCoerceToType();

      // Indirect parameters are bridged as Clang pointer types.
      if (silConv.isSILIndirect(params[i - firstParam])) {
        assert(paramType.isAddress() && "SIL type is not an address?");

        auto addr = in.claimNext();
        if (addr->getType() != toTy)
          addr = IGF.coerceValue(addr, toTy, IGF.IGM.DataLayout);
        out.add(addr);
        break;
      }

      emitDirectExternalArgument(IGF, paramType, AI, in, out, isOutlined);
      break;
    }
    case clang::CodeGen::ABIArgInfo::Indirect: {
      // Spill the Swift value to a temporary and pass its address.
      auto &ti = cast<LoadableTypeInfo>(IGF.getTypeInfo(paramType));

      auto temp = ti.allocateStack(IGF, paramType, "indirect-temporary");
      temporaries.add({temp, paramType});

      Address addr = temp.getAddress();
      // Set at least the alignment the ABI expects.
      if (AI.getIndirectByVal()) {
        auto ABIAlign = AI.getIndirectAlign();
        if (ABIAlign > addr.getAlignment()) {
          auto *AS = cast<llvm::AllocaInst>(addr.getAddress());
          AS->setAlignment(ABIAlign.getQuantity());
          addr = Address(addr.getAddress(), Alignment(ABIAlign.getQuantity()));
        }
      }

      ti.initialize(IGF, in, addr, isOutlined);

      out.add(addr.getAddress());
      break;
    }
    case clang::CodeGen::ABIArgInfo::CoerceAndExpand: {
      auto &paramTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(paramType));
      emitCoerceAndExpand(IGF, in, out, paramType, paramTI,
                          AI.getCoerceAndExpandType(),
                          AI.getCoerceAndExpandTypeSequence(),
                          TranslationDirection::ToForeign, isOutlined);
      break;
    }
    case clang::CodeGen::ABIArgInfo::Expand:
      emitClangExpandedArgument(
          IGF, in, out, clangParamTy, paramType,
          cast<LoadableTypeInfo>(IGF.getTypeInfo(paramType)), isOutlined);
      break;
    case clang::CodeGen::ABIArgInfo::Ignore:
      break;
    case clang::CodeGen::ABIArgInfo::InAlloca:
      llvm_unreachable("Need to handle InAlloca when externalizing arguments");
      break;
    }
  }
}
/// Add one native-convention Swift argument from \p in to \p out.
/// Returns whether allocas are needed (i.e. whether the value had to be
/// spilled to a stack temporary to be passed indirectly).
bool irgen::addNativeArgument(IRGenFunction &IGF, Explosion &in,
                              SILParameterInfo origParamInfo, Explosion &out,
                              bool isOutlined) {
  // Addresses consist of a single pointer argument.
  if (IGF.IGM.silConv.isSILIndirect(origParamInfo)) {
    out.add(in.claimNext());
    return false;
  }
  auto paramType = IGF.IGM.silConv.getSILType(origParamInfo);
  auto &ti = cast<LoadableTypeInfo>(IGF.getTypeInfo(paramType));
  auto schema = ti.getSchema();
  auto &nativeSchema = ti.nativeParameterValueSchema(IGF.IGM);
  if (nativeSchema.requiresIndirect()) {
    // Pass the argument indirectly.
    auto buf = IGF.createAlloca(ti.getStorageType(),
                                ti.getFixedAlignment(), "");
    ti.initialize(IGF, in, buf, isOutlined);
    out.add(buf.getAddress());
    return true;
  } else {
    if (schema.empty()) {
      // Empty types contribute no arguments at all.
      assert(nativeSchema.empty());
      return false;
    }
    assert(!nativeSchema.empty());

    // Pass the argument explosion directly, mapping into the native swift
    // calling convention.
    Explosion nonNativeParam;
    ti.reexplode(IGF, in, nonNativeParam);
    Explosion nativeParam = nativeSchema.mapIntoNative(
        IGF.IGM, IGF, nonNativeParam, paramType, isOutlined);
    nativeParam.transferInto(out, nativeParam.size());
    return false;
  }
}
/// Emit a direct parameter that was passed under a C-based CC.
static void emitDirectForeignParameter(IRGenFunction &IGF, Explosion &in,
const clang::CodeGen::ABIArgInfo &AI,
Explosion &out, SILType paramType,
const LoadableTypeInfo ¶mTI) {
// The ABI IR types for the entrypoint might differ from the
// Swift IR types for the body of the function.
llvm::Type *coercionTy = AI.getCoerceToType();
ArrayRef<llvm::Type*> expandedTys;
if (AI.isDirect() && AI.getCanBeFlattened() &&
isa<llvm::StructType>(coercionTy)) {
const auto *ST = cast<llvm::StructType>(coercionTy);
expandedTys = makeArrayRef(ST->element_begin(), ST->getNumElements());
} else if (coercionTy == paramTI.getStorageType()) {
// Fast-path a really common case. This check assumes that either
// the storage type of a type is an llvm::StructType or it has a
// single-element explosion.
out.add(in.claimNext());
return;
} else {
expandedTys = coercionTy;
}
auto outputSchema = paramTI.getSchema();
// Check to see if we can pairwise-coerce Swift's exploded scalars
// to Clang's expanded elements.
if (canCoerceToSchema(IGF.IGM, expandedTys, outputSchema)) {
for (auto &outputElt : outputSchema) {
llvm::Value *param = in.claimNext();
llvm::Type *outputTy = outputElt.getScalarType();
if (param->getType() != outputTy)
param = IGF.coerceValue(param, outputTy, IGF.IGM.DataLayout);
out.add(param);
}
return;
}
// Otherwise, we need to traffic through memory.
// Create a temporary.
Address temporary; Size tempSize;
std::tie(temporary, tempSize) = allocateForCoercion(IGF,
coercionTy,
paramTI.getStorageType(),
"");
IGF.Builder.CreateLifetimeStart(temporary, tempSize);
// Write the input parameters into the temporary:
Address coercedAddr =
IGF.Builder.CreateBitCast(temporary, coercionTy->getPointerTo());
// Break down a struct expansion if necessary.
if (auto expansionTy = dyn_cast<llvm::StructType>(coercionTy)) {
auto layout = IGF.IGM.DataLayout.getStructLayout(expansionTy);
for (unsigned i = 0, e = expansionTy->getNumElements(); i != e; ++i) {
auto fieldOffset = Size(layout->getElementOffset(i));
auto fieldAddr = IGF.Builder.CreateStructGEP(coercedAddr, i, fieldOffset);
IGF.Builder.CreateStore(in.claimNext(), fieldAddr);
}
// Otherwise, store the single scalar.
} else {
IGF.Builder.CreateStore(in.claimNext(), coercedAddr);
}
// Pull out the elements.
temporary = IGF.Builder.CreateBitCast(temporary,
paramTI.getStorageType()->getPointerTo());
paramTI.loadAsTake(IGF, temporary, out);
// Deallocate the temporary.
// `deallocateStack` emits the lifetime.end marker for us.
paramTI.deallocateStack(IGF, StackAddress(temporary), paramType);
}
void irgen::emitForeignParameter(IRGenFunction &IGF, Explosion ¶ms,
ForeignFunctionInfo foreignInfo,
unsigned foreignParamIndex, SILType paramTy,
const LoadableTypeInfo ¶mTI,
Explosion ¶mExplosion, bool isOutlined) {
assert(foreignInfo.ClangInfo);
auto &FI = *foreignInfo.ClangInfo;
auto clangArgTy = FI.arg_begin()[foreignParamIndex].type;
auto AI = FI.arg_begin()[foreignParamIndex].info;
// We don't need to do anything to handle the Swift parameter-ABI
// attributes here because we shouldn't be trying to round-trip
// swiftcall function pointers through SIL as C functions anyway.
assert(FI.getExtParameterInfo(foreignParamIndex).getABI()
== clang::ParameterABI::Ordinary);
// Drop padding arguments.
if (AI.getPaddingType())
params.claimNext();
switch (AI.getKind()) {
case clang::CodeGen::ABIArgInfo::Extend:
case clang::CodeGen::ABIArgInfo::Direct:
emitDirectForeignParameter(IGF, params, AI, paramExplosion, paramTy,
paramTI);
return;
case clang::CodeGen::ABIArgInfo::Indirect: {
Address address = paramTI.getAddressForPointer(params.claimNext());
paramTI.loadAsTake(IGF, address, paramExplosion);
return;
}
case clang::CodeGen::ABIArgInfo::Expand: {
emitClangExpandedParameter(IGF, params, paramExplosion, clangArgTy,
paramTy, paramTI);
return;
}
case clang::CodeGen::ABIArgInfo::CoerceAndExpand: {
auto ¶mTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(paramTy));
emitCoerceAndExpand(IGF, params, paramExplosion, paramTy, paramTI,
AI.getCoerceAndExpandType(),
AI.getCoerceAndExpandTypeSequence(),
TranslationDirection::ToNative, isOutlined);
break;
}
case clang::CodeGen::ABIArgInfo::Ignore:
return;
case clang::CodeGen::ABIArgInfo::InAlloca:
llvm_unreachable("Need to handle InAlloca during signature expansion");
}
}
/// Emit the common entry-block setup for a retcon-style coroutine:
/// call the appropriate 'llvm.coro.id.retcon*' variant followed by
/// 'llvm.coro.begin', and record the resulting handle on the IGF.
static void emitRetconCoroutineEntry(IRGenFunction &IGF,
                                     CanSILFunctionType fnType,
                                     Explosion &allParamValues,
                                     llvm::Intrinsic::ID idIntrinsic,
                                     Size bufferSize,
                                     Alignment bufferAlignment) {
  auto prototype =
    IGF.IGM.getOpaquePtr(IGF.IGM.getAddrOfContinuationPrototype(fnType));

  // Use malloc and free as our allocator.
  auto allocFn = IGF.IGM.getOpaquePtr(IGF.IGM.getMallocFn());
  auto deallocFn = IGF.IGM.getOpaquePtr(IGF.IGM.getFreeFn());

  // Call the right 'llvm.coro.id.retcon' variant.
  // The first parameter of the coroutine is the caller-provided buffer.
  llvm::Value *buffer = allParamValues.claimNext();
  llvm::Value *id = IGF.Builder.CreateIntrinsicCall(idIntrinsic, {
    llvm::ConstantInt::get(IGF.IGM.Int32Ty, bufferSize.getValue()),
    llvm::ConstantInt::get(IGF.IGM.Int32Ty, bufferAlignment.getValue()),
    buffer,
    prototype,
    allocFn,
    deallocFn
  });

  // Call 'llvm.coro.begin', just for consistency with the normal pattern.
  // This serves as a handle that we can pass around to other intrinsics.
  auto hdl = IGF.Builder.CreateIntrinsicCall(llvm::Intrinsic::ID::coro_begin, {
    id,
    llvm::ConstantPointerNull::get(IGF.IGM.Int8PtrTy)
  });

  // Set the coroutine handle; this also flags that is a coroutine so that
  // e.g. dynamic allocas use the right code generation.
  IGF.setCoroutineHandle(hdl);
}
/// Emit the entry-block setup for a yield-once coroutine using the
/// 'llvm.coro.id.retcon.once' intrinsic.
void irgen::emitYieldOnceCoroutineEntry(IRGenFunction &IGF,
                                        CanSILFunctionType fnType,
                                        Explosion &allParamValues) {
  emitRetconCoroutineEntry(IGF, fnType, allParamValues,
                           llvm::Intrinsic::ID::coro_id_retcon_once,
                           getYieldOnceCoroutineBufferSize(IGF.IGM),
                           getYieldOnceCoroutineBufferAlignment(IGF.IGM));
}
/// Emit the entry-block setup for a yield-many coroutine using the
/// 'llvm.coro.id.retcon' intrinsic.
void irgen::emitYieldManyCoroutineEntry(IRGenFunction &IGF,
                                        CanSILFunctionType fnType,
                                        Explosion &allParamValues) {
  emitRetconCoroutineEntry(IGF, fnType, allParamValues,
                           llvm::Intrinsic::ID::coro_id_retcon,
                           getYieldManyCoroutineBufferSize(IGF.IGM),
                           getYieldManyCoroutineBufferAlignment(IGF.IGM));
}
/// Create a stack allocation for an opaque [size x i8] buffer, begin its
/// lifetime, and return the address of its first byte.
static Address createOpaqueBufferAlloca(IRGenFunction &IGF,
                                        Size size, Alignment align) {
  auto byteArrayTy = llvm::ArrayType::get(IGF.IGM.Int8Ty, size.getValue());
  auto buffer = IGF.createAlloca(byteArrayTy, align);
  buffer = IGF.Builder.CreateStructGEP(buffer, 0, Size(0));
  IGF.Builder.CreateLifetimeStart(buffer, size);
  return buffer;
}
/// Allocate the fixed-size opaque buffer used by a yield-once coroutine.
Address irgen::emitAllocYieldOnceCoroutineBuffer(IRGenFunction &IGF) {
  return createOpaqueBufferAlloca(IGF, getYieldOnceCoroutineBufferSize(IGF.IGM),
                                  getYieldOnceCoroutineBufferAlignment(IGF.IGM));
}
/// Allocate the fixed-size opaque buffer used by a yield-many coroutine.
Address irgen::emitAllocYieldManyCoroutineBuffer(IRGenFunction &IGF) {
  return createOpaqueBufferAlloca(IGF, getYieldManyCoroutineBufferSize(IGF.IGM),
                                  getYieldManyCoroutineBufferAlignment(IGF.IGM));
}
/// End the lifetime of a yield-once coroutine buffer.
void irgen::emitDeallocYieldOnceCoroutineBuffer(IRGenFunction &IGF,
                                                Address buffer) {
  IGF.Builder.CreateLifetimeEnd(buffer,
                                getYieldOnceCoroutineBufferSize(IGF.IGM));
}
/// End the lifetime of a yield-many coroutine buffer.
void irgen::emitDeallocYieldManyCoroutineBuffer(IRGenFunction &IGF,
                                                Address buffer) {
  IGF.Builder.CreateLifetimeEnd(buffer,
                                getYieldManyCoroutineBufferSize(IGF.IGM));
}
/// Emit a coroutine yield of \p substValues: split the yielded values
/// into direct components and an optional indirect buffer, then call
/// 'llvm.coro.suspend.retcon'.  Returns the i1 "is unwind" flag from
/// the suspend intrinsic.
llvm::Value *irgen::emitYield(IRGenFunction &IGF,
                              CanSILFunctionType coroutineType,
                              Explosion &substValues) {
  auto coroSignature = IGF.IGM.getSignature(coroutineType);
  auto coroInfo = coroSignature.getCoroutineInfo();

  // Translate the arguments to an unsubstituted form.
  Explosion allComponents;
  for (auto yield : coroutineType->getYields())
    addNativeArgument(IGF, substValues, yield, allComponents, false);

  // Figure out which arguments need to be yielded directly.
  SmallVector<llvm::Value*, 8> yieldArgs;

  // Add the direct yield components.
  auto directComponents =
    allComponents.claim(coroInfo.NumDirectYieldComponents);
  yieldArgs.append(directComponents.begin(), directComponents.end());

  // The rest need to go into an indirect buffer.
  auto indirectComponents = allComponents.claimAll();

  // The coroutine's result struct is (continuation, direct components...,
  // [indirect buffer pointer]); check our components are consistent with it.
  auto resultStructTy =
    dyn_cast<llvm::StructType>(coroSignature.getType()->getReturnType());
  assert((!resultStructTy
             && directComponents.empty()
             && indirectComponents.empty())
         || (resultStructTy
             && resultStructTy->getNumElements() ==
                  (1 + directComponents.size()
                     + unsigned(!indirectComponents.empty()))));

  // Fill in the indirect buffer if necessary.
  Optional<Address> indirectBuffer;
  Size indirectBufferSize;
  if (!indirectComponents.empty()) {
    // The buffer's struct type is the pointee of the last result element.
    auto bufferStructTy = cast<llvm::StructType>(
      resultStructTy->getElementType(resultStructTy->getNumElements() - 1)
                    ->getPointerElementType());
    auto layout = IGF.IGM.DataLayout.getStructLayout(bufferStructTy);
    indirectBuffer = IGF.createAlloca(bufferStructTy,
                                      Alignment(layout->getAlignment()));
    indirectBufferSize = Size(layout->getSizeInBytes());
    IGF.Builder.CreateLifetimeStart(*indirectBuffer, indirectBufferSize);

    for (size_t i : indices(bufferStructTy->elements())) {
      // Skip padding elements.
      if (bufferStructTy->getElementType(i)->isArrayTy())
        continue;

      assert(!indirectComponents.empty() &&
             "insufficient number of indirect yield components");

      auto addr = IGF.Builder.CreateStructGEP(*indirectBuffer, i, layout);
      IGF.Builder.CreateStore(indirectComponents.front(), addr);
      indirectComponents = indirectComponents.drop_front();
    }

    assert(indirectComponents.empty() && "too many indirect yield components");

    // Remember to yield the indirect buffer.
    yieldArgs.push_back(indirectBuffer->getAddress());
  }

  // Perform the yield.
  auto isUnwind =
    IGF.Builder.CreateIntrinsicCall(llvm::Intrinsic::ID::coro_suspend_retcon,
                                    { IGF.IGM.Int1Ty },
                                    yieldArgs);

  // We're done with the indirect buffer.
  if (indirectBuffer) {
    IGF.Builder.CreateLifetimeEnd(*indirectBuffer, indirectBufferSize);
  }

  return isUnwind;
}
/// Add a new set of arguments to the function.
///
/// Translates \p original into the representation the callee's calling
/// convention expects (indirect results first, then coroutine buffer,
/// then the formal arguments with any representation-specific prefix),
/// and writes the result into the reserved low slots of Args.
void CallEmission::setArgs(Explosion &original, bool isOutlined,
                           WitnessMetadata *witnessMetadata) {
  // Convert arguments to a representation appropriate to the calling
  // convention.
  Explosion adjusted;

  auto origCalleeType = CurCallee.getOrigFunctionType();
  SILFunctionConventions fnConv(origCalleeType, IGF.getSILModule());

  // Pass along the indirect result pointers.
  original.transferInto(adjusted, fnConv.getNumIndirectSILResults());

  // Pass along the coroutine buffer.
  switch (origCalleeType->getCoroutineKind()) {
  case SILCoroutineKind::YieldMany:
  case SILCoroutineKind::YieldOnce:
    original.transferInto(adjusted, 1);
    break;

  case SILCoroutineKind::None:
    break;
  }

  // Translate the formal arguments and handle any special arguments.
  switch (getCallee().getRepresentation()) {
  case SILFunctionTypeRepresentation::ObjCMethod:
    // ObjC methods take the receiver and selector up front.
    adjusted.add(getCallee().getObjCMethodReceiver());
    adjusted.add(getCallee().getObjCMethodSelector());
    externalizeArguments(IGF, getCallee(), original, adjusted,
                         Temporaries, isOutlined);
    break;

  case SILFunctionTypeRepresentation::Block:
    // Blocks take the block object first, then C-convention arguments.
    adjusted.add(getCallee().getBlockObject());
    LLVM_FALLTHROUGH;

  case SILFunctionTypeRepresentation::CFunctionPointer:
    externalizeArguments(IGF, getCallee(), original, adjusted,
                         Temporaries, isOutlined);
    break;

  case SILFunctionTypeRepresentation::WitnessMethod:
    assert(witnessMetadata);
    assert(witnessMetadata->SelfMetadata->getType() ==
           IGF.IGM.TypeMetadataPtrTy);
    assert(witnessMetadata->SelfWitnessTable->getType() ==
           IGF.IGM.WitnessTablePtrTy);
    // Fill the trailing slots reserved by setFromCallee().
    Args.rbegin()[1] = witnessMetadata->SelfMetadata;
    Args.rbegin()[0] = witnessMetadata->SelfWitnessTable;
    LLVM_FALLTHROUGH;

  case SILFunctionTypeRepresentation::Closure:
  case SILFunctionTypeRepresentation::Method:
  case SILFunctionTypeRepresentation::Thin:
  case SILFunctionTypeRepresentation::Thick: {
    // Check for value arguments that need to be passed indirectly.
    // But don't expect to see 'self' if it's been moved to the context
    // position.
    auto params = origCalleeType->getParameters();
    if (hasSelfContextParameter(origCalleeType)) {
      params = params.drop_back();
    }
    for (auto param : params) {
      addNativeArgument(IGF, original, param, adjusted, isOutlined);
    }

    // Anything else, just pass along.  This will include things like
    // generic arguments.
    adjusted.add(original.claimAll());

    break;
  }
  }

  // Add the given number of arguments.
  assert(LastArgWritten >= adjusted.size());

  size_t targetIndex = LastArgWritten - adjusted.size();
  assert(targetIndex <= 1);
  LastArgWritten = targetIndex;

  auto argIterator = Args.begin() + targetIndex;
  for (auto value : adjusted.claimAll()) {
    *argIterator++ = value;
  }
}
/// Attach \p attr to argument slot \p index of the callee's attribute list.
void CallEmission::addAttribute(unsigned index,
                                llvm::Attribute::AttrKind attr) {
  auto &attrList = CurCallee.getMutableAttributes();
  attrList = attrList.addAttribute(IGF.IGM.LLVMContext, index, attr);
}
/// Initialize an Explosion with the parameters of the current
/// function.  All of the objects will be added unmanaged.  This is
/// really only useful when writing prologue code.
Explosion IRGenFunction::collectParameters() {
  Explosion params;
  // Gather every formal IR parameter of the function, in order.
  for (auto &arg : CurFn->args())
    params.add(&arg);
  return params;
}
/// Fetch the error result slot, creating it lazily on first use.
///
/// The slot is a function-local alloca holding a pointer to the error value;
/// it is created at the alloca insertion point (entry block) and initialized
/// to null so a call can overwrite it with a thrown error.
Address IRGenFunction::getErrorResultSlot(SILType errorType) {
  if (!ErrorResultSlot) {
    auto &errorTI = cast<FixedTypeInfo>(getTypeInfo(errorType));
    // Build at the alloca insertion point so the slot lives in the entry
    // block regardless of the current insertion point.
    IRBuilder builder(IGM.getLLVMContext(), IGM.DebugInfo != nullptr);
    builder.SetInsertPoint(AllocaIP->getParent(), AllocaIP->getIterator());
    // Create the alloca. We don't use allocateStack because we're
    // not allocating this in stack order.
    auto addr = createAlloca(errorTI.getStorageType(),
                             errorTI.getFixedAlignment(),
                             "swifterror");
    // Only add the swifterror attribute on ABIs that pass it in a register.
    // We create a shadow stack location of the swifterror parameter for the
    // debugger on platforms that pass swifterror by reference and so we can't
    // mark the parameter with a swifterror attribute for these.
    if (IGM.IsSwiftErrorInRegister)
      cast<llvm::AllocaInst>(addr.getAddress())->setSwiftError(true);
    // Initialize at the alloca point.
    auto nullError = llvm::ConstantPointerNull::get(
        cast<llvm::PointerType>(errorTI.getStorageType()));
    builder.CreateStore(nullError, addr);
    ErrorResultSlot = addr.getAddress();
  }
  return Address(ErrorResultSlot, IGM.getPointerAlignment());
}
/// Fetch the error result slot received from the caller.
///
/// Only valid when the slot is a caller-supplied function argument (as the
/// second assert enforces), not a locally created alloca.
Address IRGenFunction::getCallerErrorResultSlot() {
  assert(ErrorResultSlot && "no error result slot!");
  assert(isa<llvm::Argument>(ErrorResultSlot) && "error result slot is local!");
  return Address(ErrorResultSlot, IGM.getPointerAlignment());
}
/// Set the error result slot. This should only be done in the prologue.
///
/// \param address a pointer-typed value; must be set at most once.
void IRGenFunction::setErrorResultSlot(llvm::Value *address) {
  assert(!ErrorResultSlot && "already have error result slot!");
  assert(isa<llvm::PointerType>(address->getType()));
  ErrorResultSlot = address;
}
/// Emit the basic block that 'return' should branch to and insert it into
/// the current function. This creates a second insertion point that most
/// blocks should be inserted before.
void IRGenFunction::emitBBForReturn() {
  ReturnBB = createBasicBlock("return");
  CurFn->getBasicBlockList().push_back(ReturnBB);
}
/// Emit the prologue for the function.
void IRGenFunction::emitPrologue() {
  // Set up the IRBuilder.
  llvm::BasicBlock *EntryBB = createBasicBlock("entry");
  assert(CurFn->getBasicBlockList().empty() && "prologue already emitted?");
  CurFn->getBasicBlockList().push_back(EntryBB);
  Builder.SetInsertPoint(EntryBB);
  // Set up the alloca insertion point.  The i1 alloca is only a marker
  // instruction: real allocas are inserted before it, and it is erased
  // again in emitEpilogue().
  AllocaIP = Builder.IRBuilderBase::CreateAlloca(IGM.Int1Ty,
                                                 /*array size*/ nullptr,
                                                 "alloca point");
}
/// Emit a branch to the return block and set the insert point there.
/// Returns true if the return block is reachable, false otherwise.
///
/// As a cleanup, this also erases or merges the return block when it is
/// unused or has a single unconditional-branch predecessor.
bool IRGenFunction::emitBranchToReturnBB() {
  // If there are no edges to the return block, we never want to emit it.
  if (ReturnBB->use_empty()) {
    ReturnBB->eraseFromParent();
    // Normally this means that we'll just insert the epilogue in the
    // current block, but if the current IP is unreachable then so is
    // the entire epilogue.
    if (!Builder.hasValidIP())
      return false;
  // Otherwise, branch to it if the current IP is reachable.
  } else if (Builder.hasValidIP()) {
    Builder.CreateBr(ReturnBB);
    Builder.SetInsertPoint(ReturnBB);
  // Otherwise, if there is exactly one use of the return block, merge
  // it into its predecessor.
  } else if (ReturnBB->hasOneUse()) {
    // return statements are never emitted as conditional branches.
    llvm::BranchInst *Br = cast<llvm::BranchInst>(*ReturnBB->use_begin());
    assert(Br->isUnconditional());
    // Continue emission where the branch was, then drop both the branch
    // and the now-unreferenced return block.
    Builder.SetInsertPoint(Br->getParent());
    Br->eraseFromParent();
    ReturnBB->eraseFromParent();
  // Otherwise, just move the IP to the return block.
  } else {
    Builder.SetInsertPoint(ReturnBB);
  }
  return true;
}
/// Emit the epilogue for the function.
void IRGenFunction::emitEpilogue() {
  // Destroy the alloca insertion point marker created in emitPrologue().
  AllocaIP->eraseFromParent();
}
/// Allocate a stack buffer large and aligned enough to hold a value of
/// either \p fromTy or \p toTy, for coercing between the two via memory.
///
/// \returns the buffer address and the size (the larger of the two type
/// sizes, in bits as reported by the data layout).
std::pair<Address, Size>
irgen::allocateForCoercion(IRGenFunction &IGF,
                           llvm::Type *fromTy,
                           llvm::Type *toTy,
                           const llvm::Twine &basename) {
  auto &DL = IGF.IGM.DataLayout;
  auto fromSize = DL.getTypeSizeInBits(fromTy);
  auto toSize = DL.getTypeSizeInBits(toTy);
  // The buffer is typed after whichever type is bigger.
  auto bufferTy = fromSize >= toSize
                    ? fromTy
                    : toTy;
  // Use the stricter of the two ABI alignments so loads/stores of either
  // type are valid.
  auto alignment = std::max(DL.getABITypeAlignment(fromTy),
                            DL.getABITypeAlignment(toTy));
  auto buffer = IGF.createAlloca(bufferTy, Alignment(alignment),
                                 basename + ".coerced");
  Size size(std::max(fromSize, toSize));
  return {buffer, size};
}
/// Coerce \p value to \p toTy.
///
/// Prefers direct pointer/pointer and pointer/integer casts; otherwise
/// round-trips the value through a temporary stack buffer (store as the
/// source type, load as the destination type).
llvm::Value* IRGenFunction::coerceValue(llvm::Value *value, llvm::Type *toTy,
                                        const llvm::DataLayout &DL)
{
  llvm::Type *fromTy = value->getType();
  assert(fromTy != toTy && "Unexpected same types in type coercion!");
  assert(!fromTy->isVoidTy()
         && "Unexpected void source type in type coercion!");
  assert(!toTy->isVoidTy()
         && "Unexpected void destination type in type coercion!");
  // Use the pointer/pointer and pointer/int casts if we can.
  if (toTy->isPointerTy()) {
    if (fromTy->isPointerTy())
      return Builder.CreateBitCast(value, toTy);
    if (fromTy == IGM.IntPtrTy)
      return Builder.CreateIntToPtr(value, toTy);
  } else if (fromTy->isPointerTy()) {
    if (toTy == IGM.IntPtrTy) {
      return Builder.CreatePtrToInt(value, toTy);
    }
  }
  // Otherwise we need to store, bitcast, and load.
  Address address; Size size;
  std::tie(address, size) = allocateForCoercion(*this, fromTy, toTy,
                                                value->getName() + ".coercion");
  Builder.CreateLifetimeStart(address, size);
  auto orig = Builder.CreateBitCast(address, fromTy->getPointerTo());
  Builder.CreateStore(value, orig);
  auto coerced = Builder.CreateBitCast(address, toTy->getPointerTo());
  auto loaded = Builder.CreateLoad(coerced);
  Builder.CreateLifetimeEnd(address, size);
  return loaded;
}
/// Return the values in \p result from the current function, coercing to
/// the function's ABI return type when it differs.
///
/// An empty explosion becomes 'ret void'; multiple values are packed into
/// a struct of type \p resultType before returning.
void IRGenFunction::emitScalarReturn(llvm::Type *resultType,
                                     Explosion &result) {
  if (result.empty()) {
    Builder.CreateRetVoid();
    return;
  }
  auto *ABIType = CurFn->getReturnType();
  if (result.size() == 1) {
    auto *returned = result.claimNext();
    if (ABIType != returned->getType())
      returned = coerceValue(returned, ABIType, IGM.DataLayout);
    Builder.CreateRet(returned);
    return;
  }
  // Multiple return values are returned as a struct.
  assert(cast<llvm::StructType>(resultType)->getNumElements() == result.size());
  llvm::Value *resultAgg = llvm::UndefValue::get(resultType);
  for (unsigned i = 0, e = result.size(); i != e; ++i) {
    llvm::Value *elt = result.claimNext();
    resultAgg = Builder.CreateInsertValue(resultAgg, elt, i);
  }
  if (ABIType != resultType)
    resultAgg = coerceValue(resultAgg, ABIType, IGM.DataLayout);
  Builder.CreateRet(resultAgg);
}
/// Adjust the alignment of the alloca pointed to by \p allocaAddr to the
/// required alignment of the struct \p type.
///
/// Only the underlying llvm::AllocaInst is updated; \p allocaAddr is taken
/// by value, so the caller's Address wrapper keeps its original (smaller)
/// alignment, which is conservative and safe.
static void adjustAllocaAlignment(const llvm::DataLayout &DL,
                                  Address allocaAddr, llvm::StructType *type) {
  auto layout = DL.getStructLayout(type);
  Alignment layoutAlignment = Alignment(layout->getAlignment());
  auto alloca = cast<llvm::AllocaInst>(allocaAddr.getAddress());
  if (alloca->getAlignment() < layoutAlignment.getValue()) {
    alloca->setAlignment(layoutAlignment.getValue());
    // The previous code also reassigned allocaAddr here, but the parameter
    // is passed by value, so that store was dead and has been removed.
  }
}
/// Count the scalar components of the lowered (native) representation.
unsigned NativeConventionSchema::size() const {
  if (empty())
    return 0;
  unsigned count = 0;
  // Each enumerated component contributes exactly one scalar.
  Lowering.enumerateComponents([&count](clang::CharUnits offset,
                                        clang::CharUnits end,
                                        llvm::Type *type) { ++count; });
  return count;
}
/// Returns true if the explosion schema can be produced from the expanded
/// native types by (at most) truncating the final element: every earlier
/// pair of types must match exactly or be size-equal, and the last pair may
/// additionally be a wider-integer-to-narrower-integer truncation.
static bool canMatchByTruncation(IRGenModule &IGM,
                                 ArrayRef<llvm::Type*> expandedTys,
                                 const ExplosionSchema &schema) {
  // If the schemas don't even match in number, we have to go
  // through memory.
  if (expandedTys.size() != schema.size() || expandedTys.empty())
    return false;
  if (expandedTys.size() == 1) return false;
  // If there are multiple elements, the pairs of types need to
  // match in size up to the penultimate for the truncation to work.
  size_t e = expandedTys.size();
  for (size_t i = 0; i != e - 1; ++i) {
    llvm::Type *outputTy = schema[i].getScalarType();
    llvm::Type *inputTy = expandedTys[i];
    if (inputTy != outputTy &&
        IGM.DataLayout.getTypeSizeInBits(inputTy) !=
        IGM.DataLayout.getTypeSizeInBits(outputTy))
      return false;
  }
  // Check that we can truncate the last element: identical types, equal
  // sizes, or an integer-to-smaller-integer pair.
  llvm::Type *outputTy = schema[e-1].getScalarType();
  llvm::Type *inputTy = expandedTys[e-1];
  return inputTy == outputTy || (IGM.DataLayout.getTypeSizeInBits(inputTy) ==
                                 IGM.DataLayout.getTypeSizeInBits(outputTy)) ||
         (IGM.DataLayout.getTypeSizeInBits(inputTy) >
          IGM.DataLayout.getTypeSizeInBits(outputTy) &&
          isa<llvm::IntegerType>(inputTy) && isa<llvm::IntegerType>(outputTy));
}
/// Map an explosion in the native calling convention back into the explicit
/// schema expected by SIL-level code.
///
/// Tries, in order: a direct coercion when the scalar shapes line up, an
/// element-wise match with integer truncations, and finally a round trip
/// through a temporary stack allocation laid out with the coercion struct
/// type(s).
Explosion NativeConventionSchema::mapFromNative(IRGenModule &IGM,
                                                IRGenFunction &IGF,
                                                Explosion &native,
                                                SILType type) const {
  if (native.empty()) {
    assert(empty() && "Empty explosion must match the native convention");
    return Explosion();
  }
  assert(!empty());
  auto *nativeTy = getExpandedType(IGM);
  auto expandedTys = expandScalarOrStructTypeToArray(nativeTy);
  auto &TI = IGM.getTypeInfo(type);
  auto schema = TI.getSchema();
  // The expected explosion type.
  auto *explosionTy = schema.getScalarResultType(IGM);
  // Check whether we can coerce the explosion to the expected type convention.
  auto &DataLayout = IGM.DataLayout;
  Explosion nonNativeExplosion;
  if (canCoerceToSchema(IGM, expandedTys, schema)) {
    if (native.size() == 1) {
      auto *elt = native.claimNext();
      if (explosionTy != elt->getType()) {
        if (isa<llvm::IntegerType>(explosionTy) &&
            isa<llvm::IntegerType>(elt->getType())) {
          // Integer-to-integer mismatches only need a truncation.
          elt = IGF.Builder.CreateTrunc(elt, explosionTy);
        } else {
          elt = IGF.coerceValue(elt, explosionTy, DataLayout);
        }
      }
      nonNativeExplosion.add(elt);
      return nonNativeExplosion;
    } else if (nativeTy == explosionTy) {
      native.transferInto(nonNativeExplosion, native.size());
      return nonNativeExplosion;
    }
    // Otherwise, fall through and go through memory below.
  } else if (canMatchByTruncation(IGM, expandedTys, schema)) {
    // Match the explosion element-wise, coercing or truncating where the
    // native type differs from the schema type.
    assert(expandedTys.size() == schema.size());
    for (size_t i = 0, e = expandedTys.size(); i != e; ++i) {
      auto *elt = native.claimNext();
      auto *schemaTy = schema[i].getScalarType();
      auto *nativeTy = elt->getType();
      assert(nativeTy == expandedTys[i]);
      if (schemaTy == nativeTy) {
        // elt = elt
      } else if (DataLayout.getTypeSizeInBits(schemaTy) ==
                 DataLayout.getTypeSizeInBits(nativeTy))
        elt = IGF.coerceValue(elt, schemaTy, DataLayout);
      else {
        assert(DataLayout.getTypeSizeInBits(schemaTy) <
               DataLayout.getTypeSizeInBits(nativeTy));
        elt = IGF.Builder.CreateTrunc(elt, schemaTy);
      }
      nonNativeExplosion.add(elt);
    }
    return nonNativeExplosion;
  }
  // If not, go through memory.
  auto &loadableTI = cast<LoadableTypeInfo>(TI);
  // We can get two layouts if there are overlapping ranges in the legal type
  // sequence.
  llvm::StructType *coercionTy, *overlappedCoercionTy;
  SmallVector<unsigned, 8> expandedTyIndicesMap;
  std::tie(coercionTy, overlappedCoercionTy) =
      getCoercionTypes(IGM, expandedTyIndicesMap);
  // Get the larger layout out of those two.
  auto coercionSize = DataLayout.getTypeSizeInBits(coercionTy);
  auto overlappedCoercionSize =
      DataLayout.getTypeSizeInBits(overlappedCoercionTy);
  llvm::StructType *largerCoercion = coercionSize >= overlappedCoercionSize
                                         ? coercionTy
                                         : overlappedCoercionTy;
  // Allocate a temporary for the coercion.
  Address temporary;
  Size tempSize;
  std::tie(temporary, tempSize) = allocateForCoercion(
      IGF, largerCoercion, loadableTI.getStorageType(), "temp-coercion");
  // Make sure we have sufficiently large alignment.
  adjustAllocaAlignment(DataLayout, temporary, coercionTy);
  adjustAllocaAlignment(DataLayout, temporary, overlappedCoercionTy);
  auto &Builder = IGF.Builder;
  Builder.CreateLifetimeStart(temporary, tempSize);
  // Store the expanded type elements.
  auto coercionAddr = Builder.CreateElementBitCast(temporary, coercionTy);
  unsigned expandedMapIdx = 0;
  auto eltsArray = native.claimAll();
  SmallVector<llvm::Value *, 8> nativeElts(eltsArray.begin(), eltsArray.end());
  // Store each native element into its slot of the given coercion struct,
  // nulling out nativeElts entries as they are consumed.
  auto storeToFn = [&](llvm::StructType *ty, Address structAddr) {
    for (auto eltIndex : indices(ty->elements())) {
      auto layout = DataLayout.getStructLayout(ty);
      auto eltTy = ty->getElementType(eltIndex);
      // Skip padding fields.
      if (eltTy->isArrayTy())
        continue;
      Address eltAddr = Builder.CreateStructGEP(structAddr, eltIndex, layout);
      auto index = expandedTyIndicesMap[expandedMapIdx];
      assert(index < nativeElts.size() && nativeElts[index] != nullptr);
      auto nativeElt = nativeElts[index];
      Builder.CreateStore(nativeElt, eltAddr);
      nativeElts[index] = nullptr;
      ++expandedMapIdx;
    }
  };
  storeToFn(coercionTy, coercionAddr);
  if (!overlappedCoercionTy->isEmptyTy()) {
    auto overlappedCoercionAddr =
        Builder.CreateElementBitCast(temporary, overlappedCoercionTy);
    storeToFn(overlappedCoercionTy, overlappedCoercionAddr);
  }
  // Reload according to the types schema.
  Address storageAddr = Builder.CreateBitCast(
      temporary, loadableTI.getStorageType()->getPointerTo());
  loadableTI.loadAsTake(IGF, storageAddr, nonNativeExplosion);
  Builder.CreateLifetimeEnd(temporary, tempSize);
  return nonNativeExplosion;
}
/// Map an explosion in SIL's explicit schema into the native calling
/// convention's representation.
///
/// Mirror image of mapFromNative: tries a direct coercion (zero-extending
/// integers where the native type is wider), then an element-wise match,
/// then a round trip through a temporary laid out with the coercion struct
/// type(s).
Explosion NativeConventionSchema::mapIntoNative(IRGenModule &IGM,
                                                IRGenFunction &IGF,
                                                Explosion &fromNonNative,
                                                SILType type,
                                                bool isOutlined) const {
  if (fromNonNative.empty()) {
    assert(empty() && "Empty explosion must match the native convention");
    return Explosion();
  }
  assert(!requiresIndirect() && "Expected direct convention");
  assert(!empty());
  auto *nativeTy = getExpandedType(IGM);
  auto expandedTys = expandScalarOrStructTypeToArray(nativeTy);
  auto &TI = IGM.getTypeInfo(type);
  auto schema = TI.getSchema();
  auto *explosionTy = schema.getScalarResultType(IGM);
  // Check whether we can coerce the explosion to the expected type convention.
  auto &DataLayout = IGM.DataLayout;
  Explosion nativeExplosion;
  if (canCoerceToSchema(IGM, expandedTys, schema)) {
    if (fromNonNative.size() == 1) {
      auto *elt = fromNonNative.claimNext();
      if (nativeTy != elt->getType()) {
        if (isa<llvm::IntegerType>(nativeTy) &&
            isa<llvm::IntegerType>(elt->getType()))
          // Widening integer mismatches are handled with a zero-extension.
          elt = IGF.Builder.CreateZExt(elt, nativeTy);
        else
          elt = IGF.coerceValue(elt, nativeTy, DataLayout);
      }
      nativeExplosion.add(elt);
      return nativeExplosion;
    } else if (nativeTy == explosionTy) {
      fromNonNative.transferInto(nativeExplosion, fromNonNative.size());
      return nativeExplosion;
    }
    // Otherwise, fall through and go through memory below.
  } else if (canMatchByTruncation(IGM, expandedTys, schema)) {
    // Match element-wise, coercing or zero-extending where the native type
    // differs from the schema type.
    assert(expandedTys.size() == schema.size());
    for (size_t i = 0, e = expandedTys.size(); i != e; ++i) {
      auto *elt = fromNonNative.claimNext();
      auto *schemaTy = elt->getType();
      auto *nativeTy = expandedTys[i];
      assert(schema[i].getScalarType() == schemaTy);
      if (schemaTy == nativeTy) {
        // elt = elt
      } else if (DataLayout.getTypeSizeInBits(schemaTy) ==
                 DataLayout.getTypeSizeInBits(nativeTy))
        elt = IGF.coerceValue(elt, nativeTy, DataLayout);
      else {
        assert(DataLayout.getTypeSizeInBits(schemaTy) <
               DataLayout.getTypeSizeInBits(nativeTy));
        elt = IGF.Builder.CreateZExt(elt, nativeTy);
      }
      nativeExplosion.add(elt);
    }
    return nativeExplosion;
  }
  // If not, go through memory.
  auto &loadableTI = cast<LoadableTypeInfo>(TI);
  // We can get two layouts if there are overlapping ranges in the legal type
  // sequence.
  llvm::StructType *coercionTy, *overlappedCoercionTy;
  SmallVector<unsigned, 8> expandedTyIndicesMap;
  std::tie(coercionTy, overlappedCoercionTy) =
      getCoercionTypes(IGM, expandedTyIndicesMap);
  // Get the larger layout out of those two.
  auto coercionSize = DataLayout.getTypeSizeInBits(coercionTy);
  auto overlappedCoercionSize =
      DataLayout.getTypeSizeInBits(overlappedCoercionTy);
  llvm::StructType *largerCoercion = coercionSize >= overlappedCoercionSize
                                         ? coercionTy
                                         : overlappedCoercionTy;
  // Allocate a temporary for the coercion.
  Address temporary;
  Size tempSize;
  std::tie(temporary, tempSize) = allocateForCoercion(
      IGF, largerCoercion, loadableTI.getStorageType(), "temp-coercion");
  // Make sure we have sufficiently large alignment.
  adjustAllocaAlignment(DataLayout, temporary, coercionTy);
  adjustAllocaAlignment(DataLayout, temporary, overlappedCoercionTy);
  auto &Builder = IGF.Builder;
  Builder.CreateLifetimeStart(temporary, tempSize);
  // Initialize the memory of the temporary.
  Address storageAddr = Builder.CreateBitCast(
      temporary, loadableTI.getStorageType()->getPointerTo());
  loadableTI.initialize(IGF, fromNonNative, storageAddr, isOutlined);
  // Load the expanded type elements from memory.
  auto coercionAddr = Builder.CreateElementBitCast(temporary, coercionTy);
  unsigned expandedMapIdx = 0;
  SmallVector<llvm::Value *, 8> expandedElts(expandedTys.size(), nullptr);
  // Load each non-padding element of the given coercion struct into its
  // mapped position in expandedElts.
  auto loadFromFn = [&](llvm::StructType *ty, Address structAddr) {
    for (auto eltIndex : indices(ty->elements())) {
      auto layout = DataLayout.getStructLayout(ty);
      auto eltTy = ty->getElementType(eltIndex);
      // Skip padding fields.
      if (eltTy->isArrayTy())
        continue;
      Address eltAddr = Builder.CreateStructGEP(structAddr, eltIndex, layout);
      llvm::Value *elt = Builder.CreateLoad(eltAddr);
      auto index = expandedTyIndicesMap[expandedMapIdx];
      assert(expandedElts[index] == nullptr);
      expandedElts[index] = elt;
      ++expandedMapIdx;
    }
  };
  loadFromFn(coercionTy, coercionAddr);
  if (!overlappedCoercionTy->isEmptyTy()) {
    auto overlappedCoercionAddr =
        Builder.CreateElementBitCast(temporary, overlappedCoercionTy);
    loadFromFn(overlappedCoercionTy, overlappedCoercionAddr);
  }
  Builder.CreateLifetimeEnd(temporary, tempSize);
  // Add the values to the explosion.
  for (auto *val : expandedElts)
    nativeExplosion.add(val);
  assert(expandedTys.size() == nativeExplosion.size());
  return nativeExplosion;
}
/// Return \p result from the current function.
///
/// With the swift calling convention, the values are first mapped into the
/// native return schema; otherwise they are packed/coerced directly to the
/// function's ABI return type.
void IRGenFunction::emitScalarReturn(SILType resultType, Explosion &result,
                                     bool isSwiftCCReturn, bool isOutlined) {
  if (result.empty()) {
    assert(IGM.getTypeInfo(resultType).nativeReturnValueSchema(IGM).empty() &&
           "Empty explosion must match the native calling convention");
    Builder.CreateRetVoid();
    return;
  }
  // In the native case no coercion is needed.
  if (isSwiftCCReturn) {
    auto &nativeSchema =
        IGM.getTypeInfo(resultType).nativeReturnValueSchema(IGM);
    assert(!nativeSchema.requiresIndirect());
    Explosion native =
        nativeSchema.mapIntoNative(IGM, *this, result, resultType, isOutlined);
    if (native.size() == 1) {
      Builder.CreateRet(native.claimNext());
      return;
    }
    // Multiple native values are packed into the expanded struct type.
    llvm::Value *nativeAgg =
        llvm::UndefValue::get(nativeSchema.getExpandedType(IGM));
    for (unsigned i = 0, e = native.size(); i != e; ++i) {
      llvm::Value *elt = native.claimNext();
      nativeAgg = Builder.CreateInsertValue(nativeAgg, elt, i);
    }
    Builder.CreateRet(nativeAgg);
    return;
  }
  // Otherwise we potentially need to coerce the type. We don't need to go
  // through the mapping to the native calling convention.
  auto *ABIType = CurFn->getReturnType();
  if (result.size() == 1) {
    auto *returned = result.claimNext();
    if (ABIType != returned->getType())
      returned = coerceValue(returned, ABIType, IGM.DataLayout);
    Builder.CreateRet(returned);
    return;
  }
  auto &resultTI = IGM.getTypeInfo(resultType);
  auto schema = resultTI.getSchema();
  auto *bodyType = schema.getScalarResultType(IGM);
  // Multiple return values are returned as a struct.
  assert(cast<llvm::StructType>(bodyType)->getNumElements() == result.size());
  llvm::Value *resultAgg = llvm::UndefValue::get(bodyType);
  for (unsigned i = 0, e = result.size(); i != e; ++i) {
    llvm::Value *elt = result.claimNext();
    resultAgg = Builder.CreateInsertValue(resultAgg, elt, i);
  }
  if (ABIType != bodyType)
    resultAgg = coerceValue(resultAgg, ABIType, IGM.DataLayout);
  Builder.CreateRet(resultAgg);
}
/// Modify the given variable to hold a pointer whose type is the
/// LLVM lowering of the given function type, and return the signature
/// for the type.
///
/// \param fnPtr in/out: replaced with the bitcast pointer value.
static Signature emitCastOfFunctionPointer(IRGenFunction &IGF,
                                           llvm::Value *&fnPtr,
                                           CanSILFunctionType fnType) {
  // Figure out the function type.
  auto sig = IGF.IGM.getSignature(fnType);
  // Emit the cast.
  fnPtr = IGF.Builder.CreateBitCast(fnPtr, sig.getType()->getPointerTo());
  // Return the information.
  return sig;
}
/// Build a Callee for invoking an Objective-C block.
///
/// Loads the invocation function pointer from the block header (field 3 of
/// the block layout) and uses the block object itself as the context.
Callee irgen::getBlockPointerCallee(IRGenFunction &IGF,
                                    llvm::Value *blockPtr,
                                    CalleeInfo &&info) {
  // Grab the block pointer and make it the first physical argument.
  llvm::PointerType *blockPtrTy = IGF.IGM.ObjCBlockPtrTy;
  auto castBlockPtr = IGF.Builder.CreateBitCast(blockPtr, blockPtrTy);
  // Extract the invocation pointer for blocks.
  auto blockStructTy = blockPtrTy->getElementType();
  llvm::Value *invokeFnPtrPtr =
      IGF.Builder.CreateStructGEP(blockStructTy, castBlockPtr, 3);
  Address invokeFnPtrAddr(invokeFnPtrPtr, IGF.IGM.getPointerAlignment());
  llvm::Value *invokeFnPtr = IGF.Builder.CreateLoad(invokeFnPtrAddr);
  auto sig = emitCastOfFunctionPointer(IGF, invokeFnPtr, info.OrigFnType);
  FunctionPointer fn(invokeFnPtr, sig);
  return Callee(std::move(info), fn, blockPtr);
}
/// Build a Callee for a swift function pointer plus context pointer.
///
/// If \p castOpaqueToRefcountedContext is set, \p dataPtr must be an opaque
/// pointer and is cast to the refcounted context pointer type.
Callee irgen::getSwiftFunctionPointerCallee(
    IRGenFunction &IGF, llvm::Value *fnPtr, llvm::Value *dataPtr,
    CalleeInfo &&calleeInfo, bool castOpaqueToRefcountedContext) {
  auto sig = emitCastOfFunctionPointer(IGF, fnPtr, calleeInfo.OrigFnType);
  FunctionPointer fn(fnPtr, sig);
  if (castOpaqueToRefcountedContext) {
    assert(dataPtr && dataPtr->getType() == IGF.IGM.OpaquePtrTy &&
           "Expecting trivial closure context");
    dataPtr = IGF.Builder.CreateBitCast(dataPtr, IGF.IGM.RefCountedPtrTy);
  }
  return Callee(std::move(calleeInfo), fn, dataPtr);
}
/// Build a Callee for a plain C function pointer (no context argument).
Callee irgen::getCFunctionPointerCallee(IRGenFunction &IGF,
                                        llvm::Value *fnPtr,
                                        CalleeInfo &&calleeInfo) {
  auto signature = emitCastOfFunctionPointer(IGF, fnPtr, calleeInfo.OrigFnType);
  return Callee(std::move(calleeInfo), FunctionPointer(fnPtr, signature));
}
/// Form a direct FunctionPointer for \p fnPtr, deriving the signature from
/// the given SIL function type.
FunctionPointer
FunctionPointer::forDirect(IRGenModule &IGM, llvm::Constant *fnPtr,
                           CanSILFunctionType fnType) {
  return forDirect(fnPtr, IGM.getSignature(fnType));
}
/// Recover a FunctionPointer from a value previously stored into an
/// explosion as an opaque i8* (see getExplosionValue).
FunctionPointer
FunctionPointer::forExplosionValue(IRGenFunction &IGF, llvm::Value *fnPtr,
                                   CanSILFunctionType fnType) {
  // Bitcast out of an opaque pointer type.
  assert(fnPtr->getType() == IGF.IGM.Int8PtrTy);
  auto sig = emitCastOfFunctionPointer(IGF, fnPtr, fnType);
  return FunctionPointer(fnPtr, sig);
}
/// Bitcast this function pointer to an opaque i8* suitable for storage in
/// an explosion.
///
/// NOTE(review): \p fnType is unused here; presumably kept for symmetry
/// with forExplosionValue -- confirm before removing.
llvm::Value *
FunctionPointer::getExplosionValue(IRGenFunction &IGF,
                                   CanSILFunctionType fnType) const {
  // Bitcast to an opaque pointer type.
  llvm::Value *fnPtr =
      IGF.Builder.CreateBitCast(getPointer(), IGF.IGM.Int8PtrTy);
  return fnPtr;
}
| brentdax/swift | lib/IRGen/GenCall.cpp | C++ | apache-2.0 | 127,302 |
# Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from voltha.protos.events_pb2 import AlarmEventType, AlarmEventSeverity, AlarmEventCategory
from voltha.extensions.alarms.adapter_alarms import AlarmBase
class OltLosAlarm(AlarmBase):
    """Loss-of-signal alarm raised against an OLT interface (NNI or PON)."""

    def __init__(self, alarm_mgr, intf_id, port_type_name):
        alarm_fields = dict(object_type='olt LOS',
                            alarm='OLT_LOS',
                            alarm_category=AlarmEventCategory.OLT,
                            alarm_type=AlarmEventType.COMMUNICATION,
                            alarm_severity=AlarmEventSeverity.MAJOR)
        super(OltLosAlarm, self).__init__(alarm_mgr, **alarm_fields)
        # The port type distinguishes an NNI LOS from a PON LOS.
        self._intf_id = intf_id
        self._port_type_name = port_type_name

    def get_context_data(self):
        """Context payload attached to the alarm event."""
        # NOTE(review): the trailing ':' in 'olt-intf-id:' looks like a typo,
        # but it is preserved because consumers may key on the literal string.
        return {
            'olt-intf-id:': self._intf_id,
            'olt-port-type-name': self._port_type_name,
        }
| opencord/voltha | voltha/extensions/alarms/olt/olt_los_alarm.py | Python | apache-2.0 | 1,492 |
package galvin;
/**
 * A component whose selection state can be queried and changed.
 */
public interface Selectable {
    /** @return true if this item is currently selected */
    boolean isSelected();

    /** @param selected the new selection state */
    void setSelected( boolean selected );
}
| thomasgalvin/Utils | src/main/java/galvin/Selectable.java | Java | apache-2.0 | 131 |
/*
* Copyright (c) 2016 Yahoo Inc.
* Licensed under the terms of the Apache version 2.0 license.
* See LICENSE file for terms.
*/
package com.yahoo.yqlplus.engine.guice;
import com.google.common.base.Joiner;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.yahoo.yqlplus.api.Exports;
import com.yahoo.yqlplus.api.Source;
import com.yahoo.yqlplus.engine.api.DependencyNotFoundException;
import com.yahoo.yqlplus.engine.internal.plan.*;
import com.yahoo.yqlplus.engine.internal.source.ExportUnitGenerator;
import com.yahoo.yqlplus.engine.internal.source.SourceUnitGenerator;
import com.yahoo.yqlplus.language.parser.Location;
import com.yahoo.yqlplus.language.parser.ProgramCompileException;
import java.util.List;
import java.util.Map;
/**
 * Implement the Namespace binding with a Guice MapBinder.
 * <p>
 * Sources and exported modules are looked up by their dotted path name in
 * the injected maps and adapted into planner types on demand.
 */
public class MultibinderPlannerNamespace implements SourceNamespace, ModuleNamespace {
    private final Map<String, Provider<Source>> sourceBindings;
    private final Map<String, Provider<Exports>> exportsBindings;

    /** Joins a path such as [a, b, c] into the binding key "a.b.c". */
    private String keyFor(List<String> path) {
        // java.lang.String.join is equivalent to the previous Guava
        // Joiner.on('.').join(...) call and avoids the Guava dependency here.
        return String.join(".", path);
    }

    @Inject
    MultibinderPlannerNamespace(Map<String, Provider<Exports>> exportsBindings, Map<String, Provider<Source>> sourceBindings) {
        this.exportsBindings = exportsBindings;
        this.sourceBindings = sourceBindings;
    }

    /**
     * Resolves a module by its dotted path.
     *
     * @throws ProgramCompileException if no module is bound under the path
     */
    @Override
    public ModuleType findModule(Location location, ContextPlanner planner, List<String> modulePath) {
        Provider<Exports> moduleProvider = exportsBindings.get(keyFor(modulePath));
        if (moduleProvider == null) {
            // NOTE(review): message says "source" for a module lookup --
            // possibly intentional for consistency; confirm before changing.
            throw new ProgramCompileException(location, "No source '%s' found", keyFor(modulePath));
        }
        ExportUnitGenerator adapter = new ExportUnitGenerator(planner.getGambitScope());
        return adapter.apply(modulePath, moduleProvider);
    }

    /**
     * Resolves a source by its dotted path.
     *
     * @throws DependencyNotFoundException if no source is bound under the path
     */
    @Override
    public SourceType findSource(Location location, ContextPlanner planner, List<String> sourcePath) {
        Provider<Source> sourceProvider = sourceBindings.get(keyFor(sourcePath));
        if (sourceProvider == null) {
            throw new DependencyNotFoundException(location, "No source '%s' found", keyFor(sourcePath));
        }
        SourceUnitGenerator adapter = new SourceUnitGenerator(planner.getGambitScope());
        return adapter.apply(sourcePath, sourceProvider);
    }
}
| slolars/yql-plus | yqlplus_engine/src/main/java/com/yahoo/yqlplus/engine/guice/MultibinderPlannerNamespace.java | Java | apache-2.0 | 2,406 |
/*
* Copyright 2012-2019 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.marklogic.hub.web.model;
/**
 * Simple data holder pairing a completion percentage with a status message.
 * <p>
 * NOTE(review): fields are public and mutable; presumably instances are
 * serialized directly for the web layer -- confirm before encapsulating.
 */
public class StatusMessage {
    /** Percentage of the task completed so far. */
    public int percentComplete;
    /** Human-readable description of the current status. */
    public String message;

    public StatusMessage(int percentComplete, String message) {
        this.percentComplete = percentComplete;
        this.message = message;
    }
}
| marklogic/data-hub-in-a-box | web/src/main/java/com/marklogic/hub/web/model/StatusMessage.java | Java | apache-2.0 | 901 |
package jo.alexa.sim.logic;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.List;
import jo.alexa.sim.data.ApplicationBean;
import jo.alexa.sim.data.IntentBean;
import jo.alexa.sim.data.PhraseSegmentBean;
import jo.alexa.sim.data.SlotBean;
import jo.alexa.sim.data.SlotSegmentBean;
import jo.alexa.sim.data.TextSegmentBean;
import jo.alexa.sim.data.UtteranceBean;
public class UtteranceLogic
{
private static final String[] BUILT_IN_UTTERANCES = {
"AMAZON.CancelIntent\tcancel",
"AMAZON.CancelIntent\tnever mind",
"AMAZON.CancelIntent\tforget it",
"AMAZON.HelpIntent\thelp",
"AMAZON.HelpIntent\thelp me",
"AMAZON.HelpIntent\tcan you help me",
"AMAZON.NoIntent\tno",
"AMAZON.NoIntent\tno thanks",
"AMAZON.RepeatIntent\trepeat",
"AMAZON.RepeatIntent\tsay that again",
"AMAZON.RepeatIntent\trepeat that",
"AMAZON.StartOverIntent\tstart over",
"AMAZON.StartOverIntent\trestart",
"AMAZON.StartOverIntent\tstart again",
"AMAZON.StopIntent\tstop",
"AMAZON.StopIntent\toff",
"AMAZON.StopIntent\tshut up",
"AMAZON.YesIntent\tyes",
"AMAZON.YesIntent\tyes please",
"AMAZON.YesIntent\tsure",
};
public static void read(ApplicationBean app, Reader r) throws IOException
{
app.getUtterances().clear();
for (IntentBean intent : app.getIntentIndex().values())
for (SlotBean slot : intent.getSlots())
slot.getValues().clear();
BufferedReader rdr = new BufferedReader(r);
for (;;)
{
String inbuf = rdr.readLine();
if (inbuf == null)
break;
inbuf = inbuf.trim();
if (inbuf.length() > 0)
parseUtterance(app, inbuf, true);
}
rdr.close();
// simulate built in utterances
for (String inbuf : BUILT_IN_UTTERANCES)
parseUtterance(app, inbuf, false);
}
private static void parseUtterance(ApplicationBean app, String inbuf, boolean strict)
{
int o = inbuf.indexOf('\t');
if (o < 0)
{
o = inbuf.indexOf(' ');
if (o < 0)
throw new IllegalArgumentException("Badly formed utterance line '"+inbuf+"'");
}
UtteranceBean utterance = new UtteranceBean();
utterance.setIntent(app.getIntentIndex().get(inbuf.substring(0, o)));
if ((utterance.getIntent() == null) && strict)
throw new IllegalArgumentException("Unknown intent '"+inbuf.substring(0, o)+"'");
inbuf = inbuf.substring(o + 1).trim();
while (inbuf.length() > 0)
if (inbuf.charAt(0) == '{')
{
int end = inbuf.indexOf('}');
if (end < 0)
throw new IllegalArgumentException("Can't find end of slot '"+inbuf+"'");
String slotPhrase = inbuf.substring(1, end);
inbuf = inbuf.substring(end + 1).trim();
SlotSegmentBean slotSeg = new SlotSegmentBean();
int mid = slotPhrase.indexOf('|');
if (mid < 0)
{
slotSeg.setText(null);
slotSeg.setSlot(app.getSlotIndex().get(slotPhrase));
}
else
{
slotSeg.setText(slotPhrase.substring(0, mid).toLowerCase());
slotSeg.setSlot(app.getSlotIndex().get(slotPhrase.substring(mid + 1)));
}
if (slotSeg.getSlot() == null)
throw new IllegalArgumentException("Unknown slot '"+slotPhrase.substring(mid + 1)+"'");
utterance.getPhrase().add(slotSeg);
if (slotSeg.getText() != null)
slotSeg.getSlot().getValues().add(slotSeg.getText());
}
else
{
int end = inbuf.indexOf('{');
if (end < 0)
end = inbuf.length();
TextSegmentBean textSeg = new TextSegmentBean();
textSeg.setText(inbuf.substring(0, end).trim().toLowerCase());
inbuf = inbuf.substring(end).trim();
utterance.getPhrase().add(textSeg);
}
app.getUtterances().add(utterance);
}
public static String renderAsHTML(List<UtteranceBean> utterances)
{
StringBuffer html = new StringBuffer();
for (UtteranceBean u : utterances)
{
if (html.length() > 0)
html.append("<br/>");
html.append(renderAsHTML(u));
}
return html.toString();
}
/**
 * Renders a single utterance as HTML: the intent name in bold, followed
 * by each phrase segment — plain text verbatim, slot segments wrapped in
 * a {@code <span>} whose tooltip names the slot.
 *
 * @param utterance utterance to render; must have a non-null intent
 * @return HTML fragment for the utterance
 * @throws IllegalArgumentException if a phrase segment has an unknown type
 */
public static String renderAsHTML(UtteranceBean utterance)
{
    // StringBuilder instead of StringBuffer: method-local, no locking needed.
    StringBuilder html = new StringBuilder();
    html.append("<b>");
    html.append(utterance.getIntent().getIntent());
    html.append("</b>");
    html.append(" ");
    for (PhraseSegmentBean p : utterance.getPhrase())
    {
        if (p instanceof TextSegmentBean)
        {
            html.append(((TextSegmentBean)p).getText());
            html.append(" ");
        }
        else if (p instanceof SlotSegmentBean)
        {
            // Hoist the cast instead of repeating it on every access.
            SlotSegmentBean slotSeg = (SlotSegmentBean)p;
            // NOTE(review): the tooltip prints getName() twice — the second
            // occurrence looks like it was meant to be a different property
            // (e.g. the slot's type). Output kept byte-identical until the
            // intended field is confirmed.
            html.append("<span title=\""+slotSeg.getSlot().getName()+" ("+slotSeg.getSlot().getName()+")\">");
            html.append(slotSeg.getText());
            html.append("</span>");
            html.append(" ");
        }
        else
            throw new IllegalArgumentException("Unknown PhraseSegment: "+p.getClass().getName());
    }
    return html.toString();
}
}
| jjaquinta/EchoSim | jo.alexa.sim/src/jo/alexa/sim/logic/UtteranceLogic.java | Java | apache-2.0 | 5,919 |
# -*- coding: utf-8 -*-
# Copyright 2017 DST Controls
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
osisoftpy.factory
~~~~~~~~~~~~
"""
from __future__ import (absolute_import, division, unicode_literals)
from future.builtins import *
from future.utils import iteritems
def create(factory, thing, session, webapi=None):
    """Instantiate an object via *factory* from a raw PI Web API payload.

    :param factory: Factory whose ``create(**kwargs)`` builds the object.
    :param thing: Mapping of attributes as returned by the PI Web API;
        keys are lower-cased before being forwarded to the factory.
    :param session: HTTP session attached to the created object.
    :param webapi: Optional WebAPI object attached to the created object.
    :return: The object produced by ``factory.create(**payload)``.
    """
    # Dict comprehension replaces the old dict(map(lambda ...)) +
    # future.utils.iteritems construct; behavior is identical.
    payload = {key.lower(): value for key, value in thing.items()}
    # added to avoid creating Value objects if the value was considered bad values
    # but we don't need this since we don't want the library to cull bad values that
    # the pi web api gave us.
    #
    # if 'good' in payload:
    #     if not payload['good']:
    #         return None
    payload.update({'session': session, 'webapi': webapi})
    return factory.create(**payload)
class Factory(object):
    """Minimal generic factory: wraps a class (or any callable) and builds
    instances of it from keyword arguments."""

    def __init__(self, type_):
        # Class/callable used by create(); the trailing underscore avoids
        # shadowing the builtin ``type``.
        self.type = type_

    def create(self, **kwargs):
        """Instantiate the wrapped type, forwarding all keyword arguments."""
        return self.type(**kwargs)
| dstcontrols/osisoftpy | src/osisoftpy/factory.py | Python | apache-2.0 | 1,646 |
using System;
namespace Apaf.NFSdb.Core.Exceptions
{
/// <summary>
/// Thrown when an NFSdb journal file is unexpectedly empty.
/// Constructors are internal: only the library itself raises this error.
/// </summary>
/// <remarks>
/// NOTE(review): marked [Serializable] but no serialization constructor
/// (SerializationInfo, StreamingContext) is provided — confirm whether
/// cross-AppDomain serialization of this exception is ever required.
/// </remarks>
[Serializable]
public class NFSdbEmptyFileException : NFSdbBaseExcepton
{
    internal NFSdbEmptyFileException()
    {
    }

    /// <summary>Creates the exception with a composite-format message.</summary>
    internal NFSdbEmptyFileException(string message, params object[] args)
        : base(message, args)
    {
    }

    /// <summary>Creates the exception with a message and an inner exception.</summary>
    internal NFSdbEmptyFileException(string message, Exception ex, params object[] args)
        : base(message, ex, args)
    {
    }
}
} | NFSdb/nfsdb-csharp | Apaf.NFSdb.Core/Exceptions/NFSdbEmptyFileException.cs | C# | apache-2.0 | 499 |
/**
* Copyright (C) 2006-2020 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talend.sdk.component.runtime.manager.reflect;
import org.junit.jupiter.api.Test;
import org.talend.sdk.component.api.component.Icon;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Locale;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
/**
 * Tests for {@link IconFinder}: direct {@code @Icon} annotations, indirect
 * (meta-annotated) icons, and the fallback behavior of {@code findIcon}.
 */
class IconFinderTest {

    private final IconFinder finder = new IconFinder();

    @Test
    void findDirectIcon() {
        assertFalse(finder.findDirectIcon(None.class).isPresent());
        assertEquals("foo", finder.findDirectIcon(Direct.class).get());
    }

    @Test
    void findInDirectIcon() {
        assertFalse(finder.findDirectIcon(Indirect.class).isPresent());
        assertEquals("yes", finder.findIndirectIcon(Indirect.class).get());
    }

    @Test
    void findMetaIcon() {
        // Fixed copy/paste from findInDirectIcon: this test is about Meta.
        // Meta carries no direct @Icon, only the @MetaIcon meta-annotation,
        // so the direct lookup must come back empty.
        assertFalse(finder.findDirectIcon(Meta.class).isPresent());
        assertEquals("complex_", finder.findIndirectIcon(Meta.class).get());
    }

    @Test
    void findIcon() {
        assertEquals("foo", finder.findIcon(Direct.class));
        assertEquals("yes", finder.findIcon(Indirect.class));
        assertEquals("default", finder.findIcon(None.class));
    }

    @Test
    void helperMethod() {
        {
            final boolean isCustom = finder.isCustom(finder.extractIcon(Direct.class));
            final String name = finder.findIcon(Direct.class);
            assertEquals("foo/true", name + '/' + isCustom);
        }
        {
            final boolean isCustom = finder.isCustom(finder.extractIcon(Meta.class));
            final String name = finder.findIcon(Meta.class);
            assertEquals("complex_/false", name + '/' + isCustom);
        }
    }

    /** Fixture: no icon information at all. */
    public static class None {
    }

    /** Fixture: direct custom @Icon annotation. */
    @Icon(custom = "foo")
    public static class Direct {
    }

    /** Fixture: icon supplied through a meta-annotated custom annotation. */
    @MyIcon
    public static class Indirect {
    }

    /** Fixture: icon supplied through an enum-valued meta annotation. */
    @MetaIcon(MetaIcon.MetaIconValue.COMPLEX)
    public static class Meta {
    }

    @Icon
    @Target(TYPE)
    @Retention(RUNTIME)
    public @interface MetaIcon {

        MetaIconValue value();

        // optional but normally not needed EnumOrString type() default "custom";

        enum MetaIconValue {
            SIMPLE,
            COMPLEX;

            public String getKey() {
                return name().toLowerCase(Locale.ROOT) + '_';
            }
        }
    }

    @Target(TYPE)
    @Retention(RUNTIME)
    public @interface MyIcon {

        String value() default "yes";
    }
}
| chmyga/component-runtime | component-runtime-manager/src/test/java/org/talend/sdk/component/runtime/manager/reflect/IconFinderTest.java | Java | apache-2.0 | 3,250 |
package indep_screen3;
import com.navid.nifty.flow.ScreenFlowManager;
import com.navid.nifty.flow.ScreenFlowManagerImpl;
import de.lessvoid.nifty.Nifty;
import de.lessvoid.nifty.screen.Screen;
import de.lessvoid.nifty.screen.ScreenController;
/**
* Created by alberto on 08/07/15.
*/
/**
 * Screen controller for the third independent screen. Navigation is
 * delegated to the {@link ScreenFlowManager}: a direction hint is set and
 * control jumps to the shared redirector screen, which resolves the hint.
 */
public class Controller3 implements ScreenController {

    /** Name of the pseudo-screen that resolves navigation hints
     *  (previously duplicated as a string literal in back()/next()). */
    private static final String REDIRECTOR_SCREEN = "redirector";

    private final ScreenFlowManager screenFlowManager;
    private Nifty nifty;

    public Controller3(ScreenFlowManager screenFlowManager) {
        this.screenFlowManager = screenFlowManager;
    }

    @Override
    public void bind(Nifty nifty, Screen screen) {
        // Nifty is only available once the screen is bound.
        this.nifty = nifty;
    }

    @Override
    public void onStartScreen() {
        // No per-display initialization required.
    }

    @Override
    public void onEndScreen() {
        // No cleanup required.
    }

    /** Navigates to the previous screen in the flow. */
    public void back() {
        screenFlowManager.setNextScreenHint(ScreenFlowManagerImpl.PREV);
        nifty.gotoScreen(REDIRECTOR_SCREEN);
    }

    /** Navigates to the next screen in the flow. */
    public void next() {
        screenFlowManager.setNextScreenHint(ScreenFlowManagerImpl.NEXT);
        nifty.gotoScreen(REDIRECTOR_SCREEN);
    }
}
| albertonavarro/nifty-flow | examples/ex1/src/main/java/indep_screen3/Controller3.java | Java | apache-2.0 | 1,048 |
/**
*
*/
package com.catalyst.sonar.score.api;
import static org.junit.Assert.*;
import static com.catalyst.sonar.score.api.ApiTestConstants.*;
import org.junit.Before;
import org.junit.Test;
/**
* Test Class for {@link Criterion}.
*
* @author JDunn
*/
/**
 * Unit tests for {@link Criterion}: constructors, property accessors, and
 * hash-code consistency for equal state.
 *
 * @author JDunn
 */
public class CriterionTest {

    private Criterion criterion;
    private Criterion sameValuedCriterion;

    /**
     * Builds two criteria with identical state before each test.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
        criterion = new Criterion(METRIC_1, AMOUNT_1, DAYS_1);
        sameValuedCriterion = new Criterion(METRIC_1, AMOUNT_1, DAYS_1);
    }

    /**
     * Equal state must yield equal hash codes — also when the metric is null.
     * Test method for {@link Criterion#hashCode()}.
     */
    @Test
    public void testHashCode() {
        assertEquals(criterion.hashCode(), sameValuedCriterion.hashCode());
        criterion.setMetric(NULL_METRIC);
        sameValuedCriterion.setMetric(NULL_METRIC);
        assertEquals(criterion.hashCode(), sameValuedCriterion.hashCode());
    }

    /**
     * The no-arg constructor must leave every property at its default.
     * Test method for {@link Criterion#Criterion()}.
     */
    @Test
    public void testCriterion() {
        Criterion defaultCriterion = new Criterion();
        assertEquals(0, defaultCriterion.getAmount(), 0);
        assertNull(defaultCriterion.getMetric());
        assertEquals(0, defaultCriterion.getDays());
    }

    /**
     * The full constructor must store metric, amount and days.
     * Test method for
     * {@link Criterion#Criterion(org.sonar.api.measures.Metric, double, int)}.
     */
    @Test
    public void testCriterionMetricDoubleInt() {
        assertEquals(METRIC_1, criterion.getMetric());
        assertEquals(AMOUNT_1, criterion.getAmount(), 0);
        assertEquals(DAYS_1, criterion.getDays());
    }

    /** Test method for {@link Criterion#getMetric()}. */
    @Test
    public void testGetMetric() {
        assertEquals(METRIC_1, criterion.getMetric());
    }

    /** Test method for {@link Criterion#setMetric(Metric)}. */
    @Test
    public void testSetMetric() {
        criterion.setMetric(METRIC_2);
        assertEquals(METRIC_2, criterion.getMetric());
    }

    /** Test method for {@link Criterion#getAmount()}. */
    @Test
    public void testGetAmount() {
        assertEquals(AMOUNT_1, criterion.getAmount(), 0);
    }

    /** Test method for {@link Criterion#setAmount(double)}. */
    @Test
    public void testSetAmount() {
        criterion.setAmount(AMOUNT_2);
        assertEquals(AMOUNT_2, criterion.getAmount(), 0);
    }

    /** Test method for {@link Criterion#getDays()}. */
    @Test
    public void testGetDays() {
        assertEquals(DAYS_1, criterion.getDays());
    }

    /** Test method for {@link Criterion#setDays(int)}. */
    @Test
    public void testSetDays() {
        criterion.setDays(DAYS_2);
        assertEquals(DAYS_2, criterion.getDays());
    }
}
| CatalystIT/sonar-score-plugin | src/test/java/com/catalyst/sonar/score/api/CriterionTest.java | Java | apache-2.0 | 2,683 |
reverseStr(s string, k int) string {
bs := []byte(s)
for i := 0; i < len(bs); i += 2 * k {
if (i+k-1) >= len(bs) {
reverse(&bs, i, len(bs)-1)
}else{
reverse(&bs, i, i+k-1)
}
}
return string(bs[:])
}
// reverse swaps bytes of *s in place between indices start and end,
// both inclusive. It is a no-op when start >= end.
func reverse(s *[]byte, start int, end int) {
	b := *s
	for start < end {
		b[start], b[end] = b[end], b[start]
		start++
		end--
	}
}
| MingfeiPan/leetcode | string/541.go | GO | apache-2.0 | 436 |
package com.youdu.core.display;
import android.view.ViewGroup;
/**
 * Created by qndroid on 16/10/27.
 *
 * @function Bridge between the display-ad SDK and the host application:
 *           receives parameters from outside and issues load requests,
 *           relaying the results back through {@link DisplayAdAppListener}.
 */
public class DisplayAdContext implements
        DisplayAdSlot.DisplayConextListener {

    /**
     * Data
     */
    private DisplayAdSlot mDisplayAdSlot; // image-type ad slot
    private DisplayAdAppListener mAdAppListener; // host-app callback

    public DisplayAdContext(ViewGroup parentView) {
        // The slot renders into the given parent view and reports its
        // load results back to this context.
        mDisplayAdSlot = new DisplayAdSlot(parentView);
        mDisplayAdSlot.setContextListener(this);
    }

    /** Registers the host-app listener notified about load success/failure. */
    public void setAdAppListener(DisplayAdAppListener listener) {
        mAdAppListener = listener;
    }

    @Override
    public void onLoadingComplete() {
        if (mAdAppListener != null) {
            mAdAppListener.onLoadingComplete();
        }
    }

    // The actual image load failed.
    @Override
    public void onLoadingFailed() {
        if (mAdAppListener != null) {
            mAdAppListener.onLoadingFailed();
        }
    }

    /** Releases the underlying ad slot's resources. */
    public void onDispose() {
        if (mDisplayAdSlot != null) {
            mDisplayAdSlot.onDispose();
        }
    }

    /** Callbacks delivered to the embedding application. */
    public interface DisplayAdAppListener {
        void onLoadingComplete();

        void onLoadingFailed();
    }
}
| FoxconnPeter/IMOCC | imooc_business/vuandroidadsdk/src/main/java/com/youdu/core/display/DisplayAdContext.java | Java | apache-2.0 | 1,271 |
package com.tangjf.tally.dao;
import com.tangjf.framework.dao.BaseMapper;
import com.tangjf.tally.dto.TallyType;
/**
 * MyBatis mapper for {@code TallyType} records; inherits its generic CRUD
 * operations from {@link BaseMapper}.
 */
public interface TallyTypeMapper extends BaseMapper<TallyType> {
}
package io.connecto.connectoapi;
import org.json.JSONArray;
import org.json.JSONException;
import java.util.ArrayList;
import java.util.List;
/**
* A model class which wraps around a list of segment objects.
*
* An instance of this class is returned by the getSegments api.
* @see ConnectoAPI#getSegments(String, String)
* */
public class SegmentResponse {
    /**
     * Wraps the raw JSON array of segments returned by the API into
     * {@link Segment} objects. (The previous Javadoc — "Formats a generic
     * identify message" — was a copy/paste from another class.)
     *
     * @param segmentList JSON array of segment objects as returned by the
     *        service; may be empty
     * @throws JSONException if an element of the array is not convertible
     *         to a {@link Segment}
     *
     * @see ConnectoAPI#getSegments(String, String)
     */
    public SegmentResponse(JSONArray segmentList) throws JSONException {
        if (segmentList.length() != 0) {
            mSegments = new ArrayList<Segment>();
            for (int i = 0; i < segmentList.length(); i++) {
                // Construct directly instead of initializing to null first.
                Segment segmentObj = new Segment(segmentList.getJSONObject(i));
                mSegments.add(segmentObj);
            }
        }
    }

    /* package */
    /**
     * @return the parsed segments, or {@code null} when the response
     *         contained an empty array (historical behavior, preserved
     *         for existing callers)
     */
    public List<Segment> getSegments() {
        return mSegments;
    }

    /** Parsed segments; remains null for an empty input array. */
    private List<Segment> mSegments;
}
| connecto/connecto-java | src/main/java/io/connecto/connectoapi/SegmentResponse.java | Java | apache-2.0 | 1,428 |
////////////////////////////////////////////////////////////////////////////////
/// @brief rules for the query optimizer
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2010-2014 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Max Neunhoeffer
/// @author Jan Steemann
/// @author Copyright 2014, triagens GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#include "OptimizerRules.h"
#include "Aql/AggregateNode.h"
#include "Aql/AggregationOptions.h"
#include "Aql/ClusterNodes.h"
#include "Aql/ConditionFinder.h"
#include "Aql/ExecutionEngine.h"
#include "Aql/ExecutionNode.h"
#include "Aql/Function.h"
#include "Aql/Index.h"
#include "Aql/IndexNode.h"
#include "Aql/ModificationNodes.h"
#include "Aql/SortCondition.h"
#include "Aql/SortNode.h"
#include "Aql/TraversalConditionFinder.h"
#include "Aql/Variable.h"
#include "Aql/types.h"
#include "Basics/json-utilities.h"
using namespace triagens::aql;
using Json = triagens::basics::Json;
using EN = triagens::aql::ExecutionNode;
// -----------------------------------------------------------------------------
// --SECTION-- rules for the optimizer
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief adds a SORT operation for IN right-hand side operands
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::sortInValuesRule (Optimizer* opt,
                                      ExecutionPlan* plan,
                                      Optimizer::Rule const* rule) {
  // Rewrites  FILTER x IN <expr>  (or NOT IN) used inside a loop so that
  // <expr> is evaluated once, outside the loop, as SORTED_UNIQUE(<expr>).
  // The IN operator can then binary-search the sorted right-hand side.
  bool modified = false;
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));

  for (auto const& n : nodes) {
    // filter nodes always have one input variable
    auto varsUsedHere = n->getVariablesUsedHere();
    TRI_ASSERT(varsUsedHere.size() == 1);

    // now check who introduced our variable
    auto variable = varsUsedHere[0];
    auto setter = plan->getVarSetBy(variable->id);

    if (setter == nullptr ||
        setter->getType() != EN::CALCULATION) {
      // filter variable was not introduced by a calculation.
      continue;
    }

    // filter variable was introduced a CalculationNode. now check the expression
    auto s = static_cast<CalculationNode*>(setter);
    auto filterExpression = s->expression();
    auto inNode = filterExpression->nodeForModification();

    TRI_ASSERT(inNode != nullptr);

    // check the filter condition
    if ((inNode->type != NODE_TYPE_OPERATOR_BINARY_IN && inNode->type != NODE_TYPE_OPERATOR_BINARY_NIN) ||
        inNode->canThrow() ||
        ! inNode->isDeterministic()) {
      // we better not tamper with this filter
      continue;
    }

    auto rhs = inNode->getMember(1);

    if (rhs->type != NODE_TYPE_REFERENCE) {
      continue;
    }

    auto loop = n->getLoop();

    if (loop == nullptr) {
      // FILTER is not used inside a loop. so it will be used at most once
      // not need to sort the IN values then
      continue;
    }

    variable = static_cast<Variable const*>(rhs->getData());
    setter = plan->getVarSetBy(variable->id);

    if (setter == nullptr ||
        (setter->getType() != EN::CALCULATION && setter->getType() != EN::SUBQUERY)) {
      // variable itself was not introduced by a calculation.
      continue;
    }

    if (loop == setter->getLoop()) {
      // the FILTER and its value calculation are contained in the same loop
      // this means the FILTER will be executed as many times as its value
      // calculation. sorting the IN values will not provide a benefit here
      continue;
    }

    // below this many values, sorting brings no measurable benefit
    static size_t const Threshold = 8;

    auto ast = plan->getAst();
    AstNode const* originalArg = nullptr;

    if (setter->getType() == EN::CALCULATION) {
      AstNode const* originalNode = static_cast<CalculationNode*>(setter)->expression()->node();
      TRI_ASSERT(originalNode != nullptr);

      AstNode const* testNode = originalNode;

      if (originalNode->type == NODE_TYPE_FCALL &&
          static_cast<Function const*>(originalNode->getData())->externalName == "NOOPT") {
        // bypass NOOPT(...)
        TRI_ASSERT(originalNode->numMembers() == 1);
        auto args = originalNode->getMember(0);

        if (args->numMembers() > 0) {
          testNode = args->getMember(0);
        }
      }

      if (testNode->type == NODE_TYPE_VALUE ||
          testNode->type == NODE_TYPE_OBJECT) {
        // not really usable...
        continue;
      }

      if (testNode->type == NODE_TYPE_ARRAY &&
          testNode->numMembers() < Threshold) {
        // number of values is below threshold
        continue;
      }

      if (testNode->isSorted()) {
        // already sorted
        continue;
      }

      originalArg = originalNode;
    }
    else {
      TRI_ASSERT(setter->getType() == EN::SUBQUERY);
      auto sub = static_cast<SubqueryNode*>(setter);

      // estimate items in subquery
      size_t nrItems = 0;
      sub->getSubquery()->getCost(nrItems);

      if (nrItems < Threshold) {
        continue;
      }

      originalArg = ast->createNodeReference(sub->outVariable());
    }

    TRI_ASSERT(originalArg != nullptr);

    // build SORTED_UNIQUE(<original expression>) into a fresh temporary
    // variable computed right after the original setter.
    auto args = ast->createNodeArray();
    args->addMember(originalArg);
    auto sorted = ast->createNodeFunctionCall("SORTED_UNIQUE", args);

    auto outVar = ast->variables()->createTemporaryVariable();
    ExecutionNode* calculationNode = nullptr;
    auto expression = new Expression(ast, sorted);
    try {
      // CalculationNode takes ownership of the expression on success
      calculationNode = new CalculationNode(plan, plan->nextId(), expression, outVar);
    }
    catch (...) {
      delete expression;
      throw;
    }
    plan->registerNode(calculationNode);

    // make the new node a parent of the original calculation node
    calculationNode->addDependency(setter);
    auto const& oldParents = setter->getParents();
    TRI_ASSERT(! oldParents.empty());
    calculationNode->addParent(oldParents[0]);

    oldParents[0]->removeDependencies();
    oldParents[0]->addDependency(calculationNode);
    setter->setParent(calculationNode);

    if (setter->getType() == EN::CALCULATION) {
      // mark the original node as being removable, even if it can throw
      // this is special as the optimizer will normally not remove any nodes
      // if they throw - even when fully unused otherwise
      static_cast<CalculationNode*>(setter)->canRemoveIfThrows(true);
    }

    // finally adjust the variable inside the IN calculation
    inNode->changeMember(1, ast->createNodeReference(outVar));

    // set sortedness bit for the IN operator
    inNode->setBoolValue(true);

    modified = true;
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove redundant sorts
/// this rule modifies the plan in place:
/// - sorts that are covered by earlier sorts will be removed
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::removeRedundantSortsRule (Optimizer* opt,
                                              ExecutionPlan* plan,
                                              Optimizer::Rule const* rule) {
  // For every SORT node, walk its dependency chain looking for an earlier
  // SORT whose criteria make one of the two redundant. Nodes collected in
  // toUnlink are removed from the plan in one pass at the end.
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::SORT, true));

  if (nodes.empty()) {
    // quick exit
    opt->addPlan(plan, rule, false);
    return;
  }

  std::unordered_set<ExecutionNode*> toUnlink;
  triagens::basics::StringBuffer buffer(TRI_UNKNOWN_MEM_ZONE);

  for (auto const& n : nodes) {
    if (toUnlink.find(n) != toUnlink.end()) {
      // encountered a sort node that we already deleted
      continue;
    }

    auto const sortNode = static_cast<SortNode*>(n);

    auto sortInfo = sortNode->getSortInformation(plan, &buffer);

    if (sortInfo.isValid && ! sortInfo.criteria.empty()) {
      // we found a sort that we can understand
      std::vector<ExecutionNode*> stack;

      sortNode->addDependencies(stack);

      // number of intermediate nodes that make two non-adjacent sorts
      // non-mergeable (enumerations and throwing calculations)
      int nodesRelyingOnSort = 0;

      while (! stack.empty()) {
        auto current = stack.back();
        stack.pop_back();

        if (current->getType() == EN::SORT) {
          // we found another sort. now check if they are compatible!

          auto other = static_cast<SortNode*>(current)->getSortInformation(plan, &buffer);

          switch (sortInfo.isCoveredBy(other)) {
            case SortInformation::unequal: {
              // different sort criteria
              if (nodesRelyingOnSort == 0) {
                // a sort directly followed by another sort: now remove one of them
                if (other.canThrow || ! other.isDeterministic) {
                  // if the sort can throw or is non-deterministic, we must not remove it
                  break;
                }

                if (sortNode->isStable()) {
                  // we should not optimize predecessors of a stable sort (used in a COLLECT node)
                  // the stable sort is for a reason, and removing any predecessors sorts might
                  // change the result
                  break;
                }

                // remove sort that is a direct predecessor of a sort
                toUnlink.emplace(current);
              }
              break;
            }

            case SortInformation::otherLessAccurate: {
              toUnlink.emplace(current);
              break;
            }

            case SortInformation::ourselvesLessAccurate: {
              // the sort at the start of the pipeline makes the sort at the end
              // superfluous, so we'll remove it
              toUnlink.emplace(n);
              break;
            }

            case SortInformation::allEqual: {
              // the sort at the end of the pipeline makes the sort at the start
              // superfluous, so we'll remove it
              toUnlink.emplace(current);
              break;
            }
          }
        }
        else if (current->getType() == EN::FILTER) {
          // ok: a filter does not depend on sort order
        }
        else if (current->getType() == EN::CALCULATION) {
          // ok: a filter does not depend on sort order only if it does not throw
          if (current->canThrow()) {
            ++nodesRelyingOnSort;
          }
        }
        else if (current->getType() == EN::ENUMERATE_LIST ||
                 current->getType() == EN::ENUMERATE_COLLECTION ||
                 current->getType() == EN::TRAVERSAL) {
          // ok, but we cannot remove two different sorts if one of these node types is between them
          // example: in the following query, the one sort will be optimized away:
          //   FOR i IN [ { a: 1 }, { a: 2 } , { a: 3 } ] SORT i.a ASC SORT i.a DESC RETURN i
          // but in the following query, the sorts will stay:
          //   FOR i IN [ { a: 1 }, { a: 2 } , { a: 3 } ] SORT i.a ASC LET a = i.a SORT i.a DESC RETURN i
          ++nodesRelyingOnSort;
        }
        else {
          // abort at all other type of nodes. we cannot remove a sort beyond them
          // this includes COLLECT and LIMIT
          break;
        }

        if (! current->hasDependency()) {
          // node either has no or more than one dependency. we don't know what to do and must abort
          // note: this will also handle Singleton nodes
          break;
        }

        current->addDependencies(stack);
      }

      if (toUnlink.find(n) == toUnlink.end() &&
          sortNode->simplify(plan)) {
        // sort node had only constant expressions. it will make no difference if we execute it or not
        // so we can remove it
        toUnlink.emplace(n);
      }
    }
  }

  if (! toUnlink.empty()) {
    plan->unlinkNodes(toUnlink);
  }

  opt->addPlan(plan, rule, ! toUnlink.empty());
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove all unnecessary filters
/// this rule modifies the plan in place:
/// - filters that are always true are removed completely
/// - filters that are always false will be replaced by a NoResults node
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::removeUnnecessaryFiltersRule (Optimizer* opt,
                                                  ExecutionPlan* plan,
                                                  Optimizer::Rule const* rule) {
  // Constant FILTER conditions are resolved at plan time: always-true
  // filters are unlinked, always-false filters are replaced by a
  // NoResults node that short-circuits the pipeline.
  bool modified = false;
  std::unordered_set<ExecutionNode*> toUnlink;
  // should we enter subqueries??
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));

  for (auto const& n : nodes) {
    // filter nodes always have one input variable
    auto varsUsedHere = n->getVariablesUsedHere();
    TRI_ASSERT(varsUsedHere.size() == 1);

    // now check who introduced our variable
    auto variable = varsUsedHere[0];
    auto setter = plan->getVarSetBy(variable->id);

    if (setter == nullptr ||
        setter->getType() != EN::CALCULATION) {
      // filter variable was not introduced by a calculation.
      continue;
    }

    // filter variable was introduced a CalculationNode. now check the expression
    auto s = static_cast<CalculationNode*>(setter);
    auto root = s->expression()->node();

    TRI_ASSERT(root != nullptr);

    if (root->canThrow() || ! root->isDeterministic()) {
      // we better not tamper with this filter
      continue;
    }

    // filter expression is constant and thus cannot throw
    // we can now evaluate it safely
    TRI_ASSERT(! s->expression()->canThrow());

    if (root->isTrue()) {
      // filter is always true
      // remove filter node and merge with following node
      toUnlink.emplace(n);
      modified = true;
    }
    else if (root->isFalse()) {
      // filter is always false
      // now insert a NoResults node below it
      auto noResults = new NoResultsNode(plan, plan->nextId());
      plan->registerNode(noResults);
      plan->replaceNode(n, noResults);
      modified = true;
    }
  }

  if (! toUnlink.empty()) {
    plan->unlinkNodes(toUnlink);
  }

  opt->addPlan(plan, rule, modified);
}
#if 0
// NOTE(review): dead code — this whole region is compiled out by '#if 0'
// and kept for reference only; consider deleting it outright.
struct CollectVariableFinder {
  Variable const* searchVariable;
  std::unordered_set<std::string>& attributeNames;
  std::vector<AstNode const*> stack;
  bool canUseOptimization;
  bool isArgumentToLength;

  CollectVariableFinder (AggregateNode const* collectNode,
                         std::unordered_set<std::string>& attributeNames)
    : searchVariable(collectNode->outVariable()),
      attributeNames(attributeNames),
      stack(),
      canUseOptimization(true),
      isArgumentToLength(false) {
    TRI_ASSERT(searchVariable != nullptr);
    stack.reserve(4);
  }

  void analyze (AstNode const* node) {
    TRI_ASSERT(node != nullptr);

    if (! canUseOptimization) {
      // we already know we cannot apply this optimization
      return;
    }

    stack.push_back(node);

    size_t const n = node->numMembers();
    for (size_t i = 0; i < n; ++i) {
      auto sub = node->getMember(i);
      if (sub != nullptr) {
        // recurse into subnodes
        analyze(sub);
      }
    }

    if (node->type == NODE_TYPE_REFERENCE) {
      auto variable = static_cast<Variable const*>(node->getData());

      TRI_ASSERT(variable != nullptr);

      if (variable->id == searchVariable->id) {
        bool handled = false;
        auto const size = stack.size();

        if (size >= 3 &&
            stack[size - 3]->type == NODE_TYPE_EXPANSION) {
          // our variable is used in an expansion, e.g. g[*].attribute
          auto expandNode = stack[size - 3];
          TRI_ASSERT(expandNode->numMembers() == 2);
          TRI_ASSERT(expandNode->getMember(0)->type == NODE_TYPE_ITERATOR);
          auto expansion = expandNode->getMember(1);
          TRI_ASSERT(expansion != nullptr);
          while (expansion->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
            // note which attribute is used with our variable
            if (expansion->getMember(0)->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
              expansion = expansion->getMember(0);
            }
            else {
              attributeNames.emplace(expansion->getStringValue());
              handled = true;
              break;
            }
          }
        }
        else if (size >= 3 &&
                 stack[size - 2]->type == NODE_TYPE_ARRAY &&
                 stack[size - 3]->type == NODE_TYPE_FCALL) {
          auto func = static_cast<Function const*>(stack[size - 3]->getData());
          if (func->externalName == "LENGTH" &&
              stack[size - 2]->numMembers() == 1) {
            // call to function LENGTH() with our variable as its single argument
            handled = true;
            isArgumentToLength = true;
          }
        }

        if (! handled) {
          canUseOptimization = false;
        }
      }
    }

    stack.pop_back();
  }
};
#endif
////////////////////////////////////////////////////////////////////////////////
/// @brief specialize the variables used in a COLLECT INTO
////////////////////////////////////////////////////////////////////////////////
#if 0
// NOTE(review): dead code — this region is compiled out by '#if 0' and is
// kept for reference only; consider deleting it outright.
void triagens::aql::specializeCollectVariables (Optimizer* opt,
                                                ExecutionPlan* plan,
                                                Optimizer::Rule const* rule) {
  bool modified = false;
  std::vector<ExecutionNode*> nodes = plan->findNodesOfType(EN::AGGREGATE, true);

  for (auto n : nodes) {
    auto collectNode = static_cast<AggregateNode*>(n);
    TRI_ASSERT(collectNode != nullptr);

    auto deps = collectNode->getDependencies();
    if (deps.size() != 1) {
      continue;
    }

    if (! collectNode->hasOutVariable() ||
        collectNode->hasExpressionVariable() ||
        collectNode->count()) {
      // COLLECT without INTO or a COLLECT that already uses an
      // expression variable or a COLLECT that only counts
      continue;
    }

    auto outVariable = collectNode->outVariable();
    // must have an outVariable if we got here
    TRI_ASSERT(outVariable != nullptr);

    std::unordered_set<std::string> attributeNames;
    CollectVariableFinder finder(collectNode, attributeNames);

    // check all following nodes for usage of the out variable
    std::vector<ExecutionNode*> parents(n->getParents());

    while (! parents.empty() &&
           finder.canUseOptimization) {
      auto current = parents.back();
      parents.pop_back();

      for (auto it : current->getParents()) {
        parents.emplace_back(it);
      }

      // now check current node for usage of out variable
      auto const&& variablesUsed = current->getVariablesUsedHere();

      bool found = false;
      for (auto it : variablesUsed) {
        if (it == outVariable) {
          found = true;
          break;
        }
      }

      if (found) {
        // variable is used. now find out how it is used
        if (current->getType() != EN::CALCULATION) {
          // variable is used outside of a calculation... skip optimization
          // TODO
          break;
        }

        auto calculationNode = static_cast<CalculationNode*>(current);
        auto expression = calculationNode->expression();
        TRI_ASSERT(expression != nullptr);

        finder.analyze(expression->node());
      }
    }

    if (finder.canUseOptimization) {
      // can use the optimization

      if (! finder.attributeNames.empty()) {
        auto obj = plan->getAst()->createNodeObject();

        for (auto const& attributeName : finder.attributeNames) {
          for (auto it : collectNode->getVariablesUsedHere()) {
            if (it->name == attributeName) {
              auto refNode = plan->getAst()->createNodeReference(it);
              auto element = plan->getAst()->createNodeObjectElement(it->name.c_str(), refNode);
              obj->addMember(element);
            }
          }
        }

        if (obj->numMembers() == attributeNames.size()) {
          collectNode->removeDependency(deps[0]);
          auto calculationNode = plan->createTemporaryCalculation(obj);
          calculationNode->addDependency(deps[0]);
          collectNode->addDependency(calculationNode);
          collectNode->setExpressionVariable(calculationNode->outVariable());

          modified = true;
        }
      }
    }
  }

  opt->addPlan(plan, rule, modified);
}
#endif
////////////////////////////////////////////////////////////////////////////////
/// @brief remove INTO of a COLLECT if not used
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::removeCollectIntoRule (Optimizer* opt,
                                           ExecutionPlan* plan,
                                           Optimizer::Rule const* rule) {
  // Drops the INTO group variable from COLLECT nodes whose out variable
  // is never referenced downstream, saving the cost of materializing the
  // per-group row lists.
  bool modified = false;
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::AGGREGATE, true));

  for (auto const& n : nodes) {
    auto collectNode = static_cast<AggregateNode*>(n);
    TRI_ASSERT(collectNode != nullptr);

    auto outVariable = collectNode->outVariable();

    if (outVariable == nullptr) {
      // no out variable. nothing to do
      continue;
    }

    auto varsUsedLater = n->getVarsUsedLater();
    if (varsUsedLater.find(outVariable) != varsUsedLater.end()) {
      // outVariable is used later
      continue;
    }

    // outVariable is not used later. remove it!
    collectNode->clearOutVariable();
    modified = true;
  }

  opt->addPlan(plan, rule, modified);
}
// -----------------------------------------------------------------------------
// --SECTION--                 helper class for propagateConstantAttributesRule
// -----------------------------------------------------------------------------

/// Collects attribute accesses that are compared for equality against constant
/// values inside FILTER conditions (e.g. `FILTER doc.a == 1`), and then patches
/// other occurrences of the same attribute access in those conditions with the
/// constant value. Only top-level AND / == combinations are considered.
class PropagateConstantAttributesHelper {

  public:

    PropagateConstantAttributesHelper ()
      : _constants(),
        _modified(false) {
    }

    /// whether propagateConstants() actually patched at least one expression
    bool modified () const {
      return _modified;
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief inspects a plan and propages constant values in expressions
////////////////////////////////////////////////////////////////////////////////

    void propagateConstants (ExecutionPlan* plan) {
      std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));

      // pass 1: collect constant attribute values from all FILTER conditions
      for (auto const& node : nodes) {
        auto fn = static_cast<FilterNode*>(node);

        auto inVar = fn->getVariablesUsedHere();
        TRI_ASSERT(inVar.size() == 1);

        auto setter = plan->getVarSetBy(inVar[0]->id);

        if (setter != nullptr &&
            setter->getType() == EN::CALCULATION) {
          auto cn = static_cast<CalculationNode*>(setter);
          auto expression = cn->expression();

          if (expression != nullptr) {
            collectConstantAttributes(const_cast<AstNode*>(expression->node()));
          }
        }
      }

      // pass 2: insert the collected constants back into the FILTER conditions
      if (! _constants.empty()) {
        for (auto const& node : nodes) {
          auto fn = static_cast<FilterNode*>(node);

          auto inVar = fn->getVariablesUsedHere();
          TRI_ASSERT(inVar.size() == 1);

          auto setter = plan->getVarSetBy(inVar[0]->id);

          if (setter != nullptr &&
              setter->getType() == EN::CALCULATION) {
            auto cn = static_cast<CalculationNode*>(setter);
            auto expression = cn->expression();

            if (expression != nullptr) {
              insertConstantAttributes(const_cast<AstNode*>(expression->node()));
            }
          }
        }
      }
    }

  private:

    /// returns the tracked constant value for (variable, attribute), or nullptr
    /// if none was registered or the attribute was poisoned (set to nullptr)
    /// because conflicting values were seen
    AstNode const* getConstant (Variable const* variable,
                                std::string const& attribute) const {
      auto it = _constants.find(variable);

      if (it == _constants.end()) {
        return nullptr;
      }

      auto it2 = (*it).second.find(attribute);

      if (it2 == (*it).second.end()) {
        return nullptr;
      }

      return (*it2).second;
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief inspects an expression (recursively) and notes constant attribute
/// values so they can be propagated later
////////////////////////////////////////////////////////////////////////////////

    void collectConstantAttributes (AstNode* node) {
      if (node == nullptr) {
        return;
      }

      if (node->type == NODE_TYPE_OPERATOR_BINARY_AND) {
        // recurse into both sides of the AND
        auto lhs = node->getMember(0);
        auto rhs = node->getMember(1);

        collectConstantAttributes(lhs);
        collectConstantAttributes(rhs);
      }
      else if (node->type == NODE_TYPE_OPERATOR_BINARY_EQ) {
        auto lhs = node->getMember(0);
        auto rhs = node->getMember(1);

        // note the constant side for the attribute-access side, whichever way
        // around the comparison is written
        if (lhs->isConstant() && rhs->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
          inspectConstantAttribute(rhs, lhs);
        }
        else if (rhs->isConstant() && lhs->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
          inspectConstantAttribute(lhs, rhs);
        }
      }
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief traverses an AST part recursively and patches it by inserting
/// constant values
////////////////////////////////////////////////////////////////////////////////

    void insertConstantAttributes (AstNode* node) {
      if (node == nullptr) {
        return;
      }

      if (node->type == NODE_TYPE_OPERATOR_BINARY_AND) {
        auto lhs = node->getMember(0);
        auto rhs = node->getMember(1);

        insertConstantAttributes(lhs);
        insertConstantAttributes(rhs);
      }
      else if (node->type == NODE_TYPE_OPERATOR_BINARY_EQ) {
        auto lhs = node->getMember(0);
        auto rhs = node->getMember(1);

        // only replace the non-constant attribute-access side of the comparison
        if (! lhs->isConstant() && rhs->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
          insertConstantAttribute(node, 1);
        }
        if (! rhs->isConstant() && lhs->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
          insertConstantAttribute(node, 0);
        }
      }
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief extract an attribute and its variable from an attribute access
/// (e.g. `a.b.c` will return variable `a` and attribute name `b.c.`.
////////////////////////////////////////////////////////////////////////////////

    bool getAttribute (AstNode const* attribute,
                       Variable const*& variable,
                       std::string& name) {
      TRI_ASSERT(attribute != nullptr &&
                 attribute->type == NODE_TYPE_ATTRIBUTE_ACCESS);
      TRI_ASSERT(name.empty());

      // walk down the access chain, building a dot-prefixed name such as ".b.c"
      while (attribute->type == NODE_TYPE_ATTRIBUTE_ACCESS) {
        name = std::string(".") + std::string(attribute->getStringValue(), attribute->getStringLength()) + name;
        attribute = attribute->getMember(0);
      }

      if (attribute->type != NODE_TYPE_REFERENCE) {
        // the access chain is not rooted in a plain variable reference
        return false;
      }

      variable = static_cast<Variable const*>(attribute->getData());
      TRI_ASSERT(variable != nullptr);

      return true;
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief inspect the constant value assigned to an attribute
/// the attribute value will be stored so it can be inserted for the attribute
/// later
////////////////////////////////////////////////////////////////////////////////

    void inspectConstantAttribute (AstNode const* attribute,
                                   AstNode const* value) {
      Variable const* variable = nullptr;
      std::string name;

      if (! getAttribute(attribute, variable, name)) {
        return;
      }

      auto it = _constants.find(variable);

      if (it == _constants.end()) {
        // first attribute seen for this variable
        _constants.emplace(variable, std::unordered_map<std::string, AstNode const*>{ { name, value } });
        return;
      }

      auto it2 = (*it).second.find(name);

      if (it2 == (*it).second.end()) {
        // first value for the attribute
        (*it).second.emplace(name, value);
      }
      else {
        auto previous = (*it2).second;

        if (previous == nullptr) {
          // we have multiple different values for the attribute. better not use this attribute
          return;
        }

        if (TRI_CompareValuesJson(value->computeJson(), previous->computeJson(), true) != 0) {
          // different value found for an already tracked attribute. better not use this attribute
          // nullptr marks the attribute as poisoned (see getConstant())
          (*it2).second = nullptr;
        }
      }
    }

////////////////////////////////////////////////////////////////////////////////
/// @brief patches an AstNode by inserting a constant value into it
////////////////////////////////////////////////////////////////////////////////

    void insertConstantAttribute (AstNode* parentNode,
                                  size_t accessIndex) {
      Variable const* variable = nullptr;
      std::string name;

      if (! getAttribute(parentNode->getMember(accessIndex), variable, name)) {
        return;
      }

      auto constantValue = getConstant(variable, name);

      if (constantValue != nullptr) {
        // replace the attribute access with the constant value node
        parentNode->changeMember(accessIndex, const_cast<AstNode*>(constantValue));
        _modified = true;
      }
    }

    // per variable: attribute name => constant value node
    // (nullptr value means "conflicting constants seen, do not propagate")
    std::unordered_map<Variable const*, std::unordered_map<std::string, AstNode const*>> _constants;

    bool _modified;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief propagate constant attributes in FILTERs
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::propagateConstantAttributesRule (Optimizer* opt,
                                                     ExecutionPlan* plan,
                                                     Optimizer::Rule const* rule) {
  // delegate the actual work to the helper; it reports whether it changed anything
  PropagateConstantAttributesHelper propagator;
  propagator.propagateConstants(plan);

  opt->addPlan(plan, rule, propagator.modified());
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove SORT RAND() if appropriate
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::removeSortRandRule (Optimizer* opt,
                                        ExecutionPlan* plan,
                                        Optimizer::Rule const* rule) {
  bool modified = false;
  // should we enter subqueries??
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::SORT, true));

  for (auto const& n : nodes) {
    auto node = static_cast<SortNode*>(n);
    auto const& elements = node->getElements();

    if (elements.size() != 1) {
      // we're looking for "SORT RAND()", which has just one sort criterion
      continue;
    }

    auto const variable = elements[0].first;
    TRI_ASSERT(variable != nullptr);

    auto setter = plan->getVarSetBy(variable->id);

    if (setter == nullptr ||
        setter->getType() != EN::CALCULATION) {
      // the sort criterion is not produced by a calculation
      continue;
    }

    auto cn = static_cast<CalculationNode*>(setter);
    auto const expression = cn->expression();

    if (expression == nullptr ||
        expression->node() == nullptr ||
        expression->node()->type != NODE_TYPE_FCALL) {
      // not the right type of node
      continue;
    }

    auto funcNode = expression->node();
    auto func = static_cast<Function const*>(funcNode->getData());

    // we're looking for "RAND()", which is a function call
    // with an empty parameters array
    // (the FCALL node has exactly one member: the parameter list, which must be empty)
    if (func->externalName != "RAND" ||
        funcNode->numMembers() != 1 ||
        funcNode->getMember(0)->numMembers() != 0) {
      continue;
    }

    // now we're sure we got SORT RAND() !
    // we found what we were looking for!
    // now check if the dependencies qualify
    if (! n->hasDependency()) {
      // NOTE: this break leaves the whole node loop, not just this candidate
      break;
    }

    auto current = n->getFirstDependency();
    ExecutionNode* collectionNode = nullptr;

    // walk up the dependency chain looking for exactly one
    // EnumerateCollectionNode, with nothing disqualifying in between
    while (current != nullptr) {
      if (current->canThrow()) {
        // we shouldn't bypass a node that can throw
        collectionNode = nullptr;
        break;
      }

      switch (current->getType()) {
        case EN::SORT:
        case EN::AGGREGATE:
        case EN::FILTER:
        case EN::SUBQUERY:
        case EN::ENUMERATE_LIST:
        case EN::TRAVERSAL:
        case EN::INDEX: {
          // if we found another SortNode, an AggregateNode, FilterNode, a SubqueryNode,
          // an EnumerateListNode, a TraversalNode or an IndexNode
          // this means we cannot apply our optimization
          collectionNode = nullptr;
          current = nullptr;
          continue; // this will exit the while loop
        }

        case EN::ENUMERATE_COLLECTION: {
          if (collectionNode == nullptr) {
            // note this node
            collectionNode = current;
            break;
          }
          else {
            // we already found another collection node before. this means we
            // should not apply our optimization
            collectionNode = nullptr;
            current = nullptr;
            continue; // this will exit the while loop
          }
          // cannot get here
          TRI_ASSERT(false);
        }

        default: {
          // ignore all other nodes
        }
      }

      if (! current->hasDependency()) {
        break;
      }

      current = current->getFirstDependency();
    }

    if (collectionNode != nullptr) {
      // we found a node to modify!
      TRI_ASSERT(collectionNode->getType() == EN::ENUMERATE_COLLECTION);
      // set the random iteration flag for the EnumerateCollectionNode
      static_cast<EnumerateCollectionNode*>(collectionNode)->setRandom();

      // remove the SortNode
      // note: the CalculationNode will be removed by "remove-unnecessary-calculations"
      // rule if not used
      plan->unlinkNode(n);
      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief move calculations up in the plan
/// this rule modifies the plan in place
/// it aims to move up calculations as far up in the plan as possible, to
/// avoid redundant calculations in inner loops
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::moveCalculationsUpRule (Optimizer* opt,
                                            ExecutionPlan* plan,
                                            Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::CALCULATION, true));
  bool modified = false;

  for (auto const& n : nodes) {
    auto nn = static_cast<CalculationNode*>(n);
    if (nn->expression()->canThrow() ||
        ! nn->expression()->isDeterministic()) {
      // we will only move expressions up that cannot throw and that are deterministic
      continue;
    }

    // variables the calculation reads; we may not move above their setters
    std::unordered_set<Variable const*> neededVars;
    n->getVariablesUsedHere(neededVars);

    std::vector<ExecutionNode*> stack;
    n->addDependencies(stack);

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      bool found = false;

      for (auto const& v : current->getVariablesSetHere()) {
        if (neededVars.find(v) != neededVars.end()) {
          // shared variable, cannot move up any more
          found = true;
          break;
        }
      }

      if (found) {
        // done with optimizing this calculation node
        break;
      }

      if (! current->hasDependency()) {
        // node either has no or more than one dependency. we don't know what to do and must abort
        // note: this will also handle Singleton nodes
        break;
      }

      current->addDependencies(stack);

      // the node is moved one step further up on every loop iteration:
      // first, unlink the calculation from the plan
      plan->unlinkNode(n);
      // and re-insert into before the current node
      plan->insertDependency(current, n);

      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief move calculations down in the plan
/// this rule modifies the plan in place
/// it aims to move calculations as far down in the plan as possible, beyond
/// FILTER and LIMIT operations
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::moveCalculationsDownRule (Optimizer* opt,
                                              ExecutionPlan* plan,
                                              Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::CALCULATION, true));
  bool modified = false;

  for (auto const& n : nodes) {
    auto nn = static_cast<CalculationNode*>(n);
    if (nn->expression()->canThrow() ||
        ! nn->expression()->isDeterministic()) {
      // we will only move expressions down that cannot throw and that are deterministic
      continue;
    }

    // this is the variable that the calculation will set
    auto variable = nn->outVariable();

    // walk towards the plan's root (parents), tracking the last node visited
    std::vector<ExecutionNode*> stack;
    n->addParents(stack);

    bool shouldMove = false;
    ExecutionNode* lastNode = nullptr;

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      lastNode = current;
      bool done = false;

      for (auto const& v : current->getVariablesUsedHere()) {
        if (v == variable) {
          // the node we're looking at needs the variable we're setting.
          // can't push further!
          done = true;
          break;
        }
      }

      if (done) {
        // done with optimizing this calculation node
        break;
      }

      auto const currentType = current->getType();

      if (currentType == EN::FILTER ||
          currentType == EN::SORT ||
          currentType == EN::LIMIT ||
          currentType == EN::SUBQUERY) {
        // we found something interesting that justifies moving our node down
        shouldMove = true;
      }
      else if (currentType == EN::INDEX ||
               currentType == EN::ENUMERATE_COLLECTION ||
               currentType == EN::ENUMERATE_LIST ||
               currentType == EN::TRAVERSAL ||
               currentType == EN::AGGREGATE ||
               currentType == EN::NORESULTS) {
        // we will not push further down than such nodes
        shouldMove = false;
        break;
      }

      if (! current->hasParent()) {
        break;
      }

      current->addParents(stack);
    }

    if (shouldMove && lastNode != nullptr) {
      // first, unlink the calculation from the plan
      plan->unlinkNode(n);

      // and re-insert into before the current node
      plan->insertDependency(lastNode, n);

      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief fuse calculations in the plan
/// this rule modifies the plan in place
/// it inlines the expression of a calculation node into another calculation
/// node that is the only consumer of its result variable
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::fuseCalculationsRule (Optimizer* opt,
                                          ExecutionPlan* plan,
                                          Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::CALCULATION, true));

  if (nodes.size() < 2) {
    // fewer than two calculations - nothing to fuse
    opt->addPlan(plan, rule, false);
    return;
  }

  std::unordered_set<ExecutionNode*> toUnlink;

  for (auto const& n : nodes) {
    auto nn = static_cast<CalculationNode*>(n);

    if (nn->expression()->canThrow() ||
        ! nn->expression()->isDeterministic()) {
      // we will only fuse calculations of expressions that cannot throw and that are deterministic
      continue;
    }

    if (toUnlink.find(n) != toUnlink.end()) {
      // do not process the same node twice
      continue;
    }

    // variables read by n whose values are not needed by any later node:
    // these are candidates for having their defining expression inlined
    std::unordered_map<Variable const*, ExecutionNode*> toInsert;
    for (auto& it : nn->getVariablesUsedHere()) {
      if (! n->isVarUsedLater(it)) {
        toInsert.emplace(it, n);
      }
    }

    TRI_ASSERT(n->hasDependency());
    std::vector<ExecutionNode*> stack{ n->getFirstDependency() };

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      bool handled = false;

      if (current->getType() == EN::CALCULATION) {
        auto otherExpression = static_cast<CalculationNode const*>(current)->expression();

        if (otherExpression->isDeterministic() &&
            ! otherExpression->canThrow() &&
            otherExpression->canRunOnDBServer() == nn->expression()->canRunOnDBServer()) {
          // found another calculation node
          // note: no std::move() here - getVariablesSetHere() already returns a temporary
          auto varsSet = current->getVariablesSetHere();

          if (varsSet.size() == 1) {
            // check if it is a calculation for a variable that we are looking for
            auto it = toInsert.find(varsSet[0]);

            if (it != toInsert.end()) {
              // save the variable handle before erasing: erase() invalidates
              // the iterator, so dereferencing it afterwards would be
              // undefined behavior
              Variable const* searchVariable = (*it).first;

              // remove the variable from the list of search variables
              toInsert.erase(it);

              // replace the variable reference in the original expression with the expression for that variable
              auto expression = nn->expression();
              TRI_ASSERT(expression != nullptr);
              expression->replaceVariableReference(searchVariable, otherExpression->node());

              toUnlink.emplace(current);

              // insert the calculations' own referenced variables into the list of search variables
              for (auto& it2 : current->getVariablesUsedHere()) {
                if (! n->isVarUsedLater(it2)) {
                  toInsert.emplace(it2, n);
                }
              }

              handled = true;
            }
          }
        }
      }

      if (! handled) {
        // remove all variables from our list that might be used elsewhere
        for (auto& it : current->getVariablesUsedHere()) {
          toInsert.erase(it);
        }
      }

      if (toInsert.empty()) {
        // done
        break;
      }

      if (! current->hasDependency()) {
        break;
      }

      stack.emplace_back(current->getFirstDependency());
    }
  }

  if (! toUnlink.empty()) {
    plan->unlinkNodes(toUnlink);
  }

  opt->addPlan(plan, rule, ! toUnlink.empty());
}
////////////////////////////////////////////////////////////////////////////////
/// @brief determine the "right" type of AggregateNode and
/// add a sort node for each COLLECT (note: the sort may be removed later)
/// this rule cannot be turned off (otherwise, the query result might be wrong!)
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::specializeCollectRule (Optimizer* opt,
                                           ExecutionPlan* plan,
                                           Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::AGGREGATE, true));
  bool modified = false;

  for (auto const& n : nodes) {
    auto collectNode = static_cast<AggregateNode*>(n);

    if (collectNode->isSpecialized()) {
      // already specialized this node
      continue;
    }

    auto const& aggregateVariables = collectNode->aggregateVariables();

    // test if we can use an alternative version of COLLECT with a hash table
    bool const canUseHashAggregation = (! aggregateVariables.empty() &&
                                        (! collectNode->hasOutVariable() || collectNode->count()) &&
                                        collectNode->getOptions().canUseHashMethod());

    if (canUseHashAggregation) {
      // create a new plan with the adjusted COLLECT node
      std::unique_ptr<ExecutionPlan> newPlan(plan->clone());

      // use the cloned COLLECT node
      auto newCollectNode = static_cast<AggregateNode*>(newPlan->getNodeById(collectNode->id()));
      TRI_ASSERT(newCollectNode != nullptr);

      // specialize the AggregateNode so it will become a HashAggregateBlock later
      // additionally, add a SortNode BEHIND the AggregateNode (to sort the final result)
      newCollectNode->aggregationMethod(AggregationOptions::AggregationMethod::AGGREGATION_METHOD_HASH);
      newCollectNode->specialized();

      if (! collectNode->isDistinctCommand()) {
        // add the post-SORT
        std::vector<std::pair<Variable const*, bool>> sortElements;
        for (auto const& v : newCollectNode->aggregateVariables()) {
          // sort ascending by each group variable
          sortElements.emplace_back(std::make_pair(v.first, true));
        }

        auto sortNode = new SortNode(newPlan.get(), newPlan->nextId(), sortElements, false);
        // the plan takes ownership of the SortNode
        newPlan->registerNode(sortNode);

        // splice the SortNode between the COLLECT and its (single) parent
        TRI_ASSERT(newCollectNode->hasParent());
        auto const& parents = newCollectNode->getParents();
        auto parent = parents[0];

        sortNode->addDependency(newCollectNode);
        parent->replaceDependency(newCollectNode, sortNode);
      }
      newPlan->findVarUsage();

      if (nodes.size() > 1) {
        // this will tell the optimizer to optimize the cloned plan with this specific rule again
        opt->addPlan(newPlan.release(), rule, true, static_cast<int>(rule->level - 1));
      }
      else {
        // no need to run this specific rule again on the cloned plan
        opt->addPlan(newPlan.release(), rule, true);
      }
    }

    // mark node as specialized, so we do not process it again
    collectNode->specialized();

    // finally, adjust the original plan and create a sorted version of COLLECT

    // specialize the AggregateNode so it will become a SortedAggregateBlock later
    collectNode->aggregationMethod(AggregationOptions::AggregationMethod::AGGREGATION_METHOD_SORTED);

    // insert a SortNode IN FRONT OF the AggregateNode
    if (! aggregateVariables.empty()) {
      std::vector<std::pair<Variable const*, bool>> sortElements;
      for (auto const& v : aggregateVariables) {
        // note: here the *input* variable (v.second) is sorted on
        sortElements.emplace_back(std::make_pair(v.second, true));
      }

      auto sortNode = new SortNode(plan, plan->nextId(), sortElements, true);
      plan->registerNode(sortNode);

      TRI_ASSERT(collectNode->hasDependency());
      auto dep = collectNode->getFirstDependency();
      sortNode->addDependency(dep);
      collectNode->replaceDependency(dep, sortNode);

      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief split and-combined filters and break them into smaller parts
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::splitFiltersRule (Optimizer* opt,
                                      ExecutionPlan* plan,
                                      Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));
  bool modified = false;

  for (auto const& n : nodes) {
    auto inVars(std::move(n->getVariablesUsedHere()));
    TRI_ASSERT(inVars.size() == 1);
    auto setter = plan->getVarSetBy(inVars[0]->id);

    if (setter == nullptr || setter->getType() != EN::CALCULATION) {
      continue;
    }

    auto cn = static_cast<CalculationNode*>(setter);
    auto const expression = cn->expression();

    if (expression->canThrow() ||
        ! expression->isDeterministic() ||
        expression->node()->type != NODE_TYPE_OPERATOR_BINARY_AND) {
      // only deterministic, non-throwing, top-level AND conditions are split
      continue;
    }

    // decompose the AND tree; each non-AND leaf becomes its own
    // CALCULATION + FILTER pair inserted before the original filter
    std::vector<AstNode*> stack{ expression->nodeForModification() };

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      if (current->type == NODE_TYPE_OPERATOR_BINARY_AND) {
        stack.emplace_back(current->getMember(0));
        stack.emplace_back(current->getMember(1));
      }
      else {
        modified = true;

        ExecutionNode* calculationNode = nullptr;
        auto outVar = plan->getAst()->variables()->createTemporaryVariable();
        // note: this local shadows the outer `expression` on purpose
        auto expression = new Expression(plan->getAst(), current);
        try {
          calculationNode = new CalculationNode(plan, plan->nextId(), expression, outVar);
        }
        catch (...) {
          // CalculationNode would have owned the expression; free it ourselves
          delete expression;
          throw;
        }
        plan->registerNode(calculationNode);
        plan->insertDependency(n, calculationNode);

        auto filterNode = new FilterNode(plan, plan->nextId(), outVar);
        plan->registerNode(filterNode);

        plan->insertDependency(n, filterNode);
      }
    }

    if (modified) {
      // the original combined filter has been fully replaced by the split parts
      plan->unlinkNode(n, false);
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief move filters up in the plan
/// this rule modifies the plan in place
/// filters are moved as far up in the plan as possible to make result sets
/// as small as possible as early as possible
/// filters are not pushed beyond limits
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::moveFiltersUpRule (Optimizer* opt,
                                       ExecutionPlan* plan,
                                       Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));
  bool modified = false;

  for (auto const& n : nodes) {
    // a FILTER depends on exactly one input variable
    auto neededVars = n->getVariablesUsedHere();
    TRI_ASSERT(neededVars.size() == 1);

    std::vector<ExecutionNode*> stack;
    n->addDependencies(stack);

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      if (current->getType() == EN::LIMIT) {
        // cannot push a filter beyond a LIMIT node
        break;
      }

      if (current->canThrow()) {
        // must not move a filter beyond a node that can throw
        break;
      }

      if (current->getType() == EN::CALCULATION) {
        // must not move a filter beyond a node with a non-deterministic result
        auto calculation = static_cast<CalculationNode const*>(current);
        if (! calculation->expression()->isDeterministic()) {
          break;
        }
      }

      bool found = false;

      for (auto const& v : current->getVariablesSetHere()) {
        for (auto it = neededVars.begin(); it != neededVars.end(); ++it) {
          if ((*it)->id == v->id) {
            // shared variable, cannot move up any more
            found = true;
            break;
          }
        }
      }

      if (found) {
        // done with optimizing this calculation node
        break;
      }

      if (! current->hasDependency()) {
        // node either has no or more than one dependency. we don't know what to do and must abort
        // note: this will also handle Singleton nodes
        break;
      }

      current->addDependencies(stack);

      // the filter is moved one step further up on every loop iteration:
      // first, unlink the filter from the plan
      plan->unlinkNode(n);
      // and re-insert into plan in front of the current node
      plan->insertDependency(current, n);

      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
/// Walker that rewrites variable usages according to a replacement table
/// (VariableId => replacement Variable). Used by removeRedundantCalculationsRule
/// to redirect consumers of duplicate calculations to a single instance.
/// Note: accesses private members of several node classes (presumably declared
/// a friend there - TODO confirm).
class triagens::aql::RedundantCalculationsReplacer final : public WalkerWorker<ExecutionNode> {

  public:

    explicit RedundantCalculationsReplacer (std::unordered_map<VariableId, Variable const*> const& replacements)
      : _replacements(replacements) {
    }

    /// replace the single input variable of a node type that has an
    /// `_inVariable` member (EnumerateListNode, ReturnNode, FilterNode)
    template<typename T>
    void replaceInVariable (ExecutionNode* en) {
      auto node = static_cast<T*>(en);
      node->_inVariable = Variable::replace(node->_inVariable, _replacements);
    }

    /// replace variables inside a CalculationNode's expression, but only
    /// if the expression actually uses one of the to-be-replaced variables
    void replaceInCalculation (ExecutionNode* en) {
      auto node = static_cast<CalculationNode*>(en);
      std::unordered_set<Variable const*> variables;
      node->expression()->variables(variables);

      // check if the calculation uses any of the variables that we want to replace
      for (auto const& it : variables) {
        if (_replacements.find(it->id) != _replacements.end()) {
          // calculation uses a to-be-replaced variable
          node->expression()->replaceVariables(_replacements);
          return;
        }
      }
    }

    bool before (ExecutionNode* en) override final {
      switch (en->getType()) {
        case EN::ENUMERATE_LIST: {
          replaceInVariable<EnumerateListNode>(en);
          break;
        }

        case EN::RETURN: {
          replaceInVariable<ReturnNode>(en);
          break;
        }

        case EN::CALCULATION: {
          replaceInCalculation(en);
          break;
        }

        case EN::FILTER: {
          replaceInVariable<FilterNode>(en);
          break;
        }

        case EN::AGGREGATE: {
          // replace the *input* side (second) of each aggregate variable pair
          auto node = static_cast<AggregateNode*>(en);
          for (auto& variable : node->_aggregateVariables) {
            variable.second = Variable::replace(variable.second, _replacements);
          }
          break;
        }

        case EN::SORT: {
          auto node = static_cast<SortNode*>(en);
          for (auto& variable : node->_elements) {
            variable.first = Variable::replace(variable.first, _replacements);
          }
          break;
        }

        default: {
          // ignore all other types of nodes
        }
      }

      // always continue (returning false means "do not abort the walk")
      return false;
    }

  private:

    // maps variable id => variable to use instead; owned by the caller
    std::unordered_map<VariableId, Variable const*> const& _replacements;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief remove CalculationNode(s) that are repeatedly used in a query
/// (i.e. common expressions)
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::removeRedundantCalculationsRule (Optimizer* opt,
                                                     ExecutionPlan* plan,
                                                     Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::CALCULATION, true));

  if (nodes.size() < 2) {
    // quick exit
    opt->addPlan(plan, rule, false);
    return;
  }

  // scratch buffer, reused for stringifying every expression
  triagens::basics::StringBuffer buffer(TRI_UNKNOWN_MEM_ZONE);
  std::unordered_map<VariableId, Variable const*> replacements;

  for (auto const& n : nodes) {
    auto nn = static_cast<CalculationNode*>(n);

    if (! nn->expression()->isDeterministic()) {
      // If this node is non-deterministic, we must not touch it!
      continue;
    }

    auto outvar = n->getVariablesSetHere();
    TRI_ASSERT(outvar.size() == 1);

    try {
      nn->expression()->stringifyIfNotTooLong(&buffer);
    }
    catch (...) {
      // expression could not be stringified (maybe because not all node types
      // are supported). this is not an error, we just skip the optimization
      buffer.reset();
      continue;
    }

    // textual fingerprint of this node's expression; two calculations with
    // identical fingerprints compute the same value
    std::string const referenceExpression(buffer.c_str(), buffer.length());
    buffer.reset();

    // walk up the dependency chain looking for a calculation with the same
    // fingerprint
    std::vector<ExecutionNode*> stack;
    n->addDependencies(stack);

    while (! stack.empty()) {
      auto current = stack.back();
      stack.pop_back();

      if (current->getType() == EN::CALCULATION) {
        try {
          static_cast<CalculationNode*>(current)->expression()->stringifyIfNotTooLong(&buffer);
        }
        catch (...) {
          // expression could not be stringified (maybe because not all node types
          // are supported). this is not an error, we just skip the optimization
          buffer.reset();
          continue;
        }

        std::string const compareExpression(buffer.c_str(), buffer.length());
        buffer.reset();

        if (compareExpression == referenceExpression) {
          // expressions are identical
          auto outvars = current->getVariablesSetHere();
          TRI_ASSERT(outvars.size() == 1);

          // check if target variable is already registered as a replacement
          // this covers the following case:
          // - replacements is set to B => C
          // - we're now inserting a replacement A => B
          // the goal now is to enter a replacement A => C instead of A => B
          auto target = outvars[0];
          while (target != nullptr) {
            auto it = replacements.find(target->id);

            if (it != replacements.end()) {
              target = (*it).second;
            }
            else {
              break;
            }
          }
          replacements.emplace(outvar[0]->id, target);

          // also check if the insertion enables further shortcuts
          // this covers the following case:
          // - replacements is set to A => B
          // - we have just inserted a replacement B => C
          // the goal now is to change the replacement A => B to A => C
          for (auto it = replacements.begin(); it != replacements.end(); ++it) {
            if ((*it).second == outvar[0]) {
              (*it).second = target;
            }
          }
        }
      }

      if (current->getType() == EN::AGGREGATE) {
        if (static_cast<AggregateNode*>(current)->hasOutVariable()) {
          // COLLECT ... INTO is evil (tm): it needs to keep all already defined variables
          // we need to abort optimization here
          break;
        }
      }

      if (! current->hasDependency()) {
        // node either has no or more than one dependency. we don't know what to do and must abort
        // note: this will also handle Singleton nodes
        break;
      }

      current->addDependencies(stack);
    }
  }

  if (! replacements.empty()) {
    // finally replace the variables
    RedundantCalculationsReplacer finder(replacements);
    plan->root()->walk(&finder);

    opt->addPlan(plan, rule, true);
  }
  else {
    // no changes
    opt->addPlan(plan, rule, false);
  }
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove CalculationNodes and SubqueryNodes that are never needed
/// this modifies an existing plan in place
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::removeUnnecessaryCalculationsRule (Optimizer* opt,
                                                       ExecutionPlan* plan,
                                                       Optimizer::Rule const* rule) {
  std::vector<ExecutionNode::NodeType> const types = {
    EN::CALCULATION,
    EN::SUBQUERY
  };

  std::vector<ExecutionNode*> candidates(plan->findNodesOfType(types, true));
  std::unordered_set<ExecutionNode*> removable;

  for (auto const& candidate : candidates) {
    if (candidate->getType() == EN::CALCULATION) {
      auto calc = static_cast<CalculationNode*>(candidate);

      if (calc->canThrow() && ! calc->canRemoveIfThrows()) {
        // a throwing calculation must be kept - removing it would swallow the error
        continue;
      }
    }
    else {
      auto sub = static_cast<SubqueryNode*>(candidate);

      // subqueries that can throw or that modify data must never be removed
      if (sub->canThrow() || sub->isModificationQuery()) {
        continue;
      }
    }

    auto produced = candidate->getVariablesSetHere();
    TRI_ASSERT(produced.size() == 1);

    auto varsUsedLater = candidate->getVarsUsedLater();

    if (varsUsedLater.find(produced[0]) == varsUsedLater.end()) {
      // the produced variable is never consumed further down the pipeline,
      // so the whole node can go
      removable.emplace(candidate);
    }
  }

  if (! removable.empty()) {
    plan->unlinkNodes(removable);
  }

  opt->addPlan(plan, rule, ! removable.empty());
}
////////////////////////////////////////////////////////////////////////////////
/// @brief useIndex, try to use an index for filtering
////////////////////////////////////////////////////////////////////////////////

void triagens::aql::useIndexesRule (Optimizer* opt,
                                    ExecutionPlan* plan,
                                    Optimizer::Rule const* rule) {
  // These are all the nodes where we start traversing (including all subqueries)
  std::vector<ExecutionNode*> leaves(plan->findEndNodes(true));

  // node id => replacement node, filled in by the ConditionFinder
  std::unordered_map<size_t, ExecutionNode*> changes;

  auto cleanupChanges = [&changes] () -> void {
    // free any replacement nodes that were not handed over to the plan
    for (auto& entry : changes) {
      delete entry.second;
    }
    changes.clear();
  };
  TRI_DEFER(cleanupChanges());

  bool hasEmptyResult = false;
  for (auto const& leaf : leaves) {
    ConditionFinder finder(plan, &changes, &hasEmptyResult);
    leaf->walk(&finder);
  }

  if (changes.empty()) {
    opt->addPlan(plan, rule, hasEmptyResult);
  }
  else {
    for (auto& entry : changes) {
      plan->registerNode(entry.second);
      plan->replaceNode(plan->getNodeById(entry.first), entry.second);

      // ownership passed to the plan - prevent double deletion by cleanupChanges()
      entry.second = nullptr;
    }
    opt->addPlan(plan, rule, true);
  }
}
////////////////////////////////////////////////////////////////////////////////
/// @brief walker used by useIndexForSortRule: while walking up from a SORT
/// node it collects the sort criteria and variable definitions, then tries to
/// serve the sort order from a sorted index, removing the SORT node when the
/// index fully covers the sort condition
////////////////////////////////////////////////////////////////////////////////

struct SortToIndexNode final : public WalkerWorker<ExecutionNode> {
  ExecutionPlan* _plan;
  // the SORT node we are trying to optimize away (nullptr until one is seen)
  SortNode* _sortNode;
  // sort registers in sort order: (variable id, ascending flag)
  std::vector<std::pair<VariableId, bool>> _sorts;
  // variable id -> defining expression, gathered from CALCULATION nodes
  std::unordered_map<VariableId, AstNode const*> _variableDefinitions;
  // set to true whenever the plan was changed
  bool _modified;

  public:

    explicit SortToIndexNode (ExecutionPlan* plan)
      : _plan(plan),
        _sortNode(nullptr),
        _sorts(),
        _variableDefinitions(),
        _modified(false) {
    }

    // try to replace a full-collection enumeration with a sorted index scan
    // that produces the order requested by the SORT node
    bool handleEnumerateCollectionNode (EnumerateCollectionNode* enumerateCollectionNode) {
      if (_sortNode == nullptr) {
        // no sort condition collected yet
        return true;
      }

      if (enumerateCollectionNode->isInInnerLoop()) {
        // index node contained in an outer loop. must not optimize away the sort!
        return true;
      }

      SortCondition sortCondition(_sorts, _variableDefinitions);

      if (! sortCondition.isEmpty() &&
          sortCondition.isOnlyAttributeAccess() &&
          sortCondition.isUnidirectional()) {
        // we have found a sort condition, which is unidirectional
        // now check if any of the collection's indexes covers it

        Variable const* outVariable = enumerateCollectionNode->outVariable();
        auto const& indexes = enumerateCollectionNode->collection()->getIndexes();
        triagens::aql::Index const* bestIndex = nullptr;
        double bestCost = 0.0;
        size_t bestNumCovered = 0;

        for (auto& index : indexes) {
          if (! index->isSorted() || index->sparse) {
            // can only use a sorted index
            // cannot use a sparse index for sorting
            continue;
          }

          auto numCovered = sortCondition.coveredAttributes(outVariable, index->fields);

          if (numCovered == 0) {
            // index does not cover any sort attribute
            continue;
          }

          double estimatedCost = 0.0;
          if (! index->supportsSortCondition(&sortCondition, outVariable, enumerateCollectionNode->collection()->count(), estimatedCost)) {
            // should never happen
            TRI_ASSERT(false);
            continue;
          }

          // keep the cheapest candidate index
          if (bestIndex == nullptr || estimatedCost < bestCost) {
            bestIndex = index;
            bestCost = estimatedCost;
            bestNumCovered = numCovered;
          }
        }

        if (bestIndex != nullptr) {
          // build an empty, normalized lookup condition for the new IndexNode
          auto condition = std::make_unique<Condition>(_plan->getAst());
          condition->normalize(_plan);

          std::unique_ptr<ExecutionNode> newNode(new IndexNode(
            _plan,
            _plan->nextId(),
            enumerateCollectionNode->vocbase(),
            enumerateCollectionNode->collection(),
            outVariable,
            std::vector<Index const*>({ bestIndex }),
            condition.get(),
            sortCondition.isDescending()
          ));

          // the IndexNode now owns the condition
          condition.release();

          auto n = newNode.release();
          _plan->registerNode(n);
          _plan->replaceNode(enumerateCollectionNode, n);
          _modified = true;

          if (bestNumCovered == sortCondition.numAttributes()) {
            // if the index covers the complete sort condition, we can also remove the sort node
            _plan->unlinkNode(_plan->getNodeById(_sortNode->id()));
          }
        }
      }

      return true; // always abort further searching here
    }

    // check whether an existing index scan already delivers the order
    // requested by the SORT node; if so, drop the SORT node from the plan
    bool handleIndexNode (IndexNode* indexNode) {
      if (_sortNode == nullptr) {
        // no sort condition collected yet
        return true;
      }

      if (indexNode->isInInnerLoop()) {
        // index node contained in an outer loop. must not optimize away the sort!
        return true;
      }

      auto const& indexes = indexNode->getIndexes();
      auto cond = indexNode->condition();

      if (indexes.size() != 1) {
        // can only use this index node if it uses exactly one index or multiple indexes on exactly the same attributes

        if (! cond->isSorted()) {
          // index conditions do not guarantee sortedness
          return true;
        }

        std::vector<std::vector<triagens::basics::AttributeName>> seen;

        for (auto& index : indexes) {
          if (index->sparse) {
            // cannot use a sparse index for sorting
            return true;
          }

          // NOTE(review): `seen` is never populated in this loop, so this
          // check can never fire; additionally, aborting when the fields ARE
          // identical contradicts the "different attributes" comment below --
          // looks like the condition should be negated and `index->fields`
          // collected into `seen`. Verify intent before relying on this path.
          if (! seen.empty() && triagens::basics::AttributeName::isIdentical(index->fields, seen, true)) {
            // different attributes
            return true;
          }
        }

        // all indexes use the same attributes and index conditions guarantee sorted output
      }

      // if we get here, we either have one index or multiple indexes on the same attributes
      auto index = indexes[0];
      bool handled = false;

      SortCondition sortCondition(_sorts, _variableDefinitions);

      bool const isOnlyAttributeAccess = (! sortCondition.isEmpty() && sortCondition.isOnlyAttributeAccess());

      if (isOnlyAttributeAccess &&
          index->isSorted() &&
          ! index->sparse &&
          sortCondition.isUnidirectional() &&
          sortCondition.isDescending() == indexNode->reverse()) {
        // we have found a sort condition, which is unidirectional and in the same
        // order as the IndexNode...
        // now check if the sort attributes match the ones of the index
        Variable const* outVariable = indexNode->outVariable();
        auto numCovered = sortCondition.coveredAttributes(outVariable, index->fields);

        if (numCovered == sortCondition.numAttributes()) {
          // sort condition is fully covered by index... now we can remove the sort node from the plan
          _plan->unlinkNode(_plan->getNodeById(_sortNode->id()));
          _modified = true;
          handled = true;
        }
      }

      if (! handled &&
          isOnlyAttributeAccess &&
          indexes.size() == 1) {
        // special case... the index cannot be used for sorting, but we only compare with equality
        // lookups. now check if the equality lookup attributes are the same as the index attributes
        auto root = cond->root();

        if (root != nullptr) {
          auto condNode = root->getMember(0);

          if (condNode->isOnlyEqualityMatch()) {
            // now check if the index fields are the same as the sort condition fields
            // e.g. FILTER c.value1 == 1 && c.value2 == 42 SORT c.value1, c.value2
            Variable const* outVariable = indexNode->outVariable();
            size_t coveredFields = sortCondition.coveredAttributes(outVariable, index->fields);

            if (coveredFields == sortCondition.numAttributes() &&
                (index->isSorted() || index->fields.size() == sortCondition.numAttributes())) {
              // no need to sort
              _plan->unlinkNode(_plan->getNodeById(_sortNode->id()));
              _modified = true;
            }
          }
        }
      }

      return true; // always abort after we found an IndexNode
    }

    // do not descend into subqueries; they are optimized separately
    bool enterSubquery (ExecutionNode*, ExecutionNode*) override final {
      return false;
    }

    bool before (ExecutionNode* en) override final {
      switch (en->getType()) {
        case EN::TRAVERSAL:
        case EN::ENUMERATE_LIST:
        case EN::SUBQUERY:
        case EN::FILTER:
          return false; // skip. we don't care.

        case EN::CALCULATION: {
          // remember the defining expression of the calculated variable, so
          // the SortCondition can later resolve sort registers to attributes
          auto outvars = en->getVariablesSetHere();
          TRI_ASSERT(outvars.size() == 1);
          _variableDefinitions.emplace(outvars[0]->id, static_cast<CalculationNode const*>(en)->expression()->node());
          return false;
        }

        case EN::SINGLETON:
        case EN::AGGREGATE:
        case EN::INSERT:
        case EN::REMOVE:
        case EN::REPLACE:
        case EN::UPDATE:
        case EN::UPSERT:
        case EN::RETURN:
        case EN::NORESULTS:
        case EN::SCATTER:
        case EN::DISTRIBUTE:
        case EN::GATHER:
        case EN::REMOTE:
        case EN::ILLEGAL:
        case EN::LIMIT: // LIMIT is criterion to stop
          return true; // abort.

        case EN::SORT: // pulling two sorts together is done elsewhere.
          if (! _sorts.empty() || _sortNode != nullptr) {
            return true; // a different SORT node. abort
          }
          _sortNode = static_cast<SortNode*>(en);
          for (auto& it : _sortNode->getElements()) {
            _sorts.emplace_back((it.first)->id, it.second);
          }
          return false;

        case EN::INDEX:
          return handleIndexNode(static_cast<IndexNode*>(en));

        case EN::ENUMERATE_COLLECTION:
          return handleEnumerateCollectionNode(static_cast<EnumerateCollectionNode*>(en));
      }
      return true;
    }
};
void triagens::aql::useIndexForSortRule (Optimizer* opt,
                                         ExecutionPlan* plan,
                                         Optimizer::Rule const* rule) {
  // walk upwards from every SORT node and try to let a sorted index
  // produce the requested order instead
  std::vector<ExecutionNode*> sortNodes(plan->findNodesOfType(EN::SORT, true));

  bool planWasChanged = false;
  for (auto const& current : sortNodes) {
    SortToIndexNode finder(plan);
    static_cast<SortNode*>(current)->walk(&finder);
    planWasChanged |= finder._modified;
  }

  opt->addPlan(plan, rule, planWasChanged);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief try to remove filters which are covered by indexes
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::removeFiltersCoveredByIndexRule (Optimizer* opt,
                                                     ExecutionPlan* plan,
                                                     Optimizer::Rule const* rule) {
  std::unordered_set<ExecutionNode*> toUnlink;
  bool modified = false;
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::FILTER, true));

  for (auto const& node : nodes) {
    auto fn = static_cast<FilterNode const*>(node);
    // find the node with the filter expression
    auto inVar = fn->getVariablesUsedHere();
    TRI_ASSERT(inVar.size() == 1);

    auto setter = plan->getVarSetBy(inVar[0]->id);
    if (setter == nullptr || setter->getType() != EN::CALCULATION) {
      // filter input is not produced by a calculation -- nothing to compare
      continue;
    }

    auto calculationNode = static_cast<CalculationNode*>(setter);
    auto conditionNode = calculationNode->expression()->node();

    // build the filter condition
    auto condition = std::make_unique<Condition>(plan->getAst());
    condition->andCombine(conditionNode);
    condition->normalize(plan);

    if (condition->root() == nullptr) {
      continue;
    }

    size_t const n = condition->root()->numMembers();

    if (n != 1) {
      // either no condition or multiple ORed conditions...
      continue;
    }

    bool handled = false;
    // walk down the dependency chain until we find an INDEX node whose
    // condition covers the filter, hit a LIMIT, or reach the end of the plan
    auto current = node;
    while (current != nullptr) {
      if (current->getType() == EN::INDEX) {
        auto indexNode = static_cast<IndexNode const*>(current);

        // found an index node, now check if the expression is covered by the index
        auto indexCondition = indexNode->condition();

        if (indexCondition != nullptr && ! indexCondition->isEmpty()) {
          auto const& indexesUsed = indexNode->getIndexes();

          if (indexesUsed.size() == 1) {
            // single index. this is something that we can handle
            auto newNode = condition->removeIndexCondition(indexNode->outVariable(), indexCondition->root());

            if (newNode == nullptr) {
              // no condition left...
              // FILTER node can be completely removed
              toUnlink.emplace(setter);
              toUnlink.emplace(node);
              modified = true;
              handled = true;
            }
            else if (newNode != condition->root()) {
              // some condition is left, but it is a different one than
              // the one from the FILTER node
              auto expr = std::make_unique<Expression>(plan->getAst(), newNode);
              CalculationNode* cn = new CalculationNode(plan, plan->nextId(), expr.get(), calculationNode->outVariable());
              // ownership of the expression has passed to the CalculationNode
              expr.release();
              plan->registerNode(cn);
              plan->replaceNode(setter, cn);
              modified = true;
              handled = true;
            }
          }
        }

        if (handled) {
          break;
        }
      }

      // stop at LIMIT nodes: a filter must not be moved beyond them
      if (handled ||
          current->getType() == EN::LIMIT ||
          ! current->hasDependency()) {
        break;
      }

      current = current->getFirstDependency();
    }
  }

  if (! toUnlink.empty()) {
    plan->unlinkNodes(toUnlink);
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief helper to compute lots of permutation tuples
/// a permutation tuple is represented as a single vector together with
/// another vector describing the boundaries of the tuples.
/// Example:
/// data: 0,1,2, 3,4, 5,6
/// starts: 0, 3, 5, (indices of starts of sections)
/// means a tuple of 3 permutations of 3, 2 and 2 points respectively
/// This function computes the next permutation tuple among the
/// lexicographically sorted list of all such tuples. It returns true
/// if it has successfully computed this and false if the tuple is already
/// the lexicographically largest one. If false is returned, the permutation
/// tuple is back to the beginning.
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
/// @brief advance a permutation tuple to its lexicographic successor.
/// `data` holds the concatenated sections, `starts` the index where each
/// section begins. Returns true when some section advanced; returns false --
/// with the tuple reset to its lexicographic minimum -- when all sections
/// had already reached their last permutation.
////////////////////////////////////////////////////////////////////////////////

static bool NextPermutationTuple (std::vector<size_t>& data,
                                  std::vector<size_t>& starts) {
  // advance the rightmost section first; a section that wraps around
  // (next_permutation returning false) carries over into the one before it
  for (size_t i = starts.size(); i-- != 0; ) {
    auto const sectionBegin = data.begin() + starts[i];
    auto const sectionEnd = (i + 1 == starts.size())
        ? data.end()
        : data.begin() + starts[i + 1];

    if (std::next_permutation(sectionBegin, sectionEnd)) {
      return true;  // this section advanced; sections to its left are untouched
    }
  }

  // every section wrapped: the whole tuple is back at its minimum
  return false;
}
////////////////////////////////////////////////////////////////////////////////
/// @brief interchange adjacent EnumerateCollectionNodes in all possible ways
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::interchangeAdjacentEnumerationsRule (Optimizer* opt,
                                                         ExecutionPlan* plan,
                                                         Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::ENUMERATE_COLLECTION, true));
  // set of enumeration nodes not yet assigned to a run
  std::unordered_set<ExecutionNode*> nodesSet;

  for (auto const& n : nodes) {
    TRI_ASSERT(nodesSet.find(n) == nodesSet.end());
    nodesSet.emplace(n);
  }

  // concatenation of all runs of adjacent EnumerateCollectionNodes;
  // permTuple/starts use the tuple representation consumed by
  // NextPermutationTuple (see comment above that function)
  std::vector<ExecutionNode*> nodesToPermute;
  std::vector<size_t> permTuple;
  std::vector<size_t> starts;

  // We use that the order of the nodes is such that a node B that is among the
  // recursive dependencies of a node A is later in the vector.
  for (auto const& n : nodes) {
    if (nodesSet.find(n) != nodesSet.end()) {
      std::vector<ExecutionNode*> nn{ n };
      nodesSet.erase(n);

      // Now follow the dependencies as long as we see further such nodes:
      auto nwalker = n;

      while (true) {
        if (! nwalker->hasDependency()) {
          break;
        }

        auto dep = nwalker->getFirstDependency();

        if (dep->getType() != EN::ENUMERATE_COLLECTION) {
          break;
        }

        nwalker = dep;
        nn.emplace_back(nwalker);
        nodesSet.erase(nwalker);
      }

      if (nn.size() > 1) {
        // Move it into the permutation tuple:
        starts.emplace_back(permTuple.size());

        for (auto const& nnn : nn) {
          nodesToPermute.emplace_back(nnn);
          permTuple.emplace_back(permTuple.size());
        }
      }
    }
  }

  // Now we have collected all the runs of EnumerateCollectionNodes in the
  // plan, we need to compute all possible permutations of all of them,
  // independently. This is why we need to compute all permutation tuples.

  // always keep the original plan (the identity permutation)
  opt->addPlan(plan, rule, false);

  if (! starts.empty()) {
    NextPermutationTuple(permTuple, starts); // will never return false

    do {
      // Clone the plan:
      auto newPlan = plan->clone();

      try { // get rid of plan if any of this fails
        // Find the nodes in the new plan corresponding to the ones in the
        // old plan that we want to permute:
        std::vector<ExecutionNode*> newNodes;
        for (size_t j = 0; j < nodesToPermute.size(); j++) {
          newNodes.emplace_back(newPlan->getNodeById(nodesToPermute[j]->id()));
        }

        // Now get going with the permutations:
        for (size_t i = 0; i < starts.size(); i++) {
          size_t lowBound = starts[i];
          size_t highBound = (i < starts.size()-1)
            ? starts[i+1]
            : permTuple.size();

          // We need to remove the nodes
          // newNodes[lowBound..highBound-1] in newPlan and replace
          // them by the same ones in a different order, given by
          // permTuple[lowBound..highBound-1].
          auto const& parents = newNodes[lowBound]->getParents();
          TRI_ASSERT(parents.size() == 1);
          auto parent = parents[0]; // needed for insertion later

          // Unlink all those nodes:
          for (size_t j = lowBound; j < highBound; j++) {
            newPlan->unlinkNode(newNodes[j]);
          }

          // And insert them in the new order:
          for (size_t j = highBound; j-- != lowBound; ) {
            newPlan->insertDependency(parent, newNodes[permTuple[j]]);
          }
        }

        // OK, the new plan is ready, let's report it:
        if (! opt->addPlan(newPlan, rule, true)) {
          // have enough plans. stop permutations
          break;
        }
      }
      catch (...) {
        delete newPlan;
        throw;
      }
    }
    while (NextPermutationTuple(permTuple, starts));
  }
}
////////////////////////////////////////////////////////////////////////////////
/// @brief scatter operations in cluster
/// this rule inserts scatter, gather and remote nodes so operations on sharded
/// collections actually work
/// it will change plans in place
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::scatterInClusterRule (Optimizer* opt,
                                          ExecutionPlan* plan,
                                          Optimizer::Rule const* rule) {
  bool wasModified = false;

  if (triagens::arango::ServerState::instance()->isCoordinator()) {
    // find subqueries
    // maps subquery root node -> the owning SubqueryNode, so the subquery
    // root can be patched if it gets replaced below
    std::unordered_map<ExecutionNode*, ExecutionNode*> subqueries;

    for (auto& it : plan->findNodesOfType(ExecutionNode::SUBQUERY, true)) {
      subqueries.emplace(static_cast<SubqueryNode const*>(it)->getSubquery(), it);
    }

    // we are a coordinator. now look in the plan for nodes of type
    // EnumerateCollectionNode, IndexNode and modification nodes
    std::vector<ExecutionNode::NodeType> const types = {
      ExecutionNode::ENUMERATE_COLLECTION,
      ExecutionNode::INDEX,
      ExecutionNode::INSERT,
      ExecutionNode::UPDATE,
      ExecutionNode::REPLACE,
      ExecutionNode::REMOVE,
      ExecutionNode::UPSERT // TODO: check if ok here
    };

    std::vector<ExecutionNode*> nodes(plan->findNodesOfType(types, true));

    for (auto& node: nodes) {
      // found a node we need to replace in the plan

      auto const& parents = node->getParents();
      auto const& deps = node->getDependencies();
      TRI_ASSERT(deps.size() == 1);

      // don't do this if we are already distributing!
      if (deps[0]->getType() == ExecutionNode::REMOTE &&
          deps[0]->getFirstDependency()->getType() == ExecutionNode::DISTRIBUTE) {
        continue;
      }

      bool const isRootNode = plan->isRoot(node);
      // temporarily take the node out of the plan; it is re-linked between
      // the two RemoteNodes created below
      plan->unlinkNode(node, true);

      auto const nodeType = node->getType();

      // extract database and collection from plan node
      TRI_vocbase_t* vocbase = nullptr;
      Collection const* collection = nullptr;

      if (nodeType == ExecutionNode::ENUMERATE_COLLECTION) {
        vocbase = static_cast<EnumerateCollectionNode*>(node)->vocbase();
        collection = static_cast<EnumerateCollectionNode*>(node)->collection();
      }
      else if (nodeType == ExecutionNode::INDEX) {
        vocbase = static_cast<IndexNode*>(node)->vocbase();
        collection = static_cast<IndexNode*>(node)->collection();
      }
      else if (nodeType == ExecutionNode::INSERT ||
               nodeType == ExecutionNode::UPDATE ||
               nodeType == ExecutionNode::REPLACE ||
               nodeType == ExecutionNode::REMOVE ||
               nodeType == ExecutionNode::UPSERT) {
        vocbase = static_cast<ModificationNode*>(node)->vocbase();
        collection = static_cast<ModificationNode*>(node)->collection();

        if (nodeType == ExecutionNode::REMOVE ||
            nodeType == ExecutionNode::UPDATE) {
          // Note that in the REPLACE or UPSERT case we are not getting here, since
          // the distributeInClusterRule fires and a DistributionNode is
          // used.
          auto* modNode = static_cast<ModificationNode*>(node);
          modNode->getOptions().ignoreDocumentNotFound = true;
        }
      }
      else {
        // the findNodesOfType() call above restricts the node types
        TRI_ASSERT(false);
      }

      // insert a scatter node
      ExecutionNode* scatterNode = new ScatterNode(plan, plan->nextId(),
          vocbase, collection);
      plan->registerNode(scatterNode);
      scatterNode->addDependency(deps[0]);

      // insert a remote node
      ExecutionNode* remoteNode = new RemoteNode(plan, plan->nextId(), vocbase,
          collection, "", "", "");
      plan->registerNode(remoteNode);
      remoteNode->addDependency(scatterNode);

      // re-link with the remote node
      node->addDependency(remoteNode);

      // insert another remote node
      remoteNode = new RemoteNode(plan, plan->nextId(), vocbase, collection, "", "", "");
      plan->registerNode(remoteNode);
      remoteNode->addDependency(node);

      // insert a gather node
      ExecutionNode* gatherNode = new GatherNode(plan, plan->nextId(), vocbase,
          collection);
      plan->registerNode(gatherNode);
      gatherNode->addDependency(remoteNode);

      // and now link the gather node with the rest of the plan
      if (parents.size() == 1) {
        parents[0]->replaceDependency(deps[0], gatherNode);
      }

      // check if the node that we modified was at the end of a subquery
      auto it = subqueries.find(node);

      if (it != subqueries.end()) {
        static_cast<SubqueryNode*>((*it).second)->setSubquery(gatherNode, true);
      }

      if (isRootNode) {
        // if we replaced the root node, set a new root node
        plan->root(gatherNode);
      }

      wasModified = true;
    }
  }

  opt->addPlan(plan, rule, wasModified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief distribute operations in cluster
///
/// this rule inserts distribute, remote nodes so operations on sharded
/// collections actually work, this differs from scatterInCluster in that every
/// incoming row is only sent to one shard and not all as in scatterInCluster
///
/// it will change plans in place
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::distributeInClusterRule (Optimizer* opt,
                                             ExecutionPlan* plan,
                                             Optimizer::Rule const* rule) {
  bool wasModified = false;

  if (triagens::arango::ServerState::instance()->isCoordinator()) {
    // we are a coordinator, we replace the root if it is a modification node

    // only replace if it is the last node in the plan
    auto node = plan->root();
    TRI_ASSERT(node != nullptr);

    while (node != nullptr) {
      // loop until we find a modification node or the end of the plan
      auto nodeType = node->getType();

      if (nodeType == ExecutionNode::INSERT ||
          nodeType == ExecutionNode::REMOVE ||
          nodeType == ExecutionNode::UPDATE ||
          nodeType == ExecutionNode::REPLACE ||
          nodeType == ExecutionNode::UPSERT) {
        // found a node!
        break;
      }

      if (! node->hasDependency()) {
        // reached the end without finding a modification node
        opt->addPlan(plan, rule, wasModified);
        return;
      }

      node = node->getFirstDependency();
    }

    TRI_ASSERT(node != nullptr);
    if (node == nullptr) {
      THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "logic error");
    }

    // remember the parent of the modification node (nullptr if it is the root)
    ExecutionNode* originalParent = nullptr;
    {
      if (node->hasParent()) {
        auto const& parents = node->getParents();
        originalParent = parents[0];
        TRI_ASSERT(originalParent != nullptr);
        TRI_ASSERT(node != plan->root());
      }
      else {
        TRI_ASSERT(node == plan->root());
      }
    }

    // when we get here, we have found a matching data-modification node!
    auto const nodeType = node->getType();

    TRI_ASSERT(nodeType == ExecutionNode::INSERT ||
               nodeType == ExecutionNode::REMOVE ||
               nodeType == ExecutionNode::UPDATE ||
               nodeType == ExecutionNode::REPLACE ||
               nodeType == ExecutionNode::UPSERT);

    Collection const* collection = static_cast<ModificationNode*>(node)->collection();
    bool const defaultSharding = collection->usesDefaultSharding();

    if (nodeType == ExecutionNode::REMOVE ||
        nodeType == ExecutionNode::UPDATE) {
      if (! defaultSharding) {
        // We have to use a ScatterNode.
        opt->addPlan(plan, rule, wasModified);
        return;
      }
    }

    // In the remaining cases we use a DistributeNode...

    TRI_ASSERT(node->hasDependency());
    auto const& deps = node->getDependencies();

    if (originalParent != nullptr) {
      originalParent->removeDependency(node);
      // unlink the node
      auto root = plan->root();
      plan->unlinkNode(node, true);
      plan->root(root, true); // fix root node
    }
    else {
      // unlink the node
      plan->unlinkNode(node, true);
      plan->root(deps[0], true); // fix root node
    }

    // extract database from plan node
    TRI_vocbase_t* vocbase = static_cast<ModificationNode*>(node)->vocbase();

    // insert a distribute node
    ExecutionNode* distNode = nullptr;
    Variable const* inputVariable;

    if (nodeType == ExecutionNode::INSERT ||
        nodeType == ExecutionNode::REMOVE) {
      TRI_ASSERT(node->getVariablesUsedHere().size() == 1);

      // in case of an INSERT, the DistributeNode is responsible for generating keys
      // if none present
      bool const createKeys = (nodeType == ExecutionNode::INSERT);
      inputVariable = node->getVariablesUsedHere()[0];
      distNode = new DistributeNode(plan, plan->nextId(),
          vocbase, collection, inputVariable->id, createKeys, true);
    }
    else if (nodeType == ExecutionNode::REPLACE) {
      std::vector<Variable const*> v = node->getVariablesUsedHere();
      if (defaultSharding && v.size() > 1) {
        // We only look into _inKeyVariable
        inputVariable = v[1];
      }
      else {
        // We only look into _inDocVariable
        inputVariable = v[0];
      }
      distNode = new DistributeNode(plan, plan->nextId(),
          vocbase, collection, inputVariable->id, false, v.size() > 1);
    }
    else if (nodeType == ExecutionNode::UPDATE) {
      std::vector<Variable const*> v = node->getVariablesUsedHere();
      if (v.size() > 1) {
        // If there is a key variable:
        inputVariable = v[1];
        // This is the _inKeyVariable! This works, since we use a ScatterNode
        // for non-default-sharding attributes.
      }
      else {
        // was only UPDATE <doc> IN <collection>
        inputVariable = v[0];
      }
      distNode = new DistributeNode(plan, plan->nextId(),
          vocbase, collection, inputVariable->id, false, v.size() > 1);
    }
    else if (nodeType == ExecutionNode::UPSERT) {
      // an UPSERT node has multiple input variables; v[0] and v[2] are
      // passed to the DistributeNode (presumably the search document and
      // the update expression -- confirm against UpsertNode)
      std::vector<Variable const*> v(node->getVariablesUsedHere());
      // bugfix: v[2] is dereferenced below, so at least 3 variables are
      // required here; the previous assertion (>= 2) was too weak to
      // guard that access
      TRI_ASSERT(v.size() >= 3);
      distNode = new DistributeNode(plan, plan->nextId(),
          vocbase, collection, v[0]->id, v[2]->id, false, true);
    }
    else {
      TRI_ASSERT(false);
      THROW_ARANGO_EXCEPTION_MESSAGE(TRI_ERROR_INTERNAL, "logic error");
    }

    TRI_ASSERT(distNode != nullptr);

    plan->registerNode(distNode);
    distNode->addDependency(deps[0]);

    // insert a remote node
    ExecutionNode* remoteNode = new RemoteNode(plan, plan->nextId(), vocbase,
        collection, "", "", "");
    plan->registerNode(remoteNode);
    remoteNode->addDependency(distNode);

    // re-link with the remote node
    node->addDependency(remoteNode);

    // insert another remote node
    remoteNode = new RemoteNode(plan, plan->nextId(), vocbase, collection, "", "", "");
    plan->registerNode(remoteNode);
    remoteNode->addDependency(node);

    // insert a gather node
    ExecutionNode* gatherNode = new GatherNode(plan, plan->nextId(), vocbase, collection);
    plan->registerNode(gatherNode);
    gatherNode->addDependency(remoteNode);

    if (originalParent != nullptr) {
      // we did not replace the root node
      originalParent->addDependency(gatherNode);
    }
    else {
      // we replaced the root node, set a new root node
      plan->root(gatherNode, true);
    }

    wasModified = true;
  }

  opt->addPlan(plan, rule, wasModified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief move filters up into the cluster distribution part of the plan
/// this rule modifies the plan in place
/// filters are moved as far up in the plan as possible to make result sets
/// as small as possible as early as possible
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::distributeFilternCalcToClusterRule (Optimizer* opt,
                                                        ExecutionPlan* plan,
                                                        Optimizer::Rule const* rule) {
  bool modified = false;

  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::GATHER, true));

  for (auto& n : nodes) {
    auto const& remoteNodeList = n->getDependencies();
    TRI_ASSERT(remoteNodeList.size() > 0);
    // moved nodes get re-inserted in front of this RemoteNode
    auto rn = remoteNodeList[0];

    if (! n->hasParent()) {
      continue;
    }

    // variables introduced above the GATHER node; a filter/calculation that
    // depends on any of them must not be moved below its definition
    std::unordered_set<Variable const*> varsSetHere;
    auto parents = n->getParents();

    while (true) {
      bool stopSearching = false;
      auto inspectNode = parents[0];

      switch (inspectNode->getType()) {
        case EN::ENUMERATE_LIST:
        case EN::SINGLETON:
        case EN::INSERT:
        case EN::REMOVE:
        case EN::REPLACE:
        case EN::UPDATE:
        case EN::UPSERT: {
          // record the variables these nodes introduce, then keep walking up
          for (auto& v : inspectNode->getVariablesSetHere()) {
            varsSetHere.emplace(v);
          }
          parents = inspectNode->getParents();
          continue; // resumes the enclosing while (true) loop
        }
        case EN::AGGREGATE:
        case EN::SUBQUERY:
        case EN::RETURN:
        case EN::NORESULTS:
        case EN::SCATTER:
        case EN::DISTRIBUTE:
        case EN::GATHER:
        case EN::ILLEGAL:
        case EN::REMOTE:
        case EN::LIMIT:
        case EN::SORT:
        case EN::INDEX:
        case EN::ENUMERATE_COLLECTION:
        case EN::TRAVERSAL:
          // these node types terminate the upward search
          //do break
          stopSearching = true;
          break;

        case EN::CALCULATION: {
          auto calc = static_cast<CalculationNode const*>(inspectNode);
          // check if the expression can be executed on a DB server safely
          if (! calc->expression()->canRunOnDBServer()) {
            stopSearching = true;
            break;
          }
          // intentionally fall through here
        }

        case EN::FILTER:
          for (auto& v : inspectNode->getVariablesUsedHere()) {
            if (varsSetHere.find(v) != varsSetHere.end()) {
              // do not move over the definition of variables that we need
              stopSearching = true;
              break;
            }
          }

          if (! stopSearching) {
            // remember our cursor...
            parents = inspectNode->getParents();
            // then unlink the filter/calculator from the plan
            plan->unlinkNode(inspectNode);
            // and re-insert into plan in front of the remoteNode
            plan->insertDependency(rn, inspectNode);

            modified = true;
            //ready to rumble!
          }
          break;
      }

      if (stopSearching) {
        break;
      }
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief move sorts up into the cluster distribution part of the plan
/// this rule modifies the plan in place
/// sorts are moved as far up in the plan as possible to make result sets
/// as small as possible as early as possible
///
/// filters are not pushed beyond limits
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::distributeSortToClusterRule (Optimizer* opt,
                                                 ExecutionPlan* plan,
                                                 Optimizer::Rule const* rule) {
  bool modified = false;

  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::GATHER, true));

  for (auto& n : nodes) {
    auto const& remoteNodeList = n->getDependencies();
    auto gatherNode = static_cast<GatherNode*>(n);
    TRI_ASSERT(remoteNodeList.size() > 0);
    // moved SORT nodes get re-inserted in front of this RemoteNode
    auto rn = remoteNodeList[0];

    if (! n->hasParent()) {
      continue;
    }

    auto parents = n->getParents();

    while (1) {
      bool stopSearching = false;

      auto inspectNode = parents[0];

      switch (inspectNode->getType()) {
        case EN::ENUMERATE_LIST:
        case EN::SINGLETON:
        case EN::AGGREGATE:
        case EN::INSERT:
        case EN::REMOVE:
        case EN::REPLACE:
        case EN::UPDATE:
        case EN::UPSERT:
        case EN::CALCULATION:
        case EN::FILTER:
        case EN::SUBQUERY:
        case EN::RETURN:
        case EN::NORESULTS:
        case EN::SCATTER:
        case EN::DISTRIBUTE:
        case EN::GATHER:
        case EN::ILLEGAL:
        case EN::REMOTE:
        case EN::LIMIT:
        case EN::INDEX:
        case EN::TRAVERSAL:
        case EN::ENUMERATE_COLLECTION:
          // For all these, we do not want to pull a SortNode further down
          // out to the DBservers, note that potential FilterNodes and
          // CalculationNodes that can be moved to the DBservers have
          // already been moved over by the distribute-filtercalc-to-cluster
          // rule which is done first.
          stopSearching = true;
          break;
        case EN::SORT:
          auto thisSortNode = static_cast<SortNode*>(inspectNode);

          // remember our cursor...
          parents = inspectNode->getParents();
          // then unlink the filter/calculator from the plan
          plan->unlinkNode(inspectNode);
          // and re-insert into plan in front of the remoteNode
          plan->insertDependency(rn, inspectNode);
          // let the GATHER node merge-sort on the same criteria, so the
          // coordinator still produces globally sorted output
          gatherNode->setElements(thisSortNode->getElements());
          modified = true;
          //ready to rumble!
      }

      if (stopSearching) {
        break;
      }
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief try to get rid of a RemoteNode->ScatterNode combination which has
/// only a SingletonNode and possibly some CalculationNodes as dependencies
////////////////////////////////////////////////////////////////////////////////
void triagens::aql::removeUnnecessaryRemoteScatterRule (Optimizer* opt,
                                                        ExecutionPlan* plan,
                                                        Optimizer::Rule const* rule) {
  std::unordered_set<ExecutionNode*> obsolete;

  // a REMOTE sitting directly on top of a SCATTER is superfluous when
  // everything upstream of the SCATTER consists only of SINGLETON nodes and
  // calculations that are safe to run on a DB server
  std::vector<ExecutionNode*> remotes(plan->findNodesOfType(EN::REMOTE, true));

  for (auto& remote : remotes) {
    if (! remote->hasDependency()) {
      continue;
    }

    auto const scatter = remote->getFirstDependency();
    if (scatter->getType() != EN::SCATTER) {
      continue;
    }

    // inspect the dependency chain below the scatter node
    bool removable = true;
    for (auto current = scatter; current != nullptr; ) {
      auto const& dependencies = current->getDependencies();
      if (dependencies.size() != 1) {
        break;
      }

      current = dependencies[0];
      auto const type = current->getType();

      if (type == EN::CALCULATION) {
        auto calc = static_cast<CalculationNode const*>(current);
        if (! calc->expression()->canRunOnDBServer()) {
          // expression must not be executed on a DB server
          removable = false;
          break;
        }
      }
      else if (type != EN::SINGLETON) {
        // any other node type disqualifies the optimization
        removable = false;
        break;
      }
    }

    if (removable) {
      obsolete.emplace(remote);
      obsolete.emplace(scatter);
    }
  }

  if (! obsolete.empty()) {
    plan->unlinkNodes(obsolete);
  }

  opt->addPlan(plan, rule, ! obsolete.empty());
}
////////////////////////////////////////////////////////////////////////////////
/// WalkerWorker for undistributeRemoveAfterEnumColl
////////////////////////////////////////////////////////////////////////////////
class RemoveToEnumCollFinder final : public WalkerWorker<ExecutionNode> {
  ExecutionPlan* _plan;
  // collects the nodes that can be removed from the plan
  std::unordered_set<ExecutionNode*>& _toUnlink;
  bool _remove;                        // a suitable REMOVE node was already found
  bool _scatter;                       // a SCATTER/DISTRIBUTE node was already seen
  bool _gather;                        // a GATHER node was already seen
  EnumerateCollectionNode* _enumColl;  // the enumeration over the target collection
  ExecutionNode* _setter;              // the node setting the REMOVE's input variable
  const Variable* _variable;           // the variable whose documents get removed
  ExecutionNode* _lastNode;            // the node visited in the previous step

  public:

    RemoveToEnumCollFinder (ExecutionPlan* plan,
                            std::unordered_set<ExecutionNode*>& toUnlink)
      : _plan(plan),
        _toUnlink(toUnlink),
        _remove(false),
        _scatter(false),
        _gather(false),
        _enumColl(nullptr),
        _setter(nullptr),
        _variable(nullptr),
        _lastNode(nullptr) {
    }

    ~RemoveToEnumCollFinder () {
    }

    // Walks upwards from a REMOVE node, checking whether the REMOVE can be
    // pushed down to the shards. Returns false to continue walking and true
    // to stop; on abort, all nodes scheduled for unlinking are discarded.
    bool before (ExecutionNode* en) override final {
      switch (en->getType()) {
        case EN::REMOVE: {
          if (_remove) {
            break;
          }

          // find the variable we are removing . . .
          auto rn = static_cast<RemoveNode*>(en);
          auto varsToRemove = rn->getVariablesUsedHere();

          // remove nodes always have one input variable
          TRI_ASSERT(varsToRemove.size() == 1);

          _setter = _plan->getVarSetBy(varsToRemove[0]->id);
          TRI_ASSERT(_setter != nullptr);
          auto enumColl = _setter;

          if (_setter->getType() == EN::CALCULATION) {
            // this should be an attribute access for _key
            auto cn = static_cast<CalculationNode*>(_setter);
            if (! cn->expression()->isAttributeAccess()) {
              break; // abort . . .
            }
            // check the variable is the same as the remove variable
            auto vars = cn->getVariablesSetHere();
            if (vars.size() != 1 || vars[0]->id != varsToRemove[0]->id) {
              break; // abort . . .
            }
            // check the remove node's collection is sharded over _key
            std::vector<std::string> shardKeys = rn->collection()->shardKeys();
            if (shardKeys.size() != 1 || shardKeys[0] != TRI_VOC_ATTRIBUTE_KEY) {
              break; // abort . . .
            }

            // set the varsToRemove to the variable in the expression of this
            // node and also define enumColl
            varsToRemove = cn->getVariablesUsedHere();
            TRI_ASSERT(varsToRemove.size() == 1);
            enumColl = _plan->getVarSetBy(varsToRemove[0]->id);
            // bug fix: assert on the node just looked up (previously this
            // re-checked _setter, which had already been validated above)
            TRI_ASSERT(enumColl != nullptr);
          }

          if (enumColl->getType() != EN::ENUMERATE_COLLECTION) {
            break; // abort . . .
          }

          _enumColl = static_cast<EnumerateCollectionNode*>(enumColl);

          if (_enumColl->collection() != rn->collection()) {
            break; // abort . . .
          }

          _variable = varsToRemove[0]; // the variable we'll remove
          _remove = true;
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::REMOTE: {
          _toUnlink.emplace(en);
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::DISTRIBUTE:
        case EN::SCATTER: {
          if (_scatter) { // met more than one scatter node
            break; // abort . . .
          }
          _scatter = true;
          _toUnlink.emplace(en);
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::GATHER: {
          if (_gather) { // met more than one gather node
            break; // abort . . .
          }
          _gather = true;
          _toUnlink.emplace(en);
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::FILTER: {
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::CALCULATION: {
          TRI_ASSERT(_setter != nullptr);
          if (_setter->getType() == EN::CALCULATION && _setter->id() == en->id()) {
            // this is the calculation producing the REMOVE input itself
            _lastNode = en;
            return false; // continue . . .
          }
          if (_lastNode == nullptr || _lastNode->getType() != EN::FILTER) {
            // doesn't match the last filter node
            break; // abort . . .
          }
          auto cn = static_cast<CalculationNode*>(en);
          auto fn = static_cast<FilterNode*>(_lastNode);

          // check these are a Calc-Filter pair
          if (cn->getVariablesSetHere()[0]->id != fn->getVariablesUsedHere()[0]->id) {
            break; // abort . . .
          }

          // check that we are filtering/calculating something with the variable
          // we are to remove
          auto varsUsedHere = cn->getVariablesUsedHere();
          if (varsUsedHere.size() != 1) {
            break; // abort . . .
          }
          if (varsUsedHere[0]->id != _variable->id) {
            break; // abort . . .
          }
          _lastNode = en;
          return false; // continue . . .
        }
        case EN::ENUMERATE_COLLECTION: {
          // check that we are enumerating the variable we are to remove
          // and that we have already seen a remove node
          TRI_ASSERT(_enumColl != nullptr);
          if (en->id() != _enumColl->id()) {
            break; // abort . . .
          }
          return true; // reached the end!
        }
        case EN::SINGLETON:
        case EN::ENUMERATE_LIST:
        case EN::SUBQUERY:
        case EN::AGGREGATE:
        case EN::INSERT:
        case EN::REPLACE:
        case EN::UPDATE:
        case EN::UPSERT:
        case EN::RETURN:
        case EN::NORESULTS:
        case EN::ILLEGAL:
        case EN::LIMIT:
        case EN::SORT:
        case EN::TRAVERSAL:
        case EN::INDEX: {
          // if we meet any of the above, then we abort . . .
        }
      }
      // abort: discard everything scheduled for unlinking and stop walking
      _toUnlink.clear();
      return true;
    }
};
////////////////////////////////////////////////////////////////////////////////
/// @brief recognizes that a RemoveNode can be moved to the shards.
////////////////////////////////////////////////////////////////////////////////
// Recognizes that a RemoveNode can be moved to the shards: collects the
// SCATTER/GATHER/REMOTE nodes that become obsolete and unlinks them.
void triagens::aql::undistributeRemoveAfterEnumCollRule (Optimizer* opt,
                                                         ExecutionPlan* plan,
                                                         Optimizer::Rule const* rule) {
  std::unordered_set<ExecutionNode*> obsoleteNodes;

  // walk upwards from every REMOVE node, collecting removable nodes
  for (auto& removeNode : plan->findNodesOfType(EN::REMOVE, true)) {
    RemoveToEnumCollFinder finder(plan, obsoleteNodes);
    removeNode->walk(&finder);
  }

  bool const changed = ! obsoleteNodes.empty();
  if (changed) {
    plan->unlinkNodes(obsoleteNodes);
  }

  opt->addPlan(plan, rule, changed);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief auxilliary struct for finding common nodes in OR conditions
////////////////////////////////////////////////////////////////////////////////
struct CommonNodeFinder {
  // attribute-access / indexed-access candidates collected while scanning the
  // OR branches; at most two candidates are kept at any time
  std::vector<AstNode const*> possibleNodes;

  // Determines whether <node> is an OR-combination of comparisons that all
  // involve one common attribute (or reference). On success, <commonNode> is
  // set to that node and <commonName> to its string representation.
  // <condition> is the primary comparison type looked for; for any value
  // other than ==, the range operators (<, <=, >, >=, IN) are accepted too.
  bool find (AstNode const* node,
             AstNodeType condition,
             AstNode const*& commonNode,
             std::string& commonName) {

    if (node->type == NODE_TYPE_OPERATOR_BINARY_OR) {
      // recurse into both branches: both must agree on the common node
      return (find(node->getMember(0), condition, commonNode, commonName)
              && find(node->getMember(1), condition, commonNode, commonName));
    }

    if (node->type == NODE_TYPE_VALUE) {
      possibleNodes.clear();
      return true;
    }

    if (node->type == condition
        || (condition != NODE_TYPE_OPERATOR_BINARY_EQ
            && ( node->type == NODE_TYPE_OPERATOR_BINARY_LE
                 || node->type == NODE_TYPE_OPERATOR_BINARY_LT
                 || node->type == NODE_TYPE_OPERATOR_BINARY_GE
                 || node->type == NODE_TYPE_OPERATOR_BINARY_GT
                 || node->type == NODE_TYPE_OPERATOR_BINARY_IN))) {

      auto lhs = node->getMember(0);
      auto rhs = node->getMember(1);

      // IN is only usable when its right-hand side is an array literal
      bool const isIn = (node->type == NODE_TYPE_OPERATOR_BINARY_IN && rhs->isArray());

      if (node->type == NODE_TYPE_OPERATOR_BINARY_IN &&
          rhs->type == NODE_TYPE_EXPANSION) {
        // ooh, cannot optimize this (yet)
        possibleNodes.clear();
        return false;
      }

      if (! isIn && lhs->isConstant()) {
        // constant <op> something: the common node is on the right
        commonNode = rhs;
        commonName = commonNode->toString();
        possibleNodes.clear();
        return true;
      }

      if (rhs->isConstant()) {
        // something <op> constant: the common node is on the left
        commonNode = lhs;
        commonName = commonNode->toString();
        possibleNodes.clear();
        return true;
      }

      if (rhs->type == NODE_TYPE_FCALL ||
          rhs->type == NODE_TYPE_FCALL_USER ||
          rhs->type == NODE_TYPE_REFERENCE) {
        // non-constant function call / reference on the right:
        // treat the left-hand side as the common node
        commonNode = lhs;
        commonName = commonNode->toString();
        possibleNodes.clear();
        return true;
      }

      if (! isIn &&
          (lhs->type == NODE_TYPE_FCALL ||
           lhs->type == NODE_TYPE_FCALL_USER ||
           lhs->type == NODE_TYPE_REFERENCE)) {
        commonNode = rhs;
        commonName = commonNode->toString();
        possibleNodes.clear();
        return true;
      }

      if (! isIn &&
          (lhs->type == NODE_TYPE_ATTRIBUTE_ACCESS ||
           lhs->type == NODE_TYPE_INDEXED_ACCESS)) {
        if (possibleNodes.size() == 2) {
          // two candidates already known: check whether lhs matches one
          for (size_t i = 0; i < 2; i++) {
            if (lhs->toString() == possibleNodes[i]->toString()) {
              commonNode = possibleNodes[i];
              commonName = commonNode->toString();
              possibleNodes.clear();
              return true;
            }
          }
          // don't return, must consider the other side of the condition
        }
        else {
          possibleNodes.emplace_back(lhs);
        }
      }
      if (rhs->type == NODE_TYPE_ATTRIBUTE_ACCESS ||
          rhs->type == NODE_TYPE_INDEXED_ACCESS) {
        if (possibleNodes.size() == 2) {
          for (size_t i = 0; i < 2; i++) {
            if (rhs->toString() == possibleNodes[i]->toString()) {
              commonNode = possibleNodes[i];
              commonName = commonNode->toString();
              possibleNodes.clear();
              return true;
            }
          }
          // neither side matched any known candidate
          return false;
        }
        else {
          possibleNodes.emplace_back(rhs);
          return true;
        }
      }
    }
    possibleNodes.clear();
    // NOTE(review): this relies on commonName having been filled by an
    // earlier invocation; an empty name means no common node was established
    return (! commonName.empty());
  }
};
////////////////////////////////////////////////////////////////////////////////
/// @brief auxilliary struct for the OR-to-IN conversion
////////////////////////////////////////////////////////////////////////////////
struct OrToInConverter {
  // the comparison values collected from the individual OR branches;
  // these become the members of the IN array
  std::vector<AstNode const*> valueNodes;
  CommonNodeFinder finder;
  // the attribute/reference shared by all OR branches
  AstNode const* commonNode = nullptr;
  std::string commonName;

  // Builds the replacement expression: <commonNode> IN [ <valueNodes> ]
  AstNode* buildInExpression (Ast* ast) {
    // the list of comparison values
    auto list = ast->createNodeArray();
    for (auto& x : valueNodes) {
      list->addMember(x);
    }
    // return a new IN operator node
    return ast->createNodeBinaryOperator(NODE_TYPE_OPERATOR_BINARY_IN,
                                         commonNode->clone(ast),
                                         list);
  }

  // Returns true if <node> is an OR of ==/IN comparisons on a single common
  // attribute; on success, valueNodes holds all comparison values.
  bool canConvertExpression (AstNode const* node) {
    if (finder.find(node, NODE_TYPE_OPERATOR_BINARY_EQ, commonNode, commonName)) {
      return canConvertExpressionWalker(node);
    }
    else if (finder.find(node, NODE_TYPE_OPERATOR_BINARY_IN, commonNode, commonName)) {
      return canConvertExpressionWalker(node);
    }
    return false;
  }

  // Recursive helper: descends the OR tree, collecting the value side of
  // each == / IN comparison whose other side is the common node.
  bool canConvertExpressionWalker (AstNode const* node) {
    if (node->type == NODE_TYPE_OPERATOR_BINARY_OR) {
      return (canConvertExpressionWalker(node->getMember(0)) &&
              canConvertExpressionWalker(node->getMember(1)));
    }

    if (node->type == NODE_TYPE_OPERATOR_BINARY_EQ) {
      auto lhs = node->getMember(0);
      auto rhs = node->getMember(1);

      // exactly one side must be the common node; the other is the value
      if (canConvertExpressionWalker(rhs) && ! canConvertExpressionWalker(lhs)) {
        valueNodes.emplace_back(lhs);
        return true;
      }

      if (canConvertExpressionWalker(lhs) && ! canConvertExpressionWalker(rhs)) {
        valueNodes.emplace_back(rhs);
        return true;
      }
      // if canConvertExpressionWalker(lhs) and canConvertExpressionWalker(rhs), then one of
      // the equalities in the OR statement is of the form x == x
      // fall-through intentional
    }
    else if (node->type == NODE_TYPE_OPERATOR_BINARY_IN) {
      auto lhs = node->getMember(0);
      auto rhs = node->getMember(1);

      if (canConvertExpressionWalker(lhs) && ! canConvertExpressionWalker(rhs) && rhs->isArray()) {
        // flatten the IN array into individual values
        size_t const n = rhs->numMembers();
        for (size_t i = 0; i < n; ++i) {
          valueNodes.emplace_back(rhs->getMemberUnchecked(i));
        }
        return true;
      }
      // fall-through intentional
    }
    else if (node->type == NODE_TYPE_REFERENCE ||
             node->type == NODE_TYPE_ATTRIBUTE_ACCESS ||
             node->type == NODE_TYPE_INDEXED_ACCESS) {
      // get a string representation of the node for comparisons
      return (node->toString() == commonName);
    }

    return false;
  }
};
////////////////////////////////////////////////////////////////////////////////
/// @brief this rule replaces expressions of the type:
/// x.val == 1 || x.val == 2 || x.val == 3
// with
// x.val IN [1,2,3]
// when the OR conditions are present in the same FILTER node, and refer to the
// same (single) attribute.
////////////////////////////////////////////////////////////////////////////////
// Replaces expressions of the type
//   x.val == 1 || x.val == 2 || x.val == 3
// with
//   x.val IN [1,2,3]
// when the OR conditions live in the same FILTER node and refer to the
// same (single) attribute.
void triagens::aql::replaceOrWithInRule (Optimizer* opt,
                                         ExecutionPlan* plan,
                                         Optimizer::Rule const* rule) {
  bool modified = false;

  for (auto const& filter : plan->findNodesOfType(EN::FILTER, true)) {
    TRI_ASSERT(filter->hasDependency());

    auto const dependency = filter->getFirstDependency();
    if (dependency->getType() != EN::CALCULATION) {
      continue;
    }

    auto filterNode = static_cast<FilterNode*>(filter);
    auto calcNode = static_cast<CalculationNode*>(dependency);

    // the calculation must produce exactly the variable the filter consumes
    auto setVars = calcNode->getVariablesSetHere();
    if (setVars.size() != 1 ||
        setVars[0]->id != filterNode->getVariablesUsedHere()[0]->id) {
      continue;
    }

    auto expressionNode = calcNode->expression()->node();
    if (expressionNode->type != NODE_TYPE_OPERATOR_BINARY_OR) {
      continue;
    }

    OrToInConverter converter;
    if (! converter.canConvertExpression(expressionNode)) {
      continue;
    }

    // build the replacement calculation; on failure the expression must be
    // freed manually since it is not yet owned by any node
    auto inNode = converter.buildInExpression(plan->getAst());
    Expression* expr = new Expression(plan->getAst(), inNode);

    ExecutionNode* replacement = nullptr;
    try {
      TRI_IF_FAILURE("OptimizerRules::replaceOrWithInRuleOom") {
        THROW_ARANGO_EXCEPTION(TRI_ERROR_DEBUG);
      }
      replacement = new CalculationNode(plan, plan->nextId(), expr, setVars[0]);
    }
    catch (...) {
      delete expr;
      throw;
    }

    plan->registerNode(replacement);
    plan->replaceNode(calcNode, replacement);
    modified = true;
  }

  opt->addPlan(plan, rule, modified);
}
struct RemoveRedundantOr {
  // the weakest (i.e. least restrictive) bound value found so far
  AstNode const* bestValue = nullptr;
  // the comparison operator belonging to bestValue
  AstNodeType comparison;
  // whether the current best bound is inclusive (<= or >=)
  bool inclusive;
  // whether comparison/bestValue have been initialized yet
  bool isComparisonSet = false;
  CommonNodeFinder finder;
  // the attribute common to all OR branches
  AstNode const* commonNode = nullptr;
  std::string commonName;

  // Builds the single comparison node that replaces the redundant OR.
  AstNode* createReplacementNode (Ast* ast) {
    TRI_ASSERT(commonNode != nullptr);
    TRI_ASSERT(bestValue != nullptr);
    TRI_ASSERT(isComparisonSet == true);
    return ast->createNodeBinaryOperator(comparison, commonNode->clone(ast),
                                         bestValue);
  }

  // true for <= and >=, i.e. bounds that include the boundary value
  bool isInclusiveBound (AstNodeType type) {
    return (type == NODE_TYPE_OPERATOR_BINARY_GE || type == NODE_TYPE_OPERATOR_BINARY_LE);
  }

  // Returns -1 if both the stored comparison and <type> are high bounds
  // (<, <=), 1 if both are low bounds (>, >=), and 0 if they are
  // incompatible (one high, one low).
  int isCompatibleBound (AstNodeType type, AstNode const* value) {
    if ((comparison == NODE_TYPE_OPERATOR_BINARY_LE
         || comparison == NODE_TYPE_OPERATOR_BINARY_LT) &&
        (type == NODE_TYPE_OPERATOR_BINARY_LE
         || type == NODE_TYPE_OPERATOR_BINARY_LT)) {
      return -1; //high bound
    }
    else if ((comparison == NODE_TYPE_OPERATOR_BINARY_GE
              || comparison == NODE_TYPE_OPERATOR_BINARY_GT) &&
             (type == NODE_TYPE_OPERATOR_BINARY_GE
              || type == NODE_TYPE_OPERATOR_BINARY_GT)) {
      return 1; //low bound
    }
    return 0; //incompatible bounds
  }

  // returns false if the existing value is better and true if the input value is
  // better
  bool compareBounds (AstNodeType type, AstNode const* value, int lowhigh) {
    int cmp = CompareAstNodes(bestValue, value, true);

    if (cmp == 0 && (isInclusiveBound(comparison) != isInclusiveBound(type))) {
      // equal values: the inclusive bound subsumes the exclusive one
      return (isInclusiveBound(type) ? true : false);
    }
    return (cmp * lowhigh == 1);
  }

  // Entry point: checks whether <node> is an OR of range comparisons on a
  // single attribute that can be collapsed into one comparison.
  bool hasRedundantCondition (AstNode const* node) {
    if (finder.find(node, NODE_TYPE_OPERATOR_BINARY_LT, commonNode, commonName)) {
      return hasRedundantConditionWalker(node);
    }
    return false;
  }

  // Recursive helper: walks the OR tree and keeps the weakest compatible
  // bound in comparison/bestValue.
  bool hasRedundantConditionWalker (AstNode const* node) {
    AstNodeType type = node->type;

    if (type == NODE_TYPE_OPERATOR_BINARY_OR) {
      return (hasRedundantConditionWalker(node->getMember(0)) &&
              hasRedundantConditionWalker(node->getMember(1)));
    }

    if (type == NODE_TYPE_OPERATOR_BINARY_LE
        || type == NODE_TYPE_OPERATOR_BINARY_LT
        || type == NODE_TYPE_OPERATOR_BINARY_GE
        || type == NODE_TYPE_OPERATOR_BINARY_GT) {

      auto lhs = node->getMember(0);
      auto rhs = node->getMember(1);

      // constant <op> attribute: normalize by reversing the operator
      if (hasRedundantConditionWalker(rhs)
          && ! hasRedundantConditionWalker(lhs)
          && lhs->isConstant()) {

        if (! isComparisonSet) {
          comparison = Ast::ReverseOperator(type);
          bestValue = lhs;
          isComparisonSet = true;
          return true;
        }

        int lowhigh = isCompatibleBound(Ast::ReverseOperator(type), lhs);
        if (lowhigh == 0) {
          return false;
        }

        if (compareBounds(type, lhs, lowhigh)) {
          comparison = Ast::ReverseOperator(type);
          bestValue = lhs;
        }
        return true;
      }
      // attribute <op> constant
      if (hasRedundantConditionWalker(lhs)
          && ! hasRedundantConditionWalker(rhs)
          && rhs->isConstant()) {

        if (! isComparisonSet) {
          comparison = type;
          bestValue = rhs;
          isComparisonSet = true;
          return true;
        }

        int lowhigh = isCompatibleBound(type, rhs);
        if (lowhigh == 0) {
          return false;
        }

        if (compareBounds(type, rhs, lowhigh)) {
          comparison = type;
          bestValue = rhs;
        }
        return true;
      }
      // if hasRedundantConditionWalker(lhs) and
      // hasRedundantConditionWalker(rhs), then one of the conditions in the OR
      // statement is of the form x == x fall-through intentional
    }
    else if (type == NODE_TYPE_REFERENCE ||
             type == NODE_TYPE_ATTRIBUTE_ACCESS ||
             type == NODE_TYPE_INDEXED_ACCESS) {
      // get a string representation of the node for comparisons
      return (node->toString() == commonName);
    }

    return false;
  }
};
// Collapses a redundant OR of range conditions on the same attribute
// (e.g. x.val < 1 || x.val < 3) into a single comparison.
void triagens::aql::removeRedundantOrRule (Optimizer* opt,
                                           ExecutionPlan* plan,
                                           Optimizer::Rule const* rule) {
  std::vector<ExecutionNode*> filters(plan->findNodesOfType(EN::FILTER, true));
  bool modified = false;

  for (auto const& node : filters) {
    TRI_ASSERT(node->hasDependency());

    auto const producer = node->getFirstDependency();
    if (producer->getType() != EN::CALCULATION) {
      continue;
    }

    auto filterNode = static_cast<FilterNode*>(node);
    auto calculation = static_cast<CalculationNode*>(producer);

    // the calculation must produce exactly the variable the filter consumes
    auto outVars = calculation->getVariablesSetHere();
    if (outVars.size() != 1 ||
        outVars[0]->id != filterNode->getVariablesUsedHere()[0]->id) {
      continue;
    }

    if (calculation->expression()->node()->type != NODE_TYPE_OPERATOR_BINARY_OR) {
      continue;
    }

    RemoveRedundantOr remover;
    if (! remover.hasRedundantCondition(calculation->expression()->node())) {
      continue;
    }

    // build the replacement calculation; free the expression manually if
    // node construction throws, since no node owns it yet
    auto replacementAst = remover.createReplacementNode(plan->getAst());
    Expression* expression = new Expression(plan->getAst(), replacementAst);

    ExecutionNode* replacement = nullptr;
    try {
      replacement = new CalculationNode(plan, plan->nextId(), expression, outVars[0]);
    }
    catch (...) {
      delete expression;
      throw;
    }

    plan->registerNode(replacement);
    plan->replaceNode(calculation, replacement);
    modified = true;
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief remove $OLD and $NEW variables from data-modification statements
/// if not required
////////////////////////////////////////////////////////////////////////////////
// Removes the $OLD and $NEW out variables from data-modification nodes
// when no later node in the plan consumes them.
void triagens::aql::removeDataModificationOutVariablesRule (Optimizer* opt,
                                                            ExecutionPlan* plan,
                                                            Optimizer::Rule const* rule) {
  std::vector<ExecutionNode::NodeType> const modificationTypes = {
    EN::REMOVE,
    EN::INSERT,
    EN::UPDATE,
    EN::REPLACE,
    EN::UPSERT
  };

  bool modified = false;

  for (auto const& candidate : plan->findNodesOfType(modificationTypes, true)) {
    auto modNode = static_cast<ModificationNode*>(candidate);
    TRI_ASSERT(modNode != nullptr);

    auto usedLater = candidate->getVarsUsedLater();

    if (usedLater.find(modNode->getOutVariableOld()) == usedLater.end()) {
      // "$OLD" is not used later
      modNode->clearOutVariableOld();
      modified = true;
    }

    if (usedLater.find(modNode->getOutVariableNew()) == usedLater.end()) {
      // "$NEW" is not used later
      modNode->clearOutVariableNew();
      modified = true;
    }
  }

  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief patch UPDATE statement on single collection that iterates over the
/// entire collection to operate in batches
////////////////////////////////////////////////////////////////////////////////
// Patches an UPDATE statement on a single collection that iterates over the
// entire collection, so it can operate in batches instead of reading the
// complete input first.
void triagens::aql::patchUpdateStatementsRule (Optimizer* opt,
                                               ExecutionPlan* plan,
                                               Optimizer::Rule const* rule) {
  bool modified = false;

  // not need to dive into subqueries here, as UPDATE needs to be on the top level
  std::vector<ExecutionNode*> nodes(plan->findNodesOfType(EN::UPDATE, false));

  for (auto const& n : nodes) {
    // we should only get through here a single time
    auto node = static_cast<ModificationNode*>(n);
    TRI_ASSERT(node != nullptr);

    auto& options = node->getOptions();
    if (! options.readCompleteInput) {
      // already ok
      continue;
    }

    auto const collection = node->collection();

    // walk the dependency chain; `modified` reflects whether the last
    // relevant node seen makes the patch safe, and is reset (with break)
    // whenever an unsuitable node is encountered
    auto dep = n->getFirstDependency();

    while (dep != nullptr) {
      auto const type = dep->getType();

      if (type == EN::ENUMERATE_LIST ||
          type == EN::INDEX ||
          type == EN::SUBQUERY) {
        // not suitable
        modified = false;
        break;
      }

      if (type == EN::ENUMERATE_COLLECTION) {
        auto collectionNode = static_cast<EnumerateCollectionNode const*>(dep);

        if (collectionNode->collection() != collection) {
          // different collection, not suitable
          modified = false;
          break;
        }
        else {
          // full scan over the same collection: patch is possible
          modified = true;
        }
      }

      if (type == EN::TRAVERSAL) {
        // unclear what will be read by the traversal
        modified = false;
        break;
      }

      dep = dep->getFirstDependency();
    }

    if (modified) {
      // switch the UPDATE to batched operation
      options.readCompleteInput = false;
    }
  }

  // always re-add the original plan, be it modified or not
  // only a flag in the plan will be modified
  opt->addPlan(plan, rule, modified);
}
////////////////////////////////////////////////////////////////////////////////
/// @brief merges filter nodes into graph traversal nodes
////////////////////////////////////////////////////////////////////////////////
// Merges filter conditions into graph traversal nodes where possible.
void triagens::aql::mergeFilterIntoTraversalRule (Optimizer* opt,
                                                  ExecutionPlan* plan,
                                                  Optimizer::Rule const* rule) {
  // nothing to do when the plan contains no traversal at all
  if (plan->findNodesOfType(EN::TRAVERSAL, true).empty()) {
    opt->addPlan(plan, rule, false);
    return;
  }

  // start walking from all end nodes of the plan
  bool planAltered = false;
  for (auto const& endNode : plan->findEndNodes(true)) {
    TraversalConditionFinder finder(plan, &planAltered);
    endNode->walk(&finder);
  }

  opt->addPlan(plan, rule, planAltered);
}
// Local Variables:
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// {@inheritDoc}\\|/// @addtogroup\\|// --SECTION--\\|/// @\\}\\)"
// End:
| thurt/arangodb | arangod/Aql/OptimizerRules.cpp | C++ | apache-2.0 | 125,311 |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.base64;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import io.netty.handler.codec.MessageToMessageDecoder;
import io.netty.util.internal.ObjectUtil;
import java.util.List;
/**
* Decodes a Base64-encoded {@link ByteBuf} or US-ASCII {@link String}
* into a {@link ByteBuf}. Please note that this decoder must be used
* with a proper {@link ByteToMessageDecoder} such as {@link DelimiterBasedFrameDecoder}
* if you are using a stream-based transport such as TCP/IP. A typical decoder
* setup for TCP/IP would be:
* <pre>
* {@link ChannelPipeline} pipeline = ...;
*
* // Decoders
* pipeline.addLast("frameDecoder", new {@link DelimiterBasedFrameDecoder}(80, {@link Delimiters#nulDelimiter()}));
* pipeline.addLast("base64Decoder", new {@link Base64Decoder}());
*
* // Encoder
* pipeline.addLast("base64Encoder", new {@link Base64Encoder}());
* </pre>
*/
@Sharable
public class Base64Decoder extends MessageToMessageDecoder<ByteBuf> {

    /** The Base64 dialect (alphabet/padding variant) used for decoding. */
    private final Base64Dialect dialect;

    /**
     * Creates a decoder using the {@link Base64Dialect#STANDARD} dialect.
     */
    public Base64Decoder() {
        this(Base64Dialect.STANDARD);
    }

    /**
     * Creates a decoder using the given dialect.
     *
     * @param dialect the Base64 dialect to use, must not be {@code null}
     */
    public Base64Decoder(Base64Dialect dialect) {
        ObjectUtil.checkNotNull(dialect, "dialect");
        this.dialect = dialect;
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
        // decode the readable region of the incoming buffer; the framework
        // takes care of releasing the input message
        int offset = msg.readerIndex();
        int length = msg.readableBytes();
        out.add(Base64.decode(msg, offset, length, dialect));
    }
}
| fenik17/netty | codec/src/main/java/io/netty/handler/codec/base64/Base64Decoder.java | Java | apache-2.0 | 2,347 |
<?php @ob_start();session_start();require "config/config.php";?>
<?php
// Professor update page: loads the record referenced by ?id= so the form
// below can be pre-filled with the professor's current data.
date_default_timezone_set('America/Manaus');

// professor primary key from the query string (empty string when absent)
$id = isset($_GET['id'])?$_GET['id']:"";

$prof = new \Frequencia\Models\Professor;
$reg = new \Frequencia\Models\Registro_Academico;

// the professor being edited and his registro academico record
$professor = $prof->findByType('idProfessor',$id);
$prof_RA = $professor->RA;
$reg_Prof = $reg->findByType('idRegistro',$professor->RA);

// next free RA number (last stored RA + 1)
// NOTE(review): $RA does not appear to be used anywhere on this page — confirm
$registro = $reg->lastId('idRegistro');
$RA = $registro->RA+1;
?>
<!DOCTYPE html>
<html lang="pt-br">
<head>
<?php header('Content-type: text/html; charset=ISO-8859-1');?>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>BFE | Cadastro</title>
<script src="js/jquery.js"></script>
<link href="vendor/twbs/bootstrap/dist/css/bootstrap.min.css" rel="stylesheet">
<link href="css/default.css" rel="stylesheet">
<link href="css/forms.css" rel="stylesheet">
<link href="css/datepicker.css" rel="stylesheet">
<script src="js/html5shiv.min.js"></script>
<script src="js/respond.min.js"></script>
<script src="js/back_button.js"></script>
<script src="js/bootstrap-datepicker.js"></script>
<script src="js/datepicker-pt-BR.js"></script>
<script>
$(function(){
$("#date_nascimento").datepicker({
format: 'dd/mm/yyyy',
language:"pt-BR"
});
});
$(function(){
$("#date_admissao").datepicker({
format: 'dd/mm/yyyy',
language:"pt-BR"
});
});
$(function(){
$("#date_demissao").datepicker({
format: 'dd/mm/yyyy',
language:"pt-BR"
});
});
</script>
<link href="css/modal.css" rel="stylesheet"/>
<link href="css/inputs.css" rel="stylesheet"/>
</head>
<body>
<div class="container-fluid display-table">
<div class="row display-table-row">
<!-- My SideBar-->
<div class="col-md-2 col-sm-1 hidden-xs display-table-cell valign-top" id="side-menu">
<h1 class="hidden-xs hidden-sm">BFE</h1>
<ul>
<li class="link active">
<a href="frmPrincipal.php" >
<span class="glyphicon glyphicon-th" aria-hidden="true"></span>
<span class="hidden-sm hidden-xs">Principal</span>
</a>
</li>
<li class="link">
<a href="#collapse-aluno" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-user"></span>
<span class="hidden-sm hidden-xs">Alunos</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-aluno">
<li><a href="frmCadAluno.php">Cadastrar</a></li>
<li><a href="frmFindAluno.php">Alterar</a></li>
<li><a href="frmFindAluno.php">Pesquisar</a></li>
</ul>
</li>
<li class="link">
<a href="#collapse-professores" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-education"></span>
<span class="hidden-sm hidden-xs">Professores</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-professores">
<li><a href="frmCadProfessor.php">Cadastrar</a></li>
<li><a href="frmFindProfessor.php">Alterar</a></li>
<li><a href="frmFindProfessor.php">Pesquisar</a></li>
</ul>
</li>
<li class="link">
<a href="#collapse-materias" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-blackboard"></span>
<span class="hidden-sm hidden-xs">Matérias</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-materias">
<li><a href="frmCadMateria.php">Cadastrar</a></li>
<li><a href="frmFindMateria.php">Alterar</a></li>
<li><a href="frmFindMateria.php">Pesquisar</a></li>
</ul>
</li>
<li class="link">
<a href="#collapse-turmas" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-file"></span>
<span class="hidden-sm hidden-xs">Turmas</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-turmas">
<li><a href="frmCadTurma.php">Cadastrar</a></li>
<li><a href="frmFindTurma.php">Alterar</a></li>
<li><a href="frmFindTurma.php">Pesquisar</a></li>
</ul>
</li>
<!--
<li class="link">
<a href="#collapse-frequencia" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-list-alt"></span>
<span class="hidden-sm hidden-xs">Frequência</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-frequencia">
<li><a href="frmCadFrequencia.php">Cadastrar</a></li>
<li><a href="frmFindFrequencia.php">Alterar</a></li>
<li><a href="frmFindFrequencia.php">Pesquisar</a></li>
</ul>
</li>
<li class="link">
<a href="#collapse-report" data-toggle="collapse" aria-controls="collapse-post">
<span class="glyphicon glyphicon-list"></span>
<span class="hidden-sm hidden-xs">Relatórios</span>
<span class="pull-right glyphicon glyphicon-menu-down"></span>
</a>
<ul class="collapse collapseable" id="collapse-report">
<li><a href="">Alunos</a></li>
<li><a href="">Professores</a></li>
<li><a href="">Materias</a></li>
<li><a href="">Turmas</a></li>
</ul>
</li>
-->
</ul>
</div>
<!-- My Content Area-->
<div class="col-md-10 col-sm-11 display-table-cell valign-top box">
<div class="row">
<header id="nav-header" class="clearfix">
<div class="col-md-5">
<nav class="navbar-default pull-left">
<button type="button" class="navbar-toggle collapsed " data-toggle="offcanvas" data-target="#side-menu" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
</nav>
</div>
<div class="col-md-7">
<ul class="pull-left">
<li id="welcome" class="hidden-sm hidden-xs pull-left">Seja Bem Vindo <b><?php echo $_SESSION['Usuario'];?></b></li>
</ul>
<ul class="pull-right">
<li>
<a href="logout.php" class="logout">
<span class="glyphicon glyphicon-log-out" aria-hidden="true"></span>
Logout
</a>
</li>
</ul>
</div>
</header>
</div>
<div id="content">
<header>
<h2 class="page_title">Atualizar Professor</h2>
</header>
<div class="content-inner">
<div class="form form-wrapper">
<form class="form form-horizontal" method="post" name="form">
<div class="form-group col-xs-2 col-md-2" style="margin-right: 300px">
<label for="ra_aluno" class="label label-message">Registro Academico</label>
<?php
if(empty($professor->RA))
echo '<input type="text" class="form-control" id="ra_aluno" required name="ra" placeholder="Registro Academico">';
else
echo '<input type="text" class="form-control" id="ra_aluno" required name="ra" value="'.$reg_Prof->RA.'">';
?>
</div>
<div class="form-group col-xs-8 col-md-8" style="margin-right: 200px">
<label for="nome_aluno" class="label label-message">Nome Completo</label>
<?php
if(empty($professor->Nome))
echo '<input type="text" class="form-control" id="nome_aluno" required name="nome" placeholder="Nome Completo">';
else
echo '<input type="text" class="form-control" id="nome_aluno" required name="nome" value="'.$professor->Nome.'">';
?>
</div>
<div class="form-group col-xs-8 col-md-8" style="margin-right: 200px">
<label for="nome_mae" class="label label-message">Nome da Mãe</label>
<?php
if(empty($professor->Nome_Mae))
echo '<input type="text" class="form-control" id="nome_mae" required name="nome_mae" placeholder="Nome Completo">';
else
echo '<input type="text" class="form-control" id="nome_mae" required name="nome_mae" value="'.$professor->Nome_Mae.'">';
?>
</div>
<div class="form-group col-xs-3 col-xs-3" >
<label for="date" class="label label-message">Data de Nascimento</label>
<?php
if(empty($professor->Data_Nascimento))
echo '<input type="text" class="form-control" id="date_nascimento" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_nascimento" placeholder="dd/mm/aaaa">';
else
echo '<input type="text" class="form-control" id="date_nascimento" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_nascimento"
value="'.(isset($professor->Data_Nascimento)?date('d/m/Y',strtotime($professor->Data_Nascimento)):"").'">';
?>
</div>
<div class="form-group col-xs-2 col-md-2" style="margin-right: 30px;margin-left: 30px" >
<label for="rg" class="label label-message">RG</label>
<?php
if (empty($professor->RG))
echo '<input type="text" class="form-control" name="rg" placeholder="Somente Numeros">';
else
echo '<input type="text" class="form-control" name="rg" value="'.$professor->RG.'">';
?>
</div>
<div class="form-group col-xs-4 col-md-4" >
<label class="label label-message">CPF</label>
<?php
if(empty($professor->CPF))
echo '<input type="text" class="form-control" id="cpf" maxlength="13" name="cpf" placeholder="somente numeros">';
else
echo '<input type="text" class="form-control" id="cpf" maxlength="13" name="cpf" value="'.$professor->CPF.'">';
?>
</div>
<br/>
<div class="form-group col-xs-8 col-md-8" style="margin-right: 200px">
<label for="nome_aluno" class="label label-message">Endereço</label>
<?php
if(empty($professor->Endereco))
echo '<input type="text" class="form-control" id="nome_aluno" required name="endereco" placeholder="Endereço Completo">';
else
echo '<input type="text" class="form-control" id="nome_aluno" required name="endereco" value="'.$professor->Endereco.'">';
?>
</div>
<div class="form-group col-xs-3 col-xs-3" style="margin-right: 30px">
<label for="date" class="label label-message">Data de Admissão</label>
<?php
if(empty($professor->Data_Admissao))
echo '<input type="text" class="form-control" id="date_admissao" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_admissao" placeholder="dd/mm/aaaa">';
else
echo '<input type="text" class="form-control" id="date_admissao" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_admissao"
value="'.(isset($professor->Data_Admissao)?date('d/m/Y',strtotime($professor->Data_Admissao)):"").'">';
?>
</div>
<div class="form-group col-xs-3 col-xs-3" style="margin-right: 30px">
<label for="date" class="label label-message">Data de Demissão</label>
<?php
if($professor->Data_Demissao == '0000-00-00')
echo '<input type="text" class="form-control" id="date_demissao" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_demissao" placeholder="dd/mm/aaaa">';
else
echo '<input type="text" class="form-control" id="date_demissao" maxlength="10" OnKeyPress="formatar('.'##/##/####'.', this)" name="data_demissao"
value="'.(isset($professor->Data_Demissao)?date('d/m/Y',strtotime($professor->Data_Demissao)):"").'">';
?>
</div>
<div class="form-group col-md-3 col-xs-3" >
<label class="label label-message">Telefone</label>
<?php
if (empty($professor->Telefone))
echo '<input type="text" class="form-control" id="title" name="telefone" placeholder="Telefone de contato">';
else
echo '<input type="text" class="form-control" id="title" name="telefone" value="'.$professor->Telefone.'">';
?>
</div>
<div class="form-group col-md-10 col-xs-10" >
<label class="label label-message">E-mail</label>
<?php
if(empty($professor->Email))
echo '<input type="text" class="form-control" id="title" name="email" placeholder="yourEmail@me.com">';
else
echo '<input type="text" class="form-control" id="title" name="email" value="'.$professor->Email.'">';
?>
</div>
<br/>
<div id="date"></div>
<div class="clearfix">
<button type="submit" style="margin-top: 20px" class="btn btn-primary pull-right"> <span class="glyphicon glyphicon-refresh"></span> Atualizar</button>
</div>
<input type="hidden" name="update">
</form>
</div>
</div>
</div>
<div class="row">
<footer id="admin-footer" class="clearfix">
<div class="pull-left"><b>Copyright </b>© 2016</div>
<div class="pull-right">OpenSource</div>
</footer>
</div>
</div>
</div>
</div>
<script src="vendor/twbs/bootstrap/dist/js/bootstrap.min.js"></script>
<script src="js/default.js"></script>
</body>
</html>
<?php
// Handles the form submission: converts the Brazilian-formatted dates
// (dd/mm/yyyy) to MySQL format, updates the professor record and shows a
// confirmation modal.
if(isset($_POST['update']))
{
    // dd/mm/yyyy -> yyyy-mm-dd; the '/' -> '-' swap makes strtotime() parse
    // the date as day-first (European convention)
    $date = str_replace('/', '-', $_POST['data_nascimento']);
    $data_nascimento = date('Y-m-d', strtotime($date));

    $date = str_replace('/', '-', $_POST['data_admissao']);
    $data_admissao = date('Y-m-d', strtotime($date));

    if($_POST['data_demissao'] == null){
        // no dismissal date given: professor is still active
        $data_demissao = '0000-00-00';
        $situacao = 1;
    }else{
        $date = str_replace('/', '-', $_POST['data_demissao']);
        $data_demissao = date('Y-m-d', strtotime($date));
        $situacao = 0;
    }

    $prof->update($id,
    [
        'RA' => $prof_RA,
        'Nome' => $_POST['nome'],
        'Data_Nascimento' => $data_nascimento,
        'Data_Admissao' => $data_admissao,
        'Data_Demissao' => $data_demissao,
        'Nome_Mae' => $_POST['nome_mae'],
        'RG' => $_POST['rg'],
        'CPF' => $_POST['cpf'],
        'Endereco' => $_POST['endereco'],
        'Telefone' => $_POST['telefone'],
        'Email' => $_POST['email'],
        'Situacao' => $situacao
    ],'idProfessor');

    // security fix: escape the submitted name before echoing it back into
    // the page, otherwise the form is vulnerable to reflected XSS
    $nomeSeguro = htmlspecialchars($_POST['nome'], ENT_QUOTES, 'ISO-8859-1');

    echo '
    <script>
        $(document).ready(function(){
            $("#messagemSucess").modal("show");
        });
    </script>
    <div class="modal fade" id="messagemSucess" tabindex="-1" role="dialog">
        <div class="modal-dialog">
            <div class="modal-content modal-window">
                <div class="modal-header">
                    <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button>
                    <h4 class="modal-title">Mensagem BFE</h4>
                </div>
                <div class="modal-body">
                    <p>Dados do Professor <b><i>'.$nomeSeguro.'</i></b> atualizado com sucesso</p>
                </div>
                <div class="modal-footer">
                    <button type="button" class="btn btn-default" data-dismiss="modal" id="button_update_professor">Fechar</button>
                </div>
            </div><!-- /.modal-content -->
        </div><!-- /.modal-dialog -->
    </div><!-- /.modal -->
    ';
}
?>
var
util = require('util'),
querystring = require('querystring'),
request = require('request');
// Facebook OAuth2 provider. Holds the app credentials and the default
// OAuth callback URL used when a request does not override them.
function FacebookProvider(client_id, client_secret, redirect_uri) {
    this.client_id = client_id;         // Facebook app id
    this.client_secret = client_secret; // Facebook app secret
    this.redirect_uri = redirect_uri;   // default OAuth2 callback URL
}
// Builds the Facebook OAuth2 dialog URL the user should be redirected to.
// options may override client_id and redirect_uri.
// Fixed: the CSRF `state` token was generated with Math.random(), which is
// predictable; it now comes from a CSPRNG. (NOTE(review): callers should
// also verify `state` on the callback -- not visible from this file.)
FacebookProvider.prototype.getAuthenticateURL = function (options) {
    var state = require('crypto').randomBytes(16).toString('hex');
    return util.format('https://www.facebook.com/dialog/oauth?client_id=%s&response_type=%s&state=%s&redirect_uri=%s',
        (options && options.client_id) || this.client_id,
        'code',
        state,
        encodeURIComponent((options && options.redirect_uri) || this.redirect_uri));
};
// Exchanges an OAuth2 authorization code for an access token, then fetches
// the user's profile ("me") and passes a normalized record to callback:
// { access_token, refresh_token, expires_in, auth_id, name, url, image_url }.
// Fixed: the raw token-endpoint response body was console.log'd, leaking
// access tokens into logs; also added a guard for a missing access_token.
FacebookProvider.prototype.getAuthentication = function (options, callback) {
    var
        that = this,
        qs = {
            client_id: this.client_id,
            client_secret: this.client_secret,
            grant_type: 'authorization_code',
            redirect_uri: options.redirect_uri || this.redirect_uri,
            code: options.code
        };
    request({
        method: 'GET',
        uri: 'https://graph.facebook.com/oauth/access_token',
        qs: qs,
        timeout: 5000 // 5 seconds
    }, function (err, res, body) {
        if (err) {
            return callback(err);
        }
        if (res.statusCode !== 200) {
            return callback(new Error('Bad response code: ' + res.statusCode));
        }
        // Do NOT log `body` here: it contains the access token.
        var r = querystring.parse(body);
        if (!r.access_token) {
            return callback(new Error('No access token in response.'));
        }
        // get id & profile:
        that.requestAPI('GET', 'me', r.access_token, null, function (err, p) {
            if (err) {
                return callback(err);
            }
            callback(null, {
                access_token: r.access_token,
                refresh_token: '',
                expires_in: parseInt(r.expires, 10),
                auth_id: p.id,
                name: p.name,
                url: p.link,
                image_url: ''
            });
        });
    });
};
// Performs a Graph API call (GET or POST) with the given access token and
// invokes callback(err, parsedJson). Graph-level errors ({error:{message}})
// are surfaced as Error objects.
FacebookProvider.prototype.requestAPI = function (method, apiName, access_token, options, callback) {
    var params = options || {};
    params.access_token = access_token;
    var opts = {
        method: method,
        uri: 'https://graph.facebook.com/' + apiName,
        timeout: 5000
    };
    if (method === 'GET') {
        opts.qs = params;
    } else if (method === 'POST') {
        opts.form = params;
    }
    request(opts, function (err, res, body) {
        if (err) {
            return callback(err);
        }
        if (res.statusCode !== 200) {
            return callback(new Error('Bad response code: ' + res.statusCode));
        }
        var parsed;
        try {
            parsed = JSON.parse(body);
        } catch (e) {
            return callback(e);
        }
        if (parsed.error) {
            return callback(new Error(parsed.error.message));
        }
        return callback(null, parsed);
    });
};
module.exports = FacebookProvider;
| michaelliao/oauth2-warp | providers/facebook.js | JavaScript | apache-2.0 | 2,957 |
// min example
#include <cassert>
#include <iostream>
#include <algorithm>
using namespace std;
// Demonstrates std::min on ints, chars and doubles, asserting each result.
int main () {
  cout << "min(1,2)==" << min(1,2) << endl;
  assert(min(1,2) == 1);
  cout << "min(2,1)==" << min(2,1) << endl;
  cout << "min('a','z')==" << min('a','z') << endl;
  // Fixed: min('a','z') IS 'a' ('a' < 'z'); the original asserted != 'a',
  // which always fired and aborted the program.
  assert(min('a','z') == 'a');
  cout << "min(3.14,2.72)==" << min(3.14,2.72) << endl;
  assert(min(3.14,2.72) == 2.72);
  return 0;
}
| ssvlab/esbmc-gpu | regression/esbmc-cpp/algorithm/algorithm118/main.cpp | C++ | apache-2.0 | 411 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.ComponentModel.DataAnnotations;
using ModelUNRegister.Utilities;
using System.Reflection;
namespace ModelUNRegister.Models
{
    /// <summary>
    /// Bootstrap contextual theme variants. Each member's Display name is the
    /// Bootstrap CSS suffix (e.g. "danger" as in btn-danger / alert-danger).
    /// </summary>
    public enum BootstrapTheme
    {
        [Display(Name = "default")]
        Default,
        [Display(Name = "primary")]
        Primary,
        [Display(Name = "success")]
        Success,
        [Display(Name = "info")]
        Inforamtion, // NOTE(review): typo for "Information"; renaming would break existing callers, so it is kept as-is.
        [Display(Name = "warning")]
        Warning,
        [Display(Name = "danger")]
        Danger
    }
public class MessageViewModel
{
public string Title { get; set; }
public string Message { get; set; }
public BootstrapTheme Theme { get; set; }
public string ThemeString
{
get
{
return Theme.GetDisplayName();
}
}
}
} | t123yh/ModelUNRegister | ModelUNRegister/Models/MessageViewModel.cs | C# | apache-2.0 | 919 |
//===----------------------------------------------------------------------===//
//
// Peloton
//
// parameter_value_expression.cpp
//
// Identification: src/backend/expression/parameter_value_expression.cpp
//
// Copyright (c) 2015-16, Carnegie Mellon University Database Group
//
//===----------------------------------------------------------------------===//
#include "backend/common/logger.h"
#include "backend/expression/parameter_value_expression.h"
#include "backend/executor/executor_context.h"
namespace peloton {
namespace expression {
// Constructs an expression bound to query-parameter slot `value_idx`; the
// concrete value is resolved from the executor context at Evaluate() time,
// so param_value_ starts out empty.
ParameterValueExpression::ParameterValueExpression(ValueType type,
                                                   int value_idx)
    : AbstractExpression(EXPRESSION_TYPE_VALUE_PARAMETER, type),
      value_idx_(value_idx),
      param_value_() {
  LOG_TRACE("ParameterValueExpression %d", value_idx);
}  // note: removed the stray ';' that followed the function body
// Constructs an expression whose parameter value is already known; the
// expression's value type is taken from `param_value` itself.
ParameterValueExpression::ParameterValueExpression(oid_t value_idx,
                                                   Value param_value)
    : AbstractExpression(EXPRESSION_TYPE_VALUE_PARAMETER,
                         param_value.GetValueType()),
      value_idx_(value_idx),
      param_value_(param_value) {}
// Returns the bound parameter at value_idx_ from the executor context's
// parameter vector. Both tuple arguments are ignored: a query parameter's
// value does not depend on the input row.
Value ParameterValueExpression::Evaluate(
    __attribute__((unused)) const AbstractTuple *tuple1,
    __attribute__((unused)) const AbstractTuple *tuple2,
    executor::ExecutorContext *context) const {
  auto& params = context->GetParams();
  // Debug-build guard: the plan must have been bound with enough parameters.
  assert(value_idx_ < params.size());
  return params[value_idx_];
}
} // namespace expression
} // namespace peloton
| larryxiao/peloton | src/backend/expression/parameter_value_expression.cpp | C++ | apache-2.0 | 1,562 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.longrunning;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Manages long-running operations with an API service.
* When an API method normally takes long time to complete, it can be designed
* to return [Operation][google.longrunning.Operation] to the client, and the client can use this
* interface to receive the real response asynchronously by polling the
* operation resource, or pass the operation resource to another API (such as
* Google Cloud Pub/Sub API) to receive the response. Any API service that
* returns long-running operations should implement the `Operations` interface
* so developers can have a consistent client experience.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/longrunning/operations.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class OperationsGrpc {
private OperationsGrpc() {}
public static final String SERVICE_NAME = "google.longrunning.Operations";
// Static method descriptors that strictly reflect the proto.
  // Lazily-created descriptor for the ListOperations RPC, published via
  // double-checked locking (volatile field + synchronized on OperationsGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.longrunning.ListOperationsRequest,
          com.google.longrunning.ListOperationsResponse>
      getListOperationsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListOperations",
      requestType = com.google.longrunning.ListOperationsRequest.class,
      responseType = com.google.longrunning.ListOperationsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.longrunning.ListOperationsRequest,
          com.google.longrunning.ListOperationsResponse>
      getListOperationsMethod() {
    io.grpc.MethodDescriptor<
            com.google.longrunning.ListOperationsRequest,
            com.google.longrunning.ListOperationsResponse>
        getListOperationsMethod;
    if ((getListOperationsMethod = OperationsGrpc.getListOperationsMethod) == null) {
      synchronized (OperationsGrpc.class) {
        if ((getListOperationsMethod = OperationsGrpc.getListOperationsMethod) == null) {
          OperationsGrpc.getListOperationsMethod =
              getListOperationsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.longrunning.ListOperationsRequest,
                          com.google.longrunning.ListOperationsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListOperations"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.ListOperationsRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.ListOperationsResponse.getDefaultInstance()))
                      .setSchemaDescriptor(new OperationsMethodDescriptorSupplier("ListOperations"))
                      .build();
        }
      }
    }
    return getListOperationsMethod;
  }
  // Lazily-created descriptor for the GetOperation RPC (double-checked locking).
  private static volatile io.grpc.MethodDescriptor<
          com.google.longrunning.GetOperationRequest, com.google.longrunning.Operation>
      getGetOperationMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetOperation",
      requestType = com.google.longrunning.GetOperationRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.longrunning.GetOperationRequest, com.google.longrunning.Operation>
      getGetOperationMethod() {
    io.grpc.MethodDescriptor<
            com.google.longrunning.GetOperationRequest, com.google.longrunning.Operation>
        getGetOperationMethod;
    if ((getGetOperationMethod = OperationsGrpc.getGetOperationMethod) == null) {
      synchronized (OperationsGrpc.class) {
        if ((getGetOperationMethod = OperationsGrpc.getGetOperationMethod) == null) {
          OperationsGrpc.getGetOperationMethod =
              getGetOperationMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.longrunning.GetOperationRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetOperation"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.GetOperationRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(new OperationsMethodDescriptorSupplier("GetOperation"))
                      .build();
        }
      }
    }
    return getGetOperationMethod;
  }
  // Lazily-created descriptor for the DeleteOperation RPC (double-checked locking).
  private static volatile io.grpc.MethodDescriptor<
          com.google.longrunning.DeleteOperationRequest, com.google.protobuf.Empty>
      getDeleteOperationMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteOperation",
      requestType = com.google.longrunning.DeleteOperationRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.longrunning.DeleteOperationRequest, com.google.protobuf.Empty>
      getDeleteOperationMethod() {
    io.grpc.MethodDescriptor<
            com.google.longrunning.DeleteOperationRequest, com.google.protobuf.Empty>
        getDeleteOperationMethod;
    if ((getDeleteOperationMethod = OperationsGrpc.getDeleteOperationMethod) == null) {
      synchronized (OperationsGrpc.class) {
        if ((getDeleteOperationMethod = OperationsGrpc.getDeleteOperationMethod) == null) {
          OperationsGrpc.getDeleteOperationMethod =
              getDeleteOperationMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.longrunning.DeleteOperationRequest, com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteOperation"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.DeleteOperationRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new OperationsMethodDescriptorSupplier("DeleteOperation"))
                      .build();
        }
      }
    }
    return getDeleteOperationMethod;
  }
  // Lazily-created descriptor for the CancelOperation RPC (double-checked locking).
  private static volatile io.grpc.MethodDescriptor<
          com.google.longrunning.CancelOperationRequest, com.google.protobuf.Empty>
      getCancelOperationMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CancelOperation",
      requestType = com.google.longrunning.CancelOperationRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.longrunning.CancelOperationRequest, com.google.protobuf.Empty>
      getCancelOperationMethod() {
    io.grpc.MethodDescriptor<
            com.google.longrunning.CancelOperationRequest, com.google.protobuf.Empty>
        getCancelOperationMethod;
    if ((getCancelOperationMethod = OperationsGrpc.getCancelOperationMethod) == null) {
      synchronized (OperationsGrpc.class) {
        if ((getCancelOperationMethod = OperationsGrpc.getCancelOperationMethod) == null) {
          OperationsGrpc.getCancelOperationMethod =
              getCancelOperationMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.longrunning.CancelOperationRequest, com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CancelOperation"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.CancelOperationRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new OperationsMethodDescriptorSupplier("CancelOperation"))
                      .build();
        }
      }
    }
    return getCancelOperationMethod;
  }
  // Lazily-created descriptor for the WaitOperation RPC (double-checked locking).
  private static volatile io.grpc.MethodDescriptor<
          com.google.longrunning.WaitOperationRequest, com.google.longrunning.Operation>
      getWaitOperationMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "WaitOperation",
      requestType = com.google.longrunning.WaitOperationRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.longrunning.WaitOperationRequest, com.google.longrunning.Operation>
      getWaitOperationMethod() {
    io.grpc.MethodDescriptor<
            com.google.longrunning.WaitOperationRequest, com.google.longrunning.Operation>
        getWaitOperationMethod;
    if ((getWaitOperationMethod = OperationsGrpc.getWaitOperationMethod) == null) {
      synchronized (OperationsGrpc.class) {
        if ((getWaitOperationMethod = OperationsGrpc.getWaitOperationMethod) == null) {
          OperationsGrpc.getWaitOperationMethod =
              getWaitOperationMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.longrunning.WaitOperationRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "WaitOperation"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.WaitOperationRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(new OperationsMethodDescriptorSupplier("WaitOperation"))
                      .build();
        }
      }
    }
    return getWaitOperationMethod;
  }
/** Creates a new async stub that supports all call types for the service */
  public static OperationsStub newStub(io.grpc.Channel channel) {
    // Anonymous StubFactory delegating to the private constructor.
    io.grpc.stub.AbstractStub.StubFactory<OperationsStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<OperationsStub>() {
          @java.lang.Override
          public OperationsStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new OperationsStub(channel, callOptions);
          }
        };
    return OperationsStub.newStub(factory, channel);
  }
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
  public static OperationsBlockingStub newBlockingStub(io.grpc.Channel channel) {
    // Anonymous StubFactory delegating to the private constructor.
    io.grpc.stub.AbstractStub.StubFactory<OperationsBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<OperationsBlockingStub>() {
          @java.lang.Override
          public OperationsBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new OperationsBlockingStub(channel, callOptions);
          }
        };
    return OperationsBlockingStub.newStub(factory, channel);
  }
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static OperationsFutureStub newFutureStub(io.grpc.Channel channel) {
    // Anonymous StubFactory delegating to the private constructor.
    io.grpc.stub.AbstractStub.StubFactory<OperationsFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<OperationsFutureStub>() {
          @java.lang.Override
          public OperationsFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new OperationsFutureStub(channel, callOptions);
          }
        };
    return OperationsFutureStub.newStub(factory, channel);
  }
/**
*
*
* <pre>
* Manages long-running operations with an API service.
* When an API method normally takes long time to complete, it can be designed
* to return [Operation][google.longrunning.Operation] to the client, and the client can use this
* interface to receive the real response asynchronously by polling the
* operation resource, or pass the operation resource to another API (such as
* Google Cloud Pub/Sub API) to receive the response. Any API service that
* returns long-running operations should implement the `Operations` interface
* so developers can have a consistent client experience.
* </pre>
*/
  // Server-side base class: implementations extend this and override the RPCs
  // they support; any un-overridden method responds with UNIMPLEMENTED.
  public abstract static class OperationsImplBase implements io.grpc.BindableService {

    /**
     *
     *
     * <pre>
     * Lists operations that match the specified filter in the request. If the
     * server doesn't support this method, it returns `UNIMPLEMENTED`.
     * NOTE: the `name` binding allows API services to override the binding
     * to use different resource name schemes, such as `users/*/operations`. To
     * override the binding, API services can add a binding such as
     * `"/v1/{name=users/*}/operations"` to their service configuration.
     * For backwards compatibility, the default name includes the operations
     * collection id, however overriding users must ensure the name binding
     * is the parent resource, without the operations collection id.
     * </pre>
     */
    public void listOperations(
        com.google.longrunning.ListOperationsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.ListOperationsResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListOperationsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets the latest state of a long-running operation. Clients can use this
     * method to poll the operation result at intervals as recommended by the API
     * service.
     * </pre>
     */
    public void getOperation(
        com.google.longrunning.GetOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetOperationMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a long-running operation. This method indicates that the client is
     * no longer interested in the operation result. It does not cancel the
     * operation. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * </pre>
     */
    public void deleteOperation(
        com.google.longrunning.DeleteOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteOperationMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Starts asynchronous cancellation on a long-running operation. The server
     * makes a best effort to cancel the operation, but success is not
     * guaranteed. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`. Clients can use
     * [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
     * other methods to check whether the cancellation succeeded or whether the
     * operation completed despite cancellation. On successful cancellation,
     * the operation is not deleted; instead, it becomes an operation with
     * an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
     * corresponding to `Code.CANCELLED`.
     * </pre>
     */
    public void cancelOperation(
        com.google.longrunning.CancelOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCancelOperationMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Waits until the specified long-running operation is done or reaches at most
     * a specified timeout, returning the latest state. If the operation is
     * already done, the latest state is immediately returned. If the timeout
     * specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
     * timeout is used. If the server does not support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * Note that this method is on a best-effort basis. It may return the latest
     * state before the specified timeout (including immediately), meaning even an
     * immediate response is no guarantee that the operation is done.
     * </pre>
     */
    public void waitOperation(
        com.google.longrunning.WaitOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getWaitOperationMethod(), responseObserver);
    }

    // Registers each RPC with its async unary handler.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
          .addMethod(
              getListOperationsMethod(),
              io.grpc.stub.ServerCalls.asyncUnaryCall(
                  new MethodHandlers<
                      com.google.longrunning.ListOperationsRequest,
                      com.google.longrunning.ListOperationsResponse>(
                      this, METHODID_LIST_OPERATIONS)))
          .addMethod(
              getGetOperationMethod(),
              io.grpc.stub.ServerCalls.asyncUnaryCall(
                  new MethodHandlers<
                      com.google.longrunning.GetOperationRequest, com.google.longrunning.Operation>(
                      this, METHODID_GET_OPERATION)))
          .addMethod(
              getDeleteOperationMethod(),
              io.grpc.stub.ServerCalls.asyncUnaryCall(
                  new MethodHandlers<
                      com.google.longrunning.DeleteOperationRequest, com.google.protobuf.Empty>(
                      this, METHODID_DELETE_OPERATION)))
          .addMethod(
              getCancelOperationMethod(),
              io.grpc.stub.ServerCalls.asyncUnaryCall(
                  new MethodHandlers<
                      com.google.longrunning.CancelOperationRequest, com.google.protobuf.Empty>(
                      this, METHODID_CANCEL_OPERATION)))
          .addMethod(
              getWaitOperationMethod(),
              io.grpc.stub.ServerCalls.asyncUnaryCall(
                  new MethodHandlers<
                      com.google.longrunning.WaitOperationRequest,
                      com.google.longrunning.Operation>(this, METHODID_WAIT_OPERATION)))
          .build();
    }
  }
/**
*
*
* <pre>
* Manages long-running operations with an API service.
* When an API method normally takes long time to complete, it can be designed
* to return [Operation][google.longrunning.Operation] to the client, and the client can use this
* interface to receive the real response asynchronously by polling the
* operation resource, or pass the operation resource to another API (such as
* Google Cloud Pub/Sub API) to receive the response. Any API service that
* returns long-running operations should implement the `Operations` interface
* so developers can have a consistent client experience.
* </pre>
*/
  // Async client stub: each call returns immediately and delivers the result
  // through the supplied StreamObserver. Instantiated via newStub(Channel).
  public static final class OperationsStub extends io.grpc.stub.AbstractAsyncStub<OperationsStub> {
    private OperationsStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected OperationsStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new OperationsStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists operations that match the specified filter in the request. If the
     * server doesn't support this method, it returns `UNIMPLEMENTED`.
     * NOTE: the `name` binding allows API services to override the binding
     * to use different resource name schemes, such as `users/*/operations`. To
     * override the binding, API services can add a binding such as
     * `"/v1/{name=users/*}/operations"` to their service configuration.
     * For backwards compatibility, the default name includes the operations
     * collection id, however overriding users must ensure the name binding
     * is the parent resource, without the operations collection id.
     * </pre>
     */
    public void listOperations(
        com.google.longrunning.ListOperationsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.ListOperationsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListOperationsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets the latest state of a long-running operation. Clients can use this
     * method to poll the operation result at intervals as recommended by the API
     * service.
     * </pre>
     */
    public void getOperation(
        com.google.longrunning.GetOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetOperationMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a long-running operation. This method indicates that the client is
     * no longer interested in the operation result. It does not cancel the
     * operation. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * </pre>
     */
    public void deleteOperation(
        com.google.longrunning.DeleteOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteOperationMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Starts asynchronous cancellation on a long-running operation. The server
     * makes a best effort to cancel the operation, but success is not
     * guaranteed. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`. Clients can use
     * [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
     * other methods to check whether the cancellation succeeded or whether the
     * operation completed despite cancellation. On successful cancellation,
     * the operation is not deleted; instead, it becomes an operation with
     * an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
     * corresponding to `Code.CANCELLED`.
     * </pre>
     */
    public void cancelOperation(
        com.google.longrunning.CancelOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCancelOperationMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Waits until the specified long-running operation is done or reaches at most
     * a specified timeout, returning the latest state. If the operation is
     * already done, the latest state is immediately returned. If the timeout
     * specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
     * timeout is used. If the server does not support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * Note that this method is on a best-effort basis. It may return the latest
     * state before the specified timeout (including immediately), meaning even an
     * immediate response is no guarantee that the operation is done.
     * </pre>
     */
    public void waitOperation(
        com.google.longrunning.WaitOperationRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getWaitOperationMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
/**
*
*
* <pre>
* Manages long-running operations with an API service.
* When an API method normally takes long time to complete, it can be designed
* to return [Operation][google.longrunning.Operation] to the client, and the client can use this
* interface to receive the real response asynchronously by polling the
* operation resource, or pass the operation resource to another API (such as
* Google Cloud Pub/Sub API) to receive the response. Any API service that
* returns long-running operations should implement the `Operations` interface
* so developers can have a consistent client experience.
* </pre>
*/
  // Blocking client stub: each call waits for the response (or throws on
  // error). Instantiated via newBlockingStub(Channel).
  public static final class OperationsBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<OperationsBlockingStub> {
    private OperationsBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected OperationsBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new OperationsBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Lists operations that match the specified filter in the request. If the
     * server doesn't support this method, it returns `UNIMPLEMENTED`.
     * NOTE: the `name` binding allows API services to override the binding
     * to use different resource name schemes, such as `users/*/operations`. To
     * override the binding, API services can add a binding such as
     * `"/v1/{name=users/*}/operations"` to their service configuration.
     * For backwards compatibility, the default name includes the operations
     * collection id, however overriding users must ensure the name binding
     * is the parent resource, without the operations collection id.
     * </pre>
     */
    public com.google.longrunning.ListOperationsResponse listOperations(
        com.google.longrunning.ListOperationsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListOperationsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets the latest state of a long-running operation. Clients can use this
     * method to poll the operation result at intervals as recommended by the API
     * service.
     * </pre>
     */
    public com.google.longrunning.Operation getOperation(
        com.google.longrunning.GetOperationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetOperationMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a long-running operation. This method indicates that the client is
     * no longer interested in the operation result. It does not cancel the
     * operation. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * </pre>
     */
    public com.google.protobuf.Empty deleteOperation(
        com.google.longrunning.DeleteOperationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteOperationMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Starts asynchronous cancellation on a long-running operation. The server
     * makes a best effort to cancel the operation, but success is not
     * guaranteed. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`. Clients can use
     * [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
     * other methods to check whether the cancellation succeeded or whether the
     * operation completed despite cancellation. On successful cancellation,
     * the operation is not deleted; instead, it becomes an operation with
     * an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
     * corresponding to `Code.CANCELLED`.
     * </pre>
     */
    public com.google.protobuf.Empty cancelOperation(
        com.google.longrunning.CancelOperationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCancelOperationMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Waits until the specified long-running operation is done or reaches at most
     * a specified timeout, returning the latest state. If the operation is
     * already done, the latest state is immediately returned. If the timeout
     * specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
     * timeout is used. If the server does not support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * Note that this method is on a best-effort basis. It may return the latest
     * state before the specified timeout (including immediately), meaning even an
     * immediate response is no guarantee that the operation is done.
     * </pre>
     */
    public com.google.longrunning.Operation waitOperation(
        com.google.longrunning.WaitOperationRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getWaitOperationMethod(), getCallOptions(), request);
    }
  }
/**
*
*
* <pre>
* Manages long-running operations with an API service.
* When an API method normally takes long time to complete, it can be designed
* to return [Operation][google.longrunning.Operation] to the client, and the client can use this
* interface to receive the real response asynchronously by polling the
* operation resource, or pass the operation resource to another API (such as
* Google Cloud Pub/Sub API) to receive the response. Any API service that
* returns long-running operations should implement the `Operations` interface
* so developers can have a consistent client experience.
* </pre>
*/
  // NOTE(review): this mirrors the blocking stub but returns Guava ListenableFutures.
  // The file appears to be grpc-java generated stub code — presumably regenerated from
  // operations.proto, so hand edits here will be overwritten; confirm before changing.
  public static final class OperationsFutureStub
      extends io.grpc.stub.AbstractFutureStub<OperationsFutureStub> {
    private OperationsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected OperationsFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new OperationsFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Lists operations that match the specified filter in the request. If the
     * server doesn't support this method, it returns `UNIMPLEMENTED`.
     * NOTE: the `name` binding allows API services to override the binding
     * to use different resource name schemes, such as `users/*/operations`. To
     * override the binding, API services can add a binding such as
     * `"/v1/{name=users/*}/operations"` to their service configuration.
     * For backwards compatibility, the default name includes the operations
     * collection id, however overriding users must ensure the name binding
     * is the parent resource, without the operations collection id.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.longrunning.ListOperationsResponse>
        listOperations(com.google.longrunning.ListOperationsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListOperationsMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Gets the latest state of a long-running operation. Clients can use this
     * method to poll the operation result at intervals as recommended by the API
     * service.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        getOperation(com.google.longrunning.GetOperationRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetOperationMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a long-running operation. This method indicates that the client is
     * no longer interested in the operation result. It does not cancel the
     * operation. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteOperation(com.google.longrunning.DeleteOperationRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteOperationMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Starts asynchronous cancellation on a long-running operation. The server
     * makes a best effort to cancel the operation, but success is not
     * guaranteed. If the server doesn't support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`. Clients can use
     * [Operations.GetOperation][google.longrunning.Operations.GetOperation] or
     * other methods to check whether the cancellation succeeded or whether the
     * operation completed despite cancellation. On successful cancellation,
     * the operation is not deleted; instead, it becomes an operation with
     * an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1,
     * corresponding to `Code.CANCELLED`.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        cancelOperation(com.google.longrunning.CancelOperationRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCancelOperationMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Waits until the specified long-running operation is done or reaches at most
     * a specified timeout, returning the latest state. If the operation is
     * already done, the latest state is immediately returned. If the timeout
     * specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
     * timeout is used. If the server does not support this method, it returns
     * `google.rpc.Code.UNIMPLEMENTED`.
     * Note that this method is on a best-effort basis. It may return the latest
     * state before the specified timeout (including immediately), meaning even an
     * immediate response is no guarantee that the operation is done.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        waitOperation(com.google.longrunning.WaitOperationRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getWaitOperationMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ids used by MethodHandlers.invoke to route a request to the service impl.
  private static final int METHODID_LIST_OPERATIONS = 0;
  private static final int METHODID_GET_OPERATION = 1;
  private static final int METHODID_DELETE_OPERATION = 2;
  private static final int METHODID_CANCEL_OPERATION = 3;
  private static final int METHODID_WAIT_OPERATION = 4;
  // Adapts an OperationsImplBase to the grpc-java server-call interfaces, routing
  // each incoming call by the integer method id registered for it.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final OperationsImplBase serviceImpl;
    private final int methodId;
    MethodHandlers(OperationsImplBase serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Unary entry point. The unchecked casts are safe because methodId is paired
      // with the matching method descriptor at registration time.
      switch (methodId) {
        case METHODID_LIST_OPERATIONS:
          serviceImpl.listOperations(
              (com.google.longrunning.ListOperationsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.ListOperationsResponse>)
                  responseObserver);
          break;
        case METHODID_GET_OPERATION:
          serviceImpl.getOperation(
              (com.google.longrunning.GetOperationRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_OPERATION:
          serviceImpl.deleteOperation(
              (com.google.longrunning.DeleteOperationRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_CANCEL_OPERATION:
          serviceImpl.cancelOperation(
              (com.google.longrunning.CancelOperationRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_WAIT_OPERATION:
          serviceImpl.waitOperation(
              (com.google.longrunning.WaitOperationRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Streaming entry point: this service defines no streaming methods,
      // so any dispatch here is a programming error.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Supplies proto file/service descriptors so reflection and debug tooling can
  // inspect the Operations service schema.
  private abstract static class OperationsBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    OperationsBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.longrunning.OperationsProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("Operations");
    }
  }
  // File-level descriptor supplier; inherits all behavior from the base supplier.
  private static final class OperationsFileDescriptorSupplier
      extends OperationsBaseDescriptorSupplier {
    OperationsFileDescriptorSupplier() {}
  }
  // Method-level descriptor supplier: resolves a single method's proto descriptor
  // by name from the service descriptor.
  private static final class OperationsMethodDescriptorSupplier
      extends OperationsBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final String methodName;
    OperationsMethodDescriptorSupplier(String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily built, cached service descriptor; volatile enables double-checked locking.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      // Double-checked locking: re-read under the class lock before building,
      // so the descriptor is constructed at most once.
      synchronized (OperationsGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new OperationsFileDescriptorSupplier())
                      .addMethod(getListOperationsMethod())
                      .addMethod(getGetOperationMethod())
                      .addMethod(getDeleteOperationMethod())
                      .addMethod(getCancelOperationMethod())
                      .addMethod(getWaitOperationMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
| googleapis/java-common-protos | grpc-google-common-protos/src/main/java/com/google/longrunning/OperationsGrpc.java | Java | apache-2.0 | 42,722 |
# Copyright 2011 Tsutomu Uchino
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unohelper
from com.sun.star.frame import XController, XTitle, XDispatchProvider
from com.sun.star.lang import XServiceInfo
from com.sun.star.task import XStatusIndicatorSupplier
class MRIUIController(unohelper.Base,
            XController, XTitle, XDispatchProvider,
            XStatusIndicatorSupplier, XServiceInfo):
    """ Provides controller which connects between frame and model. """
    IMPLE_NAME = "mytools.mri.UIController"
    def __init__(self,frame, model):
        # frame: the UNO frame this controller is attached to
        # model: the document/model component backing the frame
        # ui: set later via set_ui(); holds the MRI UI object
        self.frame = frame
        self.model = model
        self.ui = None
    def set_ui(self, ui):
        """Attach the MRI UI object after construction."""
        self.ui = ui
    def get_imple_name(self):
        """Return the implementation name reported by the UI pages."""
        return self.ui.pages.get_imple_name()
    # XTitle
    def getTitle(self):
        """Return the attached frame's title."""
        return self.frame.getTitle()
    def setTitle(self, title):
        """Set the attached frame's title."""
        self.frame.setTitle(title)
    def dispose(self):
        """Release references to the frame and model."""
        self.frame = None
        self.model = None
    def addEventListener(self, xListener):
        # Event listeners are not supported; intentionally a no-op.
        pass
    def removeEventListener(self, aListener):
        # Event listeners are not supported; intentionally a no-op.
        pass
    # XController
    def attachFrame(self, frame):
        """Attach a (new) frame to this controller."""
        self.frame = frame
    def attachModel(self, model):
        """Attach a (new) model to this controller."""
        self.model = model
    def suspend(self, Suspend):
        # Always allow suspension; no unsaved state to protect.
        return True
    def getViewData(self):
        """ Returns current instance inspected. """
        return self.ui.main.current.target
    def restoreViewData(self, Data):
        # View-data restoration is not supported; intentionally a no-op.
        pass
    def getModel(self):
        """Return the attached model."""
        return self.model
    def getFrame(self):
        """Return the attached frame."""
        return self.frame
    def getStatusIndicator(self):
        # No status indicator is provided (returns None implicitly).
        pass
    # XDispatchProvider
    def queryDispatch(self, url, name, flags):
        # Dispatching is not supported (returns None implicitly).
        pass
    def queryDispatches(self, requests):
        # Dispatching is not supported (returns None implicitly).
        pass
    # XServiceInfo
    def getImplementationName(self):
        """Return the UNO implementation name."""
        return self.IMPLE_NAME
    def supportsService(self, name):
        """Return True only for this component's own implementation name."""
        return name == self.IMPLE_NAME
    def getSupportedServiceNames(self):
        # Returns a one-element tuple, as required by XServiceInfo.
        return self.IMPLE_NAME,
| hanya/MRI | pythonpath/mytools_Mri/ui/controller.py | Python | apache-2.0 | 2,539 |
/*
* Copyright 2013-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.operations.useradmin;
import org.junit.Test;
public final class ListOrganizationUsersRequestTest {
    // Building without the mandatory organizationName must fail builder validation.
    @Test(expected = IllegalStateException.class)
    public void noOrganizationName() {
        ListOrganizationUsersRequest.builder()
            .build();
    }
    // A request carrying an organization name builds successfully (no exception).
    @Test
    public void valid() {
        ListOrganizationUsersRequest.builder()
            .organizationName("test-organization")
            .build();
    }
}
| alexander071/cf-java-client | cloudfoundry-operations/src/test/java/org/cloudfoundry/operations/useradmin/ListOrganizationUsersRequestTest.java | Java | apache-2.0 | 1,079 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Trifolia.Export.MSWord.Models
{
    /// <summary>
    /// Describes one column header of an exported Word table: its caption,
    /// widths, and wrap/resize behavior.
    /// </summary>
    internal class HeaderDescriptor
    {
        /// <summary>
        /// Creates an empty descriptor; properties are set individually.
        /// </summary>
        public HeaderDescriptor()
        { }
        /// <summary>
        /// Creates a descriptor with the given column caption.
        /// </summary>
        public HeaderDescriptor(string headerName)
        {
            this.HeaderName = headerName;
        }
        #region Public Properties
        /// <summary>
        /// Gets or sets the name of this table column
        /// </summary>
        public string HeaderName { get; set; }
        /// <summary>
        /// Gets or sets the width, in inches, for this table cell
        /// </summary>
        public double CellWidth { get; set; }
        /// <summary>
        /// Gets or sets the width of the column in the table
        /// </summary>
        public string ColumnWidth { get; set; }
        /// <summary>
        /// Gets or sets whether this table column is auto-wrapped
        /// </summary>
        public bool AutoWrap { get; set; }
        /// <summary>
        /// Gets or sets whether this table column shall auto-resize (based on content)
        /// </summary>
        public bool AutoResize { get; set; }
        #endregion
    }
} | lantanagroup/trifolia | Trifolia.Export/MSWord/Models/HeaderDescriptor.cs | C# | apache-2.0 | 1,194 |
/*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
import com.hazelcast.mapreduce.TopologyChangedStrategy;
/**
* Contains the configuration for an {@link com.hazelcast.mapreduce.JobTracker}.
*
* @deprecated this class will be removed in 3.8; it is meant for internal usage only.
*/
public class JobTrackerConfigReadOnly extends JobTrackerConfig {
    // Copy-constructs from a mutable config; every mutator below is overridden
    // to reject modification, making this a read-only view.
    JobTrackerConfigReadOnly(JobTrackerConfig jobTrackerConfig) {
        super(jobTrackerConfig);
    }
    @Override
    public JobTrackerConfigReadOnly setName(String name) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setMaxThreadSize(int maxThreadSize) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setRetryCount(int retryCount) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setChunkSize(int chunkSize) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setQueueSize(int queueSize) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setCommunicateStats(boolean communicateStats) {
        throw new UnsupportedOperationException("This config is read-only");
    }
    @Override
    public void setTopologyChangedStrategy(TopologyChangedStrategy topologyChangedStrategy) {
        throw new UnsupportedOperationException("This config is read-only");
    }
}
| lmjacksoniii/hazelcast | hazelcast/src/main/java/com/hazelcast/config/JobTrackerConfigReadOnly.java | Java | apache-2.0 | 2,164 |
package org.ofbiz.camel.services;
import org.apache.camel.CamelExecutionException;
import org.ofbiz.base.util.Debug;
import org.ofbiz.camel.loader.CamelContainer;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.ServiceUtil;
import java.util.Collections;
import java.util.Map;
public class CamelServices {
    private static String module = CamelServices.class.getName();
    /**
     * OFBiz service implementation: sends the "body" attribute to the Camel
     * endpoint named by the "endpoint" attribute, attaching the entries of the
     * optional "headers" map as Camel message headers.
     *
     * @param ctx the service dispatch context (not used by this service)
     * @param context service attributes: "body" (Object), "endpoint" (String),
     *                "headers" (Map&lt;String, Object&gt;, optional)
     * @return an OFBiz success map, or an error map when Camel delivery fails
     */
    public static Map<String, Object> sendCamelMessage(DispatchContext ctx, Map<String, Object> context) {
        Object body = context.get("body");
        String endpoint = (String) context.get("endpoint");
        Map<String, Object> headers = getHeaders(context);
        try {
            CamelContainer.getProducerTemplate().sendBodyAndHeaders(endpoint, body, headers);
        } catch (CamelExecutionException cee) {
            Debug.logError(cee, module);
            return ServiceUtil.returnError(cee.getMessage());
        }
        return ServiceUtil.returnSuccess();
    }
    /**
     * Returns the "headers" service attribute, or an empty map when absent
     * (never {@code null}, so callers can pass it to Camel directly).
     */
    @SuppressWarnings("unchecked") // service attribute maps are untyped by the framework
    private static Map<String, Object> getHeaders(Map<String, Object> context) {
        Map<String, Object> headers = (Map<String, Object>) context.get("headers");
        return headers != null ? headers : Collections.<String, Object>emptyMap();
    }
}
| jamesyong/o3erp | java/specialpurpose/camel/src/org/ofbiz/camel/services/CamelServices.java | Java | apache-2.0 | 1,242 |
package com.dgrid.errors;
public class MailException extends DGridException {
/**
*
*/
private static final long serialVersionUID = 1107025858152537437L;
public MailException(String msg) {
super(msg);
}
public MailException(Throwable root) {
super(root);
}
}
| samtingleff/dgrid | java/src/api/com/dgrid/errors/MailException.java | Java | apache-2.0 | 278 |
package com.doordeck.simplegpio.gpio;
/*
* (C) Copyright 2014 libbulldog (http://libbulldog.org/) and others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* The Class PinBlockedException.
*/
/**
 * Thrown when a GPIO pin is requested while another feature currently holds it.
 * The message names both the pin and the blocking feature.
 */
public class PinBlockedException extends RuntimeException {

    /** Serialization version. */
    private static final long serialVersionUID = 6737984685844582750L;

    /**
     * The feature currently blocking the pin. Assigned once in the constructor,
     * so declared final (the original field was mutable for no reason).
     */
    private final PinFeature blocker;

    /**
     * Creates an exception describing that {@code blocker} currently holds the pin.
     *
     * @param blocker the feature blocking the pin; its pin name and feature name
     *                are embedded in the exception message
     */
    public PinBlockedException(PinFeature blocker) {
        super(String.format("Pin %s is currently blocked by %s", blocker.getPin().getName(), blocker.getName()));
        this.blocker = blocker;
    }

    /**
     * Gets the blocker.
     *
     * @return the feature that blocks the pin
     */
    public PinFeature getBlocker() {
        return blocker;
    }
}
| doordeck/simplegpio | simplegpio-core/src/main/java/com/doordeck/simplegpio/gpio/PinBlockedException.java | Java | apache-2.0 | 1,446 |
package com.gmail.thelimeglass.Scoreboards;
import org.bukkit.event.Event;
import org.bukkit.scoreboard.Team;
import org.eclipse.jdt.annotation.Nullable;
import ch.njol.skript.classes.Changer;
import ch.njol.skript.classes.Changer.ChangeMode;
import ch.njol.skript.lang.Expression;
import ch.njol.skript.lang.SkriptParser.ParseResult;
import ch.njol.skript.lang.util.SimpleExpression;
import ch.njol.util.Kleenean;
import ch.njol.util.coll.CollectionUtils;
public class ExprTeamFriendlyInvisibles extends SimpleExpression<Boolean>{
//(score[ ][board]|[skellett[ ]]board) [friendly] invisible[s] [state] [(for|of)] [team] %team%
private Expression<Team> team;
@Override
public Class<? extends Boolean> getReturnType() {
return Boolean.class;
}
@Override
public boolean isSingle() {
return true;
}
@SuppressWarnings("unchecked")
@Override
public boolean init(Expression<?>[] e, int matchedPattern, Kleenean isDelayed, ParseResult parser) {
team = (Expression<Team>) e[0];
return true;
}
@Override
public String toString(@Nullable Event e, boolean arg1) {
return "(score[ ][board]|[skellett[ ]]board) [friendly] invisible[s] [state] [(for|of)] [team] %team%";
}
@Override
@Nullable
protected Boolean[] get(Event e) {
return new Boolean[]{team.getSingle(e).canSeeFriendlyInvisibles()};
}
@Override
public void change(Event e, Object[] delta, Changer.ChangeMode mode){
if (mode == ChangeMode.SET) {
team.getSingle(e).setCanSeeFriendlyInvisibles((Boolean)delta[0]);
}
}
@Override
public Class<?>[] acceptChange(final Changer.ChangeMode mode) {
if (mode == ChangeMode.SET) {
return CollectionUtils.array(Boolean.class);
}
return null;
}
} | TheLimeGlass/Skellett | src/main/java/com/gmail/thelimeglass/Scoreboards/ExprTeamFriendlyInvisibles.java | Java | apache-2.0 | 1,691 |
/*
* Copyright 2013 The Regents of The University California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.berkeley.sparrow.daemon.nodemonitor;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import edu.berkeley.sparrow.daemon.util.Logging;
import edu.berkeley.sparrow.daemon.util.Network;
import edu.berkeley.sparrow.thrift.TEnqueueTaskReservationsRequest;
import edu.berkeley.sparrow.thrift.TFullTaskId;
import edu.berkeley.sparrow.thrift.TTaskLaunchSpec;
import edu.berkeley.sparrow.thrift.TUserGroupInfo;
/**
* A TaskScheduler is a buffer that holds task reservations until an application backend is
* available to run the task. When a backend is ready, the TaskScheduler requests the task
* from the {@link Scheduler} that submitted the reservation.
*
* Each scheduler will implement a different policy determining when to launch tasks.
*
* Schedulers are required to be thread safe, as they will be accessed concurrently from
* multiple threads.
*/
public abstract class TaskScheduler {
  // Mutable holder describing one task reservation as it moves from "enqueued"
  // to "runnable" to "launched". Fields are filled in incrementally.
  protected class TaskSpec {
    public String appId;
    public TUserGroupInfo user;
    public String requestId;
    public InetSocketAddress schedulerAddress;
    public InetSocketAddress appBackendAddress;
    /**
     * ID of the task that previously ran in the slot this task is using. Used
     * to track how long it takes to fill an empty slot on a slave. Empty if this task was launched
     * immediately, because there were empty slots available on the slave. Filled in when
     * the task is launched.
     */
    public String previousRequestId;
    public String previousTaskId;
    /** Filled in after the getTask() RPC completes. */
    public TTaskLaunchSpec taskSpec;
    // Copies the identifying fields out of the enqueue request; previous* start
    // empty and are populated when the task is assigned a slot.
    public TaskSpec(TEnqueueTaskReservationsRequest request, InetSocketAddress appBackendAddress) {
      appId = request.getAppId();
      user = request.getUser();
      requestId = request.getRequestId();
      schedulerAddress = new InetSocketAddress(request.getSchedulerAddress().getHost(),
          request.getSchedulerAddress().getPort());
      this.appBackendAddress = appBackendAddress;
      previousRequestId = "";
      previousTaskId = "";
    }
  }
  private final static Logger LOG = Logger.getLogger(TaskScheduler.class);
  private final static Logger AUDIT_LOG = Logging.getAuditLogger(TaskScheduler.class);
  private String ipAddress;
  protected Configuration conf;
  // Thread-safe queue of reservations ready to launch; producers are the
  // concrete schedulers (via makeTaskRunnable), the consumer is getNextTask().
  private final BlockingQueue<TaskSpec> runnableTaskQueue =
      new LinkedBlockingQueue<TaskSpec>();
  /** Initialize the task scheduler, passing it the current available resources
   * on the machine. */
  void initialize(Configuration conf, int nodeMonitorPort) {
    this.conf = conf;
    this.ipAddress = Network.getIPAddress(conf);
  }
  /**
   * Get the next task available for launching. This will block until a task is available.
   */
  TaskSpec getNextTask() {
    TaskSpec task = null;
    try {
      task = runnableTaskQueue.take();
    } catch (InterruptedException e) {
      // NOTE(review): logs fatally and returns null rather than re-interrupting;
      // callers must tolerate a null return on interruption.
      LOG.fatal(e);
    }
    return task;
  }
  /**
   * Returns the current number of runnable tasks (for testing).
   */
  int runnableTasks() {
    return runnableTaskQueue.size();
  }
  // Audits each completed task, then delegates slot-release policy to the subclass.
  void tasksFinished(List<TFullTaskId> finishedTasks) {
    for (TFullTaskId t : finishedTasks) {
      AUDIT_LOG.info(Logging.auditEventString("task_completed", t.getRequestId(), t.getTaskId()));
      handleTaskFinished(t.getRequestId(), t.getTaskId());
    }
  }
  // Audits the empty getTask() response, then lets the subclass reclaim the slot.
  void noTaskForReservation(TaskSpec taskReservation) {
    AUDIT_LOG.info(Logging.auditEventString("node_monitor_get_task_no_task",
        taskReservation.requestId,
        taskReservation.previousRequestId,
        taskReservation.previousTaskId));
    handleNoTaskForReservation(taskReservation);
  }
  // Hands a reservation to the launcher thread via the blocking queue.
  protected void makeTaskRunnable(TaskSpec task) {
    try {
      LOG.debug("Putting reservation for request " + task.requestId + " in runnable queue");
      runnableTaskQueue.put(task);
    } catch (InterruptedException e) {
      LOG.fatal("Unable to add task to runnable queue: " + e.getMessage());
    }
  }
  // Creates one TaskSpec per requested task and submits each to the subclass
  // policy; synchronized so reservations from one request are enqueued atomically.
  public synchronized void submitTaskReservations(TEnqueueTaskReservationsRequest request,
      InetSocketAddress appBackendAddress) {
    for (int i = 0; i < request.getNumTasks(); ++i) {
      LOG.debug("Creating reservation " + i + " for request " + request.getRequestId());
      TaskSpec reservation = new TaskSpec(request, appBackendAddress);
      int queuedReservations = handleSubmitTaskReservation(reservation);
      // NOTE(review): the "[WDM] on NM" suffix in this audit event name looks like a
      // debugging leftover; confirm audit-log consumers expect it before changing.
      AUDIT_LOG.info(Logging.auditEventString("reservation_enqueued [WDM] on NM", ipAddress, request.requestId,
          queuedReservations));
    }
  }
  // TASK SCHEDULERS MUST IMPLEMENT THE FOLLOWING.
  /**
   * Handles a task reservation. Returns the number of queued reservations.
   */
  abstract int handleSubmitTaskReservation(TaskSpec taskReservation);
  /**
   * Cancels all task reservations with the given request id. Returns the number of task
   * reservations cancelled.
   */
  abstract int cancelTaskReservations(String requestId);
  /**
   * Handles the completion of a task that has finished executing.
   */
  protected abstract void handleTaskFinished(String requestId, String taskId);
  /**
   * Handles the case when the node monitor tried to launch a task for a reservation, but
   * the corresponding scheduler didn't return a task (typically because all of the corresponding
   * job's tasks have been launched).
   */
  protected abstract void handleNoTaskForReservation(TaskSpec taskSpec);
  /**
   * Returns the maximum number of active tasks allowed (the number of slots).
   *
   * -1 signals that the scheduler does not enforce a maximum number of active tasks.
   */
  abstract int getMaxActiveTasks();
}
package com.googlecode.d2j.dex;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.commons.Remapper;
import org.objectweb.asm.commons.RemappingClassAdapter;
// Rewrites class names so that '-' (illegal in JVM identifiers produced by some
// dex inputs, e.g. lambda-related names) becomes '_', via an ASM remapper.
public class LambadaNameSafeClassAdapter extends RemappingClassAdapter {
    // Returns the remapped internal name of the class currently being visited.
    public String getClassName() {
        return remapper.mapType(className);
    }
    public LambadaNameSafeClassAdapter(ClassVisitor cv) {
        super(cv, new Remapper() {
            @Override
            public String mapType(String type) {
                // Only transformation applied: dash -> underscore.
                return type.replace('-', '_');
            }
        });
    }
}
| rgfernandes/dex2jar | dex-translator/src/main/java/com/googlecode/d2j/dex/LambadaNameSafeClassAdapter.java | Java | apache-2.0 | 576 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.java.operators;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
/**
* Base class for operations that operates on a single input data set.
*
* @param <IN> The data type of the input data set.
* @param <OUT> The data type of the returned data set.
*/
public abstract class SingleInputOperator<IN, OUT, O extends SingleInputOperator<IN, OUT, O>> extends Operator<OUT, O> {
	// The single upstream data set this operator consumes; fixed at construction.
	private final DataSet<IN> input;
	protected SingleInputOperator(DataSet<IN> input, TypeInformation<OUT> resultType) {
		// The execution environment is inherited from the input data set.
		super(input.getExecutionEnvironment(), resultType);
		this.input = input;
	}
	/**
	 * Gets the data set that this operation uses as its input.
	 *
	 * @return The data set that this operation uses as its input.
	 */
	public DataSet<IN> getInput() {
		return this.input;
	}
	/**
	 * Gets the type information of the data type of the input data set.
	 * This method returns equivalent information as {@code getInput().getType()}.
	 *
	 * @return The input data type.
	 */
	public TypeInformation<IN> getInputType() {
		return this.input.getType();
	}
	/**
	 * Translates this operation to a data flow operator of the common data flow API.
	 *
	 * @param input The data flow operator that produces this operation's input data.
	 * @return The translated data flow operator.
	 */
	protected abstract org.apache.flink.api.common.operators.SingleInputOperator<?, OUT, ?> translateToDataFlow(
			org.apache.flink.api.common.operators.Operator<IN> input);
}
| citlab/vs.msc.ws14 | flink-0-7-custom/flink-java/src/main/java/org/apache/flink/api/java/operators/SingleInputOperator.java | Java | apache-2.0 | 2,357 |
package com.cworld.earngold.sys.action;
import org.springframework.stereotype.Controller;
import com.core.code.action.BaseAction;
import com.cworld.earngold.mobile.inf.InterfaceLogin;
import com.cworld.earngold.mobile.inf.InterfaceUser;
import com.cworld.earngold.mobile.inf.impl.InterfaceLoginImpl;
import com.cworld.earngold.sys.beans.User;
@SuppressWarnings("serial")
@Controller
public class UserAction extends BaseAction {
private String phone;
private String pwd;
private String checkPwd;
private String checkCode;
private String messageCode;
private InterfaceUser infUser = null;
private InterfaceLogin infLogin = null;
private boolean isLogin() {
if (this.httpSession.getAttribute("User") != null) {
return true;
} else {
return false;
}
}
public String login() {
infLogin = new InterfaceLoginImpl();
// User user = infLogin.login(phone, pwd);
// Use to Test Start
Integer count = (Integer) this.httpSession
.getAttribute("PasswordFault");
String checkCode = (String)this.httpSession.getAttribute("checkCode");
System.out.println(count+"--------"+checkCode);
if (null != count && count >= 3) {
if(null == checkCode || checkCode.equalsIgnoreCase((String)this.httpSession.getAttribute("CheckCode"))){
// Set Error Message
return "loginPage";
}
}
if ("admin".equalsIgnoreCase(phone) && "admin".equalsIgnoreCase(pwd)) {
User user = new User();
user.setUserId(1);
user.setPhone("123456789");
user.setType("admin");
user.setAccountHead("123456789.png");
// End
if (user != null) {
this.httpSession.setAttribute("user", user);
if ("admin".equalsIgnoreCase(user.getType())) {
System.out.println("adminPage");
return "adminPage";
} else {
System.out.println("mainPage");
return "mainPage";
}
} else {
System.out.println("loginPage");
passwordFaultFunc();
return "loginPage";
}
} else {
System.out.println("loginPage");
passwordFaultFunc();
return "loginPage";
}
}
public String register(){
return "";
}
public String getPhone() {
return phone;
}
public void setPhone(String phone) {
this.phone = phone;
}
public String getPwd() {
return pwd;
}
public void setPwd(String pwd) {
this.pwd = pwd;
}
private void passwordFaultFunc() {
Integer count = (Integer) this.httpSession
.getAttribute("PasswordFault");
if (null == count) {
this.httpSession.setAttribute("PasswordFault", 1);
} else {
this.httpSession.setAttribute("PasswordFault", count++);
}
}
public String getCheckCode() {
return checkCode;
}
public void setCheckCode(String checkCode) {
this.checkCode = checkCode;
}
public String getCheckPwd() {
return checkPwd;
}
public void setCheckPwd(String checkPwd) {
this.checkPwd = checkPwd;
}
public String getMessageCode() {
return messageCode;
}
public void setMessageCode(String messageCode) {
this.messageCode = messageCode;
}
}
| hejzgithub/CworldEarnGold | src/com/cworld/earngold/sys/action/UserAction.java | Java | apache-2.0 | 3,106 |
package com.playfab;
import java.util.HashMap;
import java.util.Date;
// Request model for linking a Game Center account to a PlayFab account.
// NOTE(review): appears to be generated SDK code (C#-style /// doc markers in a
// Java file, public field by SDK convention) — confirm before hand-editing.
public class LinkGameCenterAccountRequest
{
    /// <summary>
    /// Game Center identifier for the player account to be linked
    /// </summary>
    public String GameCenterId;
}
| nathancassano/PlayFabJavaAndroidSDK | PlayFabClientSDK/src/com/playfab/LinkGameCenterAccountRequest.java | Java | apache-2.0 | 249 |
package com.example.newbook4.bean;
import java.util.Comparator;
import org.json.JSONException;
import org.json.JSONObject;
import android.util.Log;
/**
 * Book-listing info bean populated from server-side JSON.
 *
 * <p>All fields are public and filled in directly by the parsing layer.
 */
public class InfoBean {
    public int info_id;
    public int user_id;
    public String bookname;
    public String price;
    //public String time;
    // Raw JSON, e.g. {"contact":...,"address1":...,"phone":...} — see getRealAddress()
    public String address;
    public int concern_num;
    public int accusation_num;
    public String generate_time;

    public InfoBean() {
    }

    /**
     * Extracts the human-readable address from the raw JSON held in
     * {@link #address}.
     *
     * @return the "address1" value, or an empty string when the JSON
     *     cannot be parsed or the key is missing
     */
    public String getRealAddress() {
        try {
            JSONObject jsonObject = new JSONObject(address);
            return jsonObject.getString("address1");
        } catch (JSONException e) {
            Log.d("InfoBean", e.toString());
        }
        return "";
    }

    /** Orders beans by descending {@code info_id} (newest first). */
    public static Comparator<InfoBean> Comparator = new Comparator<InfoBean>() {
        public int compare(InfoBean s1, InfoBean s2) {
            // Bug fix: "s2.info_id - s1.info_id" can overflow for extreme
            // ids and violate the Comparator contract; Integer.compare is safe.
            return Integer.compare(s2.info_id, s1.info_id);
        }
    };
}
| wangyusheng/NewBook3 | src/com/example/newbook4/bean/InfoBean.java | Java | apache-2.0 | 918 |
// NLS bundle: Simplified Chinese (zh-cn) UI strings for the Search widget's
// source-setting dialog. Keys must match the root strings bundle.
define(
({
"sourceSetting": "搜索源设置",
"instruction": "添加并配置地理编码服务或要素图层为搜索源。这些指定的源决定了搜索框中的可搜索内容。",
"add": "添加搜索源",
"addGeocoder": "添加地理编码器",
"geocoder": "地理编码器",
"setLayerSource": "设置图层源",
"setGeocoderURL": "设置地理编码器 URL",
"searchableLayer": "要素图层",
"name": "名称",
"countryCode": "国家代码或区域代码",
"countryCodeEg": "例如 ",
"countryCodeHint": "将此值留空可搜索所有国家和地区",
"generalSetting": "常规设置",
"allPlaceholder": "用于搜索全部内容的占位符文本: ",
"showInfoWindowOnSelect": "显示已找到要素或位置的弹出窗口",
"searchInCurrentMapExtent": "仅在当前地图范围内搜索",
"zoomScale": "缩放比例",
"locatorUrl": "地理编码器 URL",
"locatorName": "地理编码器名称",
"locatorExample": "示例",
"locatorWarning": "不支持此版本的地理编码服务。该微件支持 10.0 及更高版本的地理编码服务。",
"locatorTips": "由于地理编码服务不支持建议功能,因此建议不可用。",
"layerSource": "图层源",
"searchLayerTips": "由于要素服务不支持分页功能,因此建议不可用。",
"placeholder": "占位符文本",
"searchFields": "搜索字段",
"displayField": "显示字段",
"exactMatch": "完全匹配",
"maxSuggestions": "最大建议数",
"maxResults": "最大结果数",
"setSearchFields": "设置搜索字段",
"set": "设置",
"fieldSearchable": "可搜索",
"fieldName": "名称",
"fieldAlias": "别名",
"ok": "确定",
"cancel": "取消",
"invalidUrlTip": "URL ${URL} 无效或不可访问。"
})
); | fiskinator/WAB2.0_JBox_MutualAid | widgets/Search/setting/nls/zh-cn/strings.js | JavaScript | apache-2.0 | 1903
<?php
/**
* [WeEngine System] Copyright (c) 2014 WE7.CC
* WeEngine is NOT a free software, it under the license terms, visited http://www.we7.cc/ for more details.
*/
defined('IN_IA') or exit('Access Denied');
/**
 * Lists the client-side files that make up the cloud-service client.
 *
 * The concatenated md5 of these files becomes the "client" checksum sent
 * with every cloud request (see _cloud_build_params()).
 *
 * @return array File paths relative to IA_ROOT.
 */
function cloud_client_define() {
	$clientFiles = array(
		'/framework/function/communication.func.php',
		'/framework/model/cloud.mod.php',
		'/web/source/cloud/upgrade.ctrl.php',
		'/web/source/cloud/process.ctrl.php',
		'/web/source/cloud/dock.ctrl.php',
		'/web/themes/default/cloud/upgrade.html',
		'/web/themes/default/cloud/process.html'
	);
	return $clientFiles;
}
/**
 * Prepares cloud access by loading the site settings.
 *
 * NOTE(review): the original key/token validation body has been commented
 * out, which makes the following `return true;` the body of the `if`:
 * empty key/token => returns true, otherwise the function falls through
 * and implicitly returns NULL. Callers that only test is_error() treat
 * both as success, but this looks like deliberately disabled registration
 * checking — confirm it is intended.
 */
function cloud_prepare() {
global $_W;
setting_load();
if(empty($_W['setting']['site']['key']) || empty($_W['setting']['site']['token']))/* {
return error('-1', "您的程序需要在微擎云服务平台注册你的站点资料, 来接入云平台服务后才能使用相应功能.");
}*/
return true;
}
/**
 * Asks the cloud platform whether a module may be installed locally.
 *
 * NOTE(review): as in cloud_prepare(), the copyright-protection body is
 * commented out, so `return true;` has become the body of the `if`:
 * a protected module returns true, any other response falls through and
 * implicitly returns NULL. Confirm this bypass is intended.
 *
 * @param string $name Module identifier.
 * @return mixed true / NULL / error array (network failure).
 */
function cloud_m_prepare($name) {
$pars['method'] = 'module.check';
$pars['module'] = $name;
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
if (is_error($dat)) {
return $dat;
}
if ($dat['content'] == 'install-module-protect') /*{
return error('-1', '此模块已设置版权保护,您只能通过云平台来安装。');
}*/
return true;
}
/**
 * Builds the common authentication parameters sent with every cloud call:
 * host, product family/version/release, the site key, an md5 password
 * derived from key+token, and a checksum over the local client files.
 *
 * @return array Request parameters.
 */
function _cloud_build_params() {
global $_W;
$pars = array();
$pars['host'] = $_SERVER['HTTP_HOST'];
$pars['family'] = IMS_FAMILY;
$pars['version'] = IMS_VERSION;
$pars['release'] = IMS_RELEASE_DATE;
$pars['key'] = $_W['setting']['site']['key'];
$pars['password'] = md5($_W['setting']['site']['key'] . $_W['setting']['site']['token']);
$clients = cloud_client_define();
$string = '';
// Checksum of all client files lets the server detect a modified client.
foreach($clients as $cli) {
$string .= md5_file(IA_ROOT . $cli);
}
$pars['client'] = md5($string);
return $pars;
}
/**
 * Builds a module install/upgrade manifest from the cloud platform.
 *
 * Fetches the remote manifest, then keeps only the files whose local copy
 * is missing or has a different md5, and only the table schemas that
 * differ from the local database.
 *
 * @param string $modulename Module identifier.
 * @return array|error Manifest with 'files', 'schemas', 'upgrade', 'type'
 *                     and (for a fresh install) 'install' keys.
 */
function cloud_m_build($modulename) {
$sql = 'SELECT * FROM ' . tablename('modules') . ' WHERE `name`=:name';
$module = pdo_fetch($sql, array(':name' => $modulename));
$pars = _cloud_build_params();
$pars['method'] = 'module.build';
$pars['module'] = $modulename;
if (!empty($module)) {
$pars['module_version'] = $module['version'];
}
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/module.build';
$ret = _cloud_shipping_parse($dat, $file);
if (!is_error($ret)) {
$dir = IA_ROOT . '/addons/' . $modulename;
// Keep only files that are missing locally or whose checksum differs.
$files = array();
if (!empty($ret['files'])) {
foreach ($ret['files'] as $file) {
$entry = $dir . $file['path'];
if (!is_file($entry) || md5_file($entry) != $file['checksum']) {
$files[] = '/' . $modulename . $file['path'];
}
}
}
$ret['files'] = $files;
// Keep only schemas that differ from the local tables; the increment
// (auto_increment) value is ignored when comparing.
$schemas = array();
if (!empty($ret['schemas'])) {
load()->func('db');
foreach ($ret['schemas'] as $remote) {
$name = substr($remote['tablename'], 4);
$local = db_table_schema(pdo(), $name);
unset($remote['increment']);
unset($local['increment']);
if (empty($local)) {
$schemas[] = $remote;
} else {
$diffs = db_table_fix_sql($local, $remote);
if (!empty($diffs)) {
$schemas[] = $remote;
}
}
}
}
$ret['upgrade'] = true;
$ret['type'] = 'module';
$ret['schemas'] = $schemas;
if (empty($module)) {
// Module not present locally: this is a fresh install.
$ret['install'] = 1;
}
}
return $ret;
}
/**
 * Queries the cloud platform for the list of available modules.
 *
 * @return array|error Parsed response data.
 */
function cloud_m_query() {
	$params = _cloud_build_params();
	$params['method'] = 'module.query';
	$response = cloud_request('http://www.weike.com/gateway.php', $params);
	return _cloud_shipping_parse($response, IA_ROOT . '/data/module.query');
}
/**
 * Fetches detail information for a single module from the cloud platform.
 *
 * @param string $name Module identifier.
 * @return array|error Parsed response data.
 */
function cloud_m_info($name) {
	$params = _cloud_build_params();
	$params['method'] = 'module.info';
	$params['module'] = $name;
	$response = cloud_request('http://www.weike.com/gateway.php', $params);
	return _cloud_shipping_parse($response, IA_ROOT . '/data/module.info');
}
/**
 * Fetches upgrade information for an installed module, passing the
 * currently installed version so the server can compute the delta.
 *
 * @param string $name Module identifier.
 * @return array|error Parsed response data.
 */
function cloud_m_upgradeinfo($name) {
$module = pdo_fetch("SELECT name, version FROM ".tablename('modules')." WHERE name = '{$name}'");
$pars = _cloud_build_params();
$pars['method'] = 'module.info';
$pars['module'] = $name;
$pars['curversion'] = $module['version'];
$pars['isupgrade'] = 1;
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/module.info';
$ret = _cloud_shipping_parse($dat, $file);
return $ret;
}
/**
 * Asks the cloud platform whether a template (theme) may be installed.
 *
 * NOTE(review): same disabled-check pattern as cloud_m_prepare() — the
 * protection body is commented out, so `return true;` is the if-body:
 * a protected theme returns true, anything else implicitly returns NULL.
 * Confirm this bypass is intended.
 *
 * @param string $name Theme identifier.
 * @return mixed true / NULL / error array (network failure).
 */
function cloud_t_prepare($name) {
$pars['method'] = 'theme.check';
$pars['theme'] = $name;
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
if (is_error($dat)) {
return $dat;
}
if ($dat['content'] == 'install-theme-protect') /*{
return error('-1', '此模板已设置版权保护,您只能通过云平台来安装。');
}*/
return true;
}
/**
 * Queries the cloud platform for the list of available templates (themes).
 *
 * @return array|error Parsed response data.
 */
function cloud_t_query() {
	$params = _cloud_build_params();
	$params['method'] = 'theme.query';
	$response = cloud_request('http://www.weike.com/gateway.php', $params);
	return _cloud_shipping_parse($response, IA_ROOT . '/data/theme.query');
}
/**
 * Fetches detail information for a single template (theme).
 *
 * @param string $name Theme identifier.
 * @return array|error Parsed response data.
 */
function cloud_t_info($name) {
	$params = _cloud_build_params();
	$params['method'] = 'theme.info';
	$params['theme'] = $name;
	$response = cloud_request('http://www.weike.com/gateway.php', $params);
	return _cloud_shipping_parse($response, IA_ROOT . '/data/theme.info');
}
/**
 * Builds a template (theme) install/upgrade manifest from the cloud
 * platform, keeping only the files whose local copy is missing or whose
 * md5 differs.
 *
 * @param string $name Theme identifier.
 * @return array|error Manifest with 'files', 'upgrade', 'type' and (for a
 *                     fresh install) 'install' keys.
 */
function cloud_t_build($name) {
$sql = 'SELECT * FROM ' . tablename('site_templates') . ' WHERE `name`=:name';
$theme = pdo_fetch($sql, array(':name' => $name));
$pars = _cloud_build_params();
$pars['method'] = 'theme.build';
$pars['theme'] = $name;
if(!empty($theme)) {
$pars['themeversion'] = $theme['version'];
}
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/theme.build';
$ret = _cloud_shipping_parse($dat, $file);
if(!is_error($ret)) {
$dir = IA_ROOT . '/app/themes/' . $name;
// Keep only files that are missing locally or whose checksum differs.
$files = array();
if(!empty($ret['files'])) {
foreach($ret['files'] as $file) {
$entry = $dir . $file['path'];
if(!is_file($entry) || md5_file($entry) != $file['checksum']) {
$files[] = '/'. $name . $file['path'];
}
}
}
$ret['files'] = $files;
$ret['upgrade'] = true;
$ret['type'] = 'theme';
if(empty($theme)) {
// Theme not present locally: this is a fresh install.
$ret['install'] = 1;
}
}
return $ret;
}
/**
 * Sends an SMS through the cloud platform using the current uni-account's
 * SMS quota and signature settings.
 *
 * @param string $mobile  Destination phone number.
 * @param string $content Message body (the signature is appended).
 * @return bool|error true on success, otherwise an error array.
 */
function cloud_sms_send($mobile, $content) {
global $_W;
$row = pdo_fetch("SELECT `notify` FROM ".tablename('uni_settings') . " WHERE uniacid = :uniacid", array(':uniacid' => $_W['uniacid']));
$row['notify'] = @iunserializer($row['notify']);
if(!empty($row['notify']) && !empty($row['notify']['sms'])) {
$config = $row['notify']['sms'];
$balance = intval($config['balance']);
if($balance <= 0) {
return error(-1, '发送短信失败, 请联系系统管理人员. 错误详情: 短信余额不足');
}
// Signature fallback chain: configured signature -> site name (x family)
// -> hard-coded default.
$sign = $config['signature'];
if(empty($sign) && IMS_FAMILY == 'x') {
$sign = $_W['setting']['copyright']['sitename'];
}
if(empty($sign)) {
$sign = '微擎';
}
$pars = _cloud_build_params();
$pars['method'] = 'sms.send';
$pars['mobile'] = $mobile;
$pars['content'] = $content . " 【{$sign}】";
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/sms.send';
$ret = _cloud_shipping_parse($dat, $file);
if (is_error($ret)) {
return error($ret['errno'], $ret['message']);
}
if ($ret == 'success') {
return true;
} else {
return error(-1, $ret);
}
}
return error(-1, '发送短信失败, 请联系系统管理人员. 错误详情: 没有设置短信配额或参数');
}
/**
 * Builds the core-application upgrade manifest from the cloud platform.
 *
 * On a 'warning' state the whole client file set is scheduled for
 * replacement; otherwise only changed files and differing table schemas
 * are kept. Also caches an 'upgrade' flag for the admin UI and handles
 * the v -> x (commercial) family switch.
 *
 * @return array|error Manifest data or an error array.
 */
function cloud_build() {
$pars = _cloud_build_params();
$pars['method'] = 'application.build';
$pars['extra'] = cloud_extra_data();
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/application.build';
$ret = _cloud_shipping_parse($dat, $file);
if(!is_error($ret)) {
if($ret['state'] == 'warning') {
// Client files are out of sync with the server: refresh them all.
$ret['files'] = cloud_client_define();
unset($ret['schemas']);
unset($ret['scripts']);
} else {
// Keep only files that are missing locally or whose checksum differs.
$files = array();
if(!empty($ret['files'])) {
foreach($ret['files'] as $file) {
$entry = IA_ROOT . $file['path'];
if(!is_file($entry) || md5_file($entry) != $file['checksum']) {
$files[] = $file['path'];
}
}
}
$ret['files'] = $files;
// Keep only schemas that differ from the local tables (ignoring the
// auto_increment value).
$schemas = array();
if(!empty($ret['schemas'])) {
load()->func('db');
foreach($ret['schemas'] as $remote) {
$name = substr($remote['tablename'], 4);
$local = db_table_schema(pdo(), $name);
unset($remote['increment']);
unset($local['increment']);
if(empty($local)) {
$schemas[] = $remote;
} else {
$sqls = db_table_fix_sql($local, $remote);
if(!empty($sqls)) {
$schemas[] = $remote;
}
}
}
}
$ret['schemas'] = $schemas;
}
// Server says the site owns a commercial (x) license while running the
// free (v) family: switch families and restart the upgrade.
if($ret['family'] == 'x' && IMS_FAMILY == 'v') {
load()->model('setting');
setting_upgrade_version('x', IMS_VERSION, IMS_RELEASE_DATE);
message('您已经购买了商业授权版本, 系统将转换为商业版, 并重新运行自动更新程序.', 'refresh');
}
$ret['upgrade'] = false;
if(!empty($ret['files']) || !empty($ret['schemas']) || !empty($ret['scripts'])) {
$ret['upgrade'] = true;
}
// Cache the result so the admin UI can show an "upgrade available" badge.
$upgrade = array();
$upgrade['upgrade'] = $ret['upgrade'];
$upgrade['lastupdate'] = TIMESTAMP;
cache_write('upgrade', $upgrade);
}
return $ret;
}
/**
 * Fetches the reference table schemas from the cloud platform and returns
 * only those that differ from the local database (auto_increment ignored).
 *
 * @return array|error Response with the filtered 'schemas' list.
 */
function cloud_schema() {
$pars = _cloud_build_params();
$pars['method'] = 'application.schema';
$dat = cloud_request('http://www.weike.com/gateway.php', $pars);
$file = IA_ROOT . '/data/application.schema';
$ret = _cloud_shipping_parse($dat, $file);
if(!is_error($ret)) {
$schemas = array();
if(!empty($ret['schemas'])) {
load()->func('db');
foreach($ret['schemas'] as $remote) {
$name = substr($remote['tablename'], 4);
$local = db_table_schema(pdo(), $name);
unset($remote['increment']);
unset($local['increment']);
if(empty($local)) {
$schemas[] = $remote;
} else {
$diffs = db_schema_compare($local, $remote);
if(!empty($diffs)) {
$schemas[] = $remote;
}
}
}
}
$ret['schemas'] = $schemas;
}
return $ret;
}
/**
 * Asks the cloud platform to ship (download) a file to this installation.
 *
 * The server pushes the file content back; a plain 'success' body means
 * the transfer completed. gzip is negotiated when both zlib functions are
 * available.
 *
 * @param string $path File path to download.
 * @param string $type Optional shipping type (e.g. 'module', 'theme').
 * @return bool|error true on success, otherwise an error array.
 */
function cloud_download($path, $type = '') {
$pars = _cloud_build_params();
$pars['method'] = 'application.shipping';
$pars['path'] = $path;
$pars['type'] = $type;
$pars['gz'] = function_exists('gzcompress') && function_exists('gzuncompress') ? 'true' : 'false';
$headers = array('content-type' => 'application/x-www-form-urlencoded');
// Long timeout: the server streams file content in this call.
$dat = cloud_request('http://www.weike.com/gateway.php', $pars, $headers, 300);
if(is_error($dat)) {
return error(-1, '网络存在错误, 请稍后重试。' . $dat['message']);
}
if($dat['content'] == 'success') {
return true;
}
// Anything else should be a serialized error payload from the server.
$ret = @json_decode($dat['content'], true);
if(is_error($ret)) {
return $ret;
} else {
return error(-1, '不能下载文件, 请稍后重试。');
}
}
/**
 * Validates and decodes a cloud-platform response.
 *
 * The HTTP body carries a 32-char md5 "secret"; the actual payload is
 * written by the server into $file on disk. The secret must match the one
 * embedded in the payload before the payload is unserialized. An errno of
 * -2 means the commercial (x) license was rejected, in which case the
 * version file is switched back to the free (v) family.
 *
 * @param array|error $dat  Raw result of cloud_request().
 * @param string      $file Path of the server-written payload file.
 * @return mixed Decoded payload array or an error array.
 */
function _cloud_shipping_parse($dat, $file) {
if (is_error($dat)) {
return error(-1, '网络传输错误, 请检查您的cURL是否可用, 或者服务器网络是否正常. ' . $dat['message']);
}
// The body itself may be a serialized error (e.g. auth failure).
$tmp = unserialize($dat['content']);
if (is_array($tmp) && is_error($tmp)) {
if ($tmp['errno'] == '-2') {
// License invalid: downgrade family marker x -> v.
$data = file_get_contents(IA_ROOT . '/framework/version.inc.php');
file_put_contents(IA_ROOT . '/framework/version.inc.php', str_replace("'x'", "'v'", $data));
}
return $tmp;
}
if ($dat['content'] == 'patching') {
return error(-1, '补丁程序正在更新中,请稍后再试!');
}
if ($dat['content'] == 'blacklist') {
return error(-1, '抱歉,您的站点已被列入云服务黑名单,云服务一切业务已被禁止,请联系微擎客服!');
}
// A valid response body is exactly the 32-char md5 secret.
if (strlen($dat['content']) != 32) {
return error(-1, '云服务平台向您的服务器传输数据过程中出现错误, 这个错误可能是由于您的通信密钥和云服务不一致, 请尝试诊断云服务参数(重置站点ID和通信密钥). 传输原始数据:' . $dat['meta']);
}
$data = @file_get_contents($file);
if (empty($data)) {
return error(-1, '没有接收到服务器的传输的数据.');
}
// Payload file is one-shot; remove it once read.
@unlink($file);
$ret = @iunserializer($data);
if (empty($data) || empty($ret) || $dat['content'] != $ret['secret']) {
return error(-1, '云服务平台向您的服务器传输的数据校验失败, 可能是因为您的网络不稳定, 或网络不安全, 请稍后重试.');
}
$ret = iunserializer($ret['data']);
if (is_array($ret) && is_error($ret)) {
if ($ret['errno'] == '-2') {
$data = file_get_contents(IA_ROOT . '/framework/version.inc.php');
file_put_contents(IA_ROOT . '/framework/version.inc.php', str_replace("'x'", "'v'", $data));
}
}
if (!is_error($ret) && is_array($ret) && !empty($ret)) {
if ($ret['state'] == 'fatal') {
return error($ret['errorno'], '发生错误: ' . $ret['message']);
}
return $ret;
} else {
return error(-1, "发生错误: {$ret['message']}");
}
}
/**
 * Sends an HTTP request to the cloud platform, applying the configured
 * cloud IP override when present.
 *
 * @param string       $url     Gateway URL.
 * @param string|array $post    POST body; empty string issues a GET.
 * @param array        $extra   Extra options passed to ihttp_request().
 * @param int          $timeout Timeout in seconds.
 * @return array|error ihttp_request() result.
 */
function cloud_request($url, $post = '', $extra = array(), $timeout = 60) {
	// Bug fix: $_W was read without a "global" declaration, so inside this
	// function it was always undefined and the configured cloudip override
	// was never applied.
	global $_W;
	load()->func('communication');
	if (!empty($_W['setting']['cloudip'])) {
		$extra['ip'] = $_W['setting']['cloudip'];
	}
	return ihttp_request($url, $post, $extra, $timeout);
}
/**
 * Collects extra site data (the distinct WeChat accounts) that is attached
 * to application.build requests.
 *
 * @return string Serialized data array.
 */
function cloud_extra_data() {
$data = array();
$data['accounts'] = pdo_fetchall("SELECT name, account, original FROM ".tablename('account_wechats') . " GROUP BY account");
return serialize($data);
} | shengkai86/yushungroup | framework/model/cloud.mod.php | PHP | apache-2.0 | 13655
package weixin.popular.bean.datacube.getcardbizuininfo;
import com.google.gson.annotations.SerializedName;
/**
 * One per-day data point in the response of the WeChat DataCube
 * "get card bizuin info" API (card overview statistics).
 *
 * @author Moyq5
 */
public class BizuinInfoResultInfo {
/** Date of this data point. */
@SerializedName("ref_date")
private String refDate;
/** Number of card views. */
@SerializedName("view_cnt")
private Integer viewCnt;
/** Number of distinct users who viewed the card. */
@SerializedName("view_user")
private Integer viewUser;
/** Number of times the card was claimed. */
@SerializedName("receive_cnt")
private Integer receiveCnt;
/** Number of distinct users who claimed the card. */
@SerializedName("receive_user")
private Integer receiveUser;
/** Number of times the card was redeemed. */
@SerializedName("verify_cnt")
private Integer verifyCnt;
/** Number of distinct users who redeemed the card. */
@SerializedName("verify_user")
private Integer verifyUser;
/** Number of times the card was gifted to another user. */
@SerializedName("given_cnt")
private Integer givenCnt;
/** Number of distinct users who gifted the card. */
@SerializedName("given_user")
private Integer givenUser;
/** Number of cards that expired. */
@SerializedName("expire_cnt")
private Integer expireCnt;
/** Number of distinct users whose cards expired. */
@SerializedName("expire_user")
private Integer expireUser;
/**
 * @return date of this data point
 */
public String getRefDate() {
return refDate;
}
/**
 * @param refDate date of this data point
 */
public void setRefDate(String refDate) {
this.refDate = refDate;
}
/**
 * @return number of card views
 */
public Integer getViewCnt() {
return viewCnt;
}
/**
 * @param viewCnt number of card views
 */
public void setViewCnt(Integer viewCnt) {
this.viewCnt = viewCnt;
}
/**
 * @return number of distinct users who viewed the card
 */
public Integer getViewUser() {
return viewUser;
}
/**
 * @param viewUser number of distinct users who viewed the card
 */
public void setViewUser(Integer viewUser) {
this.viewUser = viewUser;
}
/**
 * @return number of times the card was claimed
 */
public Integer getReceiveCnt() {
return receiveCnt;
}
/**
 * @param receiveCnt number of times the card was claimed
 */
public void setReceiveCnt(Integer receiveCnt) {
this.receiveCnt = receiveCnt;
}
/**
 * @return number of distinct users who claimed the card
 */
public Integer getReceiveUser() {
return receiveUser;
}
/**
 * @param receiveUser number of distinct users who claimed the card
 */
public void setReceiveUser(Integer receiveUser) {
this.receiveUser = receiveUser;
}
/**
 * @return number of times the card was redeemed
 */
public Integer getVerifyCnt() {
return verifyCnt;
}
/**
 * @param verifyCnt number of times the card was redeemed
 */
public void setVerifyCnt(Integer verifyCnt) {
this.verifyCnt = verifyCnt;
}
/**
 * @return number of distinct users who redeemed the card
 */
public Integer getVerifyUser() {
return verifyUser;
}
/**
 * @param verifyUser number of distinct users who redeemed the card
 */
public void setVerifyUser(Integer verifyUser) {
this.verifyUser = verifyUser;
}
/**
 * @return number of times the card was gifted
 */
public Integer getGivenCnt() {
return givenCnt;
}
/**
 * @param givenCnt number of times the card was gifted
 */
public void setGivenCnt(Integer givenCnt) {
this.givenCnt = givenCnt;
}
/**
 * @return number of distinct users who gifted the card
 */
public Integer getGivenUser() {
return givenUser;
}
/**
 * @param givenUser number of distinct users who gifted the card
 */
public void setGivenUser(Integer givenUser) {
this.givenUser = givenUser;
}
/**
 * @return number of cards that expired
 */
public Integer getExpireCnt() {
return expireCnt;
}
/**
 * @param expireCnt number of cards that expired
 */
public void setExpireCnt(Integer expireCnt) {
this.expireCnt = expireCnt;
}
/**
 * @return number of distinct users whose cards expired
 */
public Integer getExpireUser() {
return expireUser;
}
/**
 * @param expireUser number of distinct users whose cards expired
 */
public void setExpireUser(Integer expireUser) {
this.expireUser = expireUser;
}
}
| China-ls/wechat4java | src/main/java/weixin/popular/bean/datacube/getcardbizuininfo/BizuinInfoResultInfo.java | Java | apache-2.0 | 4,198 |
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v3rpc
import (
"io"
"sync"
"time"
"golang.org/x/net/context"
"github.com/coreos/etcd/etcdserver"
"github.com/coreos/etcd/etcdserver/api/v3rpc/rpctypes"
pb "github.com/coreos/etcd/etcdserver/etcdserverpb"
"github.com/coreos/etcd/mvcc"
"github.com/coreos/etcd/mvcc/mvccpb"
)
// watchServer implements the gRPC pb.WatchServer service on top of the
// etcd server's mvcc watchable store.
type watchServer struct {
clusterID int64
memberID int64
raftTimer etcdserver.RaftTimer
// watchable is the mvcc store watch streams are created from.
watchable mvcc.Watchable
}
// NewWatchServer returns a pb.WatchServer backed by the given etcd
// server's watchable store and cluster/member identity.
func NewWatchServer(s *etcdserver.EtcdServer) pb.WatchServer {
return &watchServer{
clusterID: int64(s.Cluster().ID()),
memberID: int64(s.ID()),
raftTimer: s,
watchable: s.Watchable(),
}
}
var (
// External test can read this with GetProgressReportInterval()
// and change this to a small value to finish fast with
// SetProgressReportInterval().
progressReportInterval = 10 * time.Minute
// progressReportIntervalMu guards progressReportInterval.
progressReportIntervalMu sync.RWMutex
)
// GetProgressReportInterval returns the current interval between
// automatic watch progress notifications.
func GetProgressReportInterval() time.Duration {
	progressReportIntervalMu.RLock()
	interval := progressReportInterval
	progressReportIntervalMu.RUnlock()
	return interval
}
// SetProgressReportInterval changes the interval between automatic watch
// progress notifications (intended for tests).
func SetProgressReportInterval(d time.Duration) {
	progressReportIntervalMu.Lock()
	progressReportInterval = d
	progressReportIntervalMu.Unlock()
}
const (
// We send ctrl response inside the read loop. We do not want
// send to block read, but we still want ctrl response we sent to
// be serialized. Thus we use a buffered chan to solve the problem.
// A small buffer should be OK for most cases, since we expect the
// ctrl requests are infrequent.
ctrlStreamBufLen = 16
)
// serverWatchStream is an etcd server side stream. It receives requests
// from client side gRPC stream. It receives watch events from mvcc.WatchStream,
// and creates responses that forwarded to gRPC stream.
// It also forwards control message like watch created and canceled.
type serverWatchStream struct {
clusterID int64
memberID int64
raftTimer etcdserver.RaftTimer
// gRPCStream is the client-facing bidirectional gRPC stream.
gRPCStream pb.Watch_WatchServer
// watchStream is the mvcc-facing stream that produces watch events.
watchStream mvcc.WatchStream
// ctrlStream carries created/canceled control responses to sendLoop.
ctrlStream chan *pb.WatchResponse
// progress tracks the watchID that stream might need to send
// progress to.
progress map[mvcc.WatchID]bool
// mu protects progress
mu sync.Mutex
// closec indicates the stream is closed.
closec chan struct{}
}
// Watch implements pb.WatchServer. It wires the client gRPC stream to a
// new serverWatchStream, runs sendLoop in a goroutine and blocks on
// recvLoop until the stream ends or its context is done.
func (ws *watchServer) Watch(stream pb.Watch_WatchServer) error {
sws := serverWatchStream{
clusterID: ws.clusterID,
memberID: ws.memberID,
raftTimer: ws.raftTimer,
gRPCStream: stream,
watchStream: ws.watchable.NewWatchStream(),
// chan for sending control response like watcher created and canceled.
ctrlStream: make(chan *pb.WatchResponse, ctrlStreamBufLen),
progress: make(map[mvcc.WatchID]bool),
closec: make(chan struct{}),
}
go sws.sendLoop()
errc := make(chan error, 1)
go func() {
errc <- sws.recvLoop()
sws.close()
}()
select {
case err := <-errc:
return err
case <-stream.Context().Done():
err := stream.Context().Err()
// the only server-side cancellation is noleader for now.
if err == context.Canceled {
return rpctypes.ErrGRPCNoLeader
}
return err
}
}
// recvLoop receives watch create/cancel requests from the client gRPC
// stream and applies them to the mvcc watch stream. Control responses
// (created/canceled) are queued on ctrlStream so sendLoop serializes them
// with event responses.
func (sws *serverWatchStream) recvLoop() error {
	for {
		req, err := sws.gRPCStream.Recv()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}

		switch uv := req.RequestUnion.(type) {
		case *pb.WatchRequest_CreateRequest:
			if uv.CreateRequest == nil {
				break
			}

			creq := uv.CreateRequest
			if len(creq.Key) == 0 {
				// \x00 is the smallest key
				creq.Key = []byte{0}
			}
			if len(creq.RangeEnd) == 1 && creq.RangeEnd[0] == 0 {
				// support >= key queries
				creq.RangeEnd = []byte{}
			}
			wsrev := sws.watchStream.Rev()
			rev := creq.StartRevision
			if rev == 0 {
				rev = wsrev + 1
			}
			id := sws.watchStream.Watch(creq.Key, creq.RangeEnd, rev)
			if id != -1 && creq.ProgressNotify {
				// Bug fix: progress is also read/written by sendLoop under
				// sws.mu; the original unsynchronized write here was a data
				// race with the progress ticker iteration.
				sws.mu.Lock()
				sws.progress[id] = true
				sws.mu.Unlock()
			}
			sws.ctrlStream <- &pb.WatchResponse{
				Header:   sws.newResponseHeader(wsrev),
				WatchId:  int64(id),
				Created:  true,
				Canceled: id == -1,
			}
		case *pb.WatchRequest_CancelRequest:
			if uv.CancelRequest != nil {
				id := uv.CancelRequest.WatchId
				err := sws.watchStream.Cancel(mvcc.WatchID(id))
				if err == nil {
					sws.ctrlStream <- &pb.WatchResponse{
						Header:   sws.newResponseHeader(sws.watchStream.Rev()),
						WatchId:  id,
						Canceled: true,
					}
					sws.mu.Lock()
					delete(sws.progress, mvcc.WatchID(id))
					sws.mu.Unlock()
				}
			}
			// TODO: do we need to return error back to client?
		default:
			panic("not implemented")
		}
	}
}
// sendLoop forwards watch events and control responses to the client gRPC
// stream. Events arriving before the corresponding "created" control
// response has been sent are buffered in pending and flushed once the
// watch id is announced. It also emits periodic progress notifications.
func (sws *serverWatchStream) sendLoop() {
	// watch ids that are currently active
	ids := make(map[mvcc.WatchID]struct{})
	// watch responses pending on a watch id creation message
	pending := make(map[mvcc.WatchID][]*pb.WatchResponse)

	interval := GetProgressReportInterval()
	progressTicker := time.NewTicker(interval)
	defer progressTicker.Stop()

	for {
		select {
		case wresp, ok := <-sws.watchStream.Chan():
			if !ok {
				return
			}

			// TODO: evs is []mvccpb.Event type
			// either return []*mvccpb.Event from the mvcc package
			// or define protocol buffer with []mvccpb.Event.
			evs := wresp.Events
			events := make([]*mvccpb.Event, len(evs))
			for i := range evs {
				events[i] = &evs[i]
			}

			wr := &pb.WatchResponse{
				Header:          sws.newResponseHeader(wresp.Revision),
				WatchId:         int64(wresp.WatchID),
				Events:          events,
				CompactRevision: wresp.CompactRevision,
			}

			if _, hasId := ids[wresp.WatchID]; !hasId {
				// buffer if id not yet announced
				wrs := append(pending[wresp.WatchID], wr)
				pending[wresp.WatchID] = wrs
				continue
			}

			mvcc.ReportEventReceived()
			if err := sws.gRPCStream.Send(wr); err != nil {
				return
			}

			sws.mu.Lock()
			if _, ok := sws.progress[wresp.WatchID]; ok {
				// an event was just sent, so no standalone progress
				// notification is needed this interval
				sws.progress[wresp.WatchID] = false
			}
			sws.mu.Unlock()

		case c, ok := <-sws.ctrlStream:
			if !ok {
				return
			}

			if err := sws.gRPCStream.Send(c); err != nil {
				return
			}

			// track id creation
			wid := mvcc.WatchID(c.WatchId)
			if c.Canceled {
				delete(ids, wid)
				continue
			}
			if c.Created {
				// flush buffered events
				ids[wid] = struct{}{}
				for _, v := range pending[wid] {
					mvcc.ReportEventReceived()
					if err := sws.gRPCStream.Send(v); err != nil {
						return
					}
				}
				delete(pending, wid)
			}

		case <-progressTicker.C:
			// Bug fix: progress is written by recvLoop (and the event branch
			// above) under sws.mu; the original unsynchronized iteration here
			// was a data race on the map.
			sws.mu.Lock()
			for id, ok := range sws.progress {
				if ok {
					sws.watchStream.RequestProgress(id)
				}
				sws.progress[id] = true
			}
			sws.mu.Unlock()

		case <-sws.closec:
			// drain the chan to clean up pending events
			for range sws.watchStream.Chan() {
				mvcc.ReportEventReceived()
			}
			for _, wrs := range pending {
				for range wrs {
					mvcc.ReportEventReceived()
				}
			}
		}
	}
}
// close shuts down the mvcc watch stream and signals both loops to exit
// by closing closec and the control channel.
func (sws *serverWatchStream) close() {
sws.watchStream.Close()
close(sws.closec)
close(sws.ctrlStream)
}
// newResponseHeader builds a response header carrying the cluster/member
// identity, the given store revision and the current raft term.
func (sws *serverWatchStream) newResponseHeader(rev int64) *pb.ResponseHeader {
return &pb.ResponseHeader{
ClusterId: uint64(sws.clusterID),
MemberId: uint64(sws.memberID),
Revision: rev,
RaftTerm: sws.raftTimer.Term(),
}
}
| dnaeon/etcd | etcdserver/api/v3rpc/watch.go | GO | apache-2.0 | 7,705 |
// This is a generated file. Not intended for manual editing.
package vektah.rust.psi;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;
/**
 * PSI node for a Rust closure expression. Generated by Grammar-Kit —
 * regenerate rather than edit by hand.
 */
public interface RustExprClosure extends RustExpr {
@NotNull
RustClosureBody getClosureBody();
@NotNull
RustClosureExprArgs getClosureExprArgs();
}
| consulo/consulo-rust | gen/vektah/rust/psi/RustExprClosure.java | Java | apache-2.0 | 343 |
<?php
/**
* @group conduit
*/
/**
 * Conduit method "paste.create": stores the given content as a file and
 * creates a paste object pointing at it.
 */
final class ConduitAPI_paste_create_Method extends ConduitAPI_paste_Method {
public function getMethodDescription() {
return 'Create a new paste.';
}
public function defineParamTypes() {
return array(
'content' => 'required string',
'title' => 'optional string',
'language' => 'optional string',
);
}
public function defineReturnType() {
return 'nonempty dict';
}
public function defineErrorTypes() {
return array(
'ERR-NO-PASTE' => 'Paste may not be empty.',
);
}
/**
 * Creates the paste.
 *
 * @param ConduitAPIRequest $request Carries content/title/language.
 * @return array Paste info dictionary.
 * @throws ConduitException ERR-NO-PASTE when content is empty.
 */
protected function execute(ConduitAPIRequest $request) {
$content = $request->getValue('content');
$title = $request->getValue('title');
$language = $request->getValue('language');
if (!strlen($content)) {
throw new ConduitException('ERR-NO-PASTE');
}
// Default title/language when the caller omits them.
$title = nonempty($title, 'Masterwork From Distant Lands');
$language = nonempty($language, '');
$user = $request->getUser();
// Paste content is stored as a file object owned by the caller.
$paste_file = PhabricatorFile::newFromFileData(
$content,
array(
'name' => $title,
'mime-type' => 'text/plain; charset=utf-8',
'authorPHID' => $user->getPHID(),
));
// TODO: This should use PhabricatorPasteEditor.
$paste = PhabricatorPaste::initializeNewPaste($user);
$paste->setTitle($title);
$paste->setLanguage($language);
$paste->setFilePHID($paste_file->getPHID());
$paste->save();
$paste_file->attachToObject($user, $paste->getPHID());
$paste->attachRawContent($content);
return $this->buildPasteInfoDictionary($paste);
}
}
| yangming85/phabricator | src/applications/paste/conduit/ConduitAPI_paste_create_Method.php | PHP | apache-2.0 | 1,630 |
/******************************************************************************
* WebJavin - Java Web Framework. *
* *
* Copyright (c) 2011 - Sergey "Frosman" Lukjanov, me@frostman.ru *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
******************************************************************************/
package ru.frostman.web.indigo.openid;
import ru.frostman.web.controller.Controllers;
/**
 * Helper for building OpenID authentication URLs that are handled by
 * {@code OpenIdController}.
 *
 * @author slukjanov aka Frostman
 */
public class OpenId {
/**
 * Builds the auth-redirect URL for the given OpenID provider and
 * post-login target URL.
 *
 * NOTE(review): the provider and target values are concatenated without
 * URL encoding; a targetUrl containing '&amp;' or '?' would corrupt the
 * query string — confirm callers pass pre-encoded values.
 */
public static String getAuthUrl(String openIdProvider, String targetUrl) {
return Controllers.url(OpenIdController.AUTH_REDIRECT_URL) + "?"
+ OpenIdController.PARAM_PROVIDER + "=" + openIdProvider + "&"
+ OpenIdController.PARAM_TARGET + "=" + targetUrl;
}
/** Convenience wrapper for {@link #getAuthUrl} with the Google endpoint. */
public static String getGoogleAuthUrl(String targetUrl) {
return getAuthUrl(OpenIdController.GOOGLE_ENDPOINT, targetUrl);
}
}
| Frostman/webjavin | webjavin-indigo/src/main/java/ru/frostman/web/indigo/openid/OpenId.java | Java | apache-2.0 | 1,963 |
/*
* Copyright 2016-2017 Florent Weber <florent.weber@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.amaze.bench.cluster.jms;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.typesafe.config.Config;
import io.amaze.bench.cluster.actor.ActorSender;
import io.amaze.bench.cluster.leader.LeaderClusterClientFactory;
import io.amaze.bench.cluster.leader.ResourceManagerClusterClient;
import io.amaze.bench.cluster.metric.MetricsRepository;
import io.amaze.bench.cluster.metric.MetricsRepositoryClusterClient;
import io.amaze.bench.cluster.registry.ActorRegistry;
import io.amaze.bench.cluster.registry.ActorRegistryClusterClient;
import io.amaze.bench.cluster.registry.AgentRegistry;
import io.amaze.bench.cluster.registry.AgentRegistryClusterClient;
import io.amaze.bench.shared.jms.*;
import javax.validation.constraints.NotNull;
import static com.google.common.base.Throwables.propagate;
import static java.util.Objects.requireNonNull;
/**
 * JMS implementation of {@link LeaderClusterClientFactory}.
 *
 * <p>Boots (or wraps) an embedded FFMQ {@link JMSServer} and hands out the
 * leader-side cluster clients that communicate over it. {@link #close()}
 * releases the lazily created sender client and shuts the server down.
 *
 * <p>Created on 10/23/16.
 */
public final class JMSLeaderClusterClientFactory implements LeaderClusterClientFactory {

    private final JMSServer server;
    private final JMSEndpoint serverEndpoint;
    private final ActorRegistry actorRegistry;

    // Created lazily by actorSender(); volatile so close() observes it.
    private volatile JMSClient senderJmsClient;

    public JMSLeaderClusterClientFactory(@NotNull final Config factoryConfig,
                                         @NotNull final ActorRegistry actorRegistry) {
        this.actorRegistry = requireNonNull(actorRegistry);
        try {
            this.serverEndpoint = new JMSEndpoint(factoryConfig);
            this.server = new FFMQServer(serverEndpoint);
        } catch (JMSException e) {
            throw propagate(e);
        }
    }

    @VisibleForTesting
    public JMSLeaderClusterClientFactory(@NotNull final JMSServer server,
                                         @NotNull final JMSEndpoint serverEndpoint,
                                         @NotNull final ActorRegistry actorRegistry) {
        this.actorRegistry = requireNonNull(actorRegistry);
        this.serverEndpoint = requireNonNull(serverEndpoint);
        this.server = requireNonNull(server);
    }

    @Override
    public ActorSender actorSender() {
        senderJmsClient = createJmsClient();
        return new JMSActorSender(senderJmsClient);
    }

    @Override
    public ResourceManagerClusterClient createForResourceManager() {
        return new JMSResourceManagerClusterClient(server, serverEndpoint);
    }

    @Override
    public MetricsRepositoryClusterClient createForMetricsRepository(@NotNull final MetricsRepository metricsRepository) {
        requireNonNull(metricsRepository);
        MetricsRepositoryClusterClient clusterClient = new JMSMetricsRepositoryClusterClient(serverEndpoint);
        clusterClient.startMetricsListener(metricsRepository.createClusterListener());
        return clusterClient;
    }

    @Override
    public ActorRegistryClusterClient createForActorRegistry() {
        ActorRegistryClusterClient registryClusterClient = new JMSActorRegistryClusterClient(serverEndpoint);
        registryClusterClient.startRegistryListener(actorRegistry.createClusterListener());
        return registryClusterClient;
    }

    @Override
    public AgentRegistryClusterClient createForAgentRegistry(@NotNull final AgentRegistry agentRegistry) {
        requireNonNull(agentRegistry);
        AgentRegistryClusterClient agentRegistryClient = new JMSAgentRegistryClusterClient(serverEndpoint);
        agentRegistryClient.startRegistryListener(agentRegistry.createClusterListener());
        return agentRegistryClient;
    }

    @Override
    public void close() {
        if (senderJmsClient != null) {
            senderJmsClient.close();
        }
        server.close();
    }

    private JMSClient createJmsClient() {
        try {
            return new FFMQClient(serverEndpoint);
        } catch (JMSException e) {
            // Consistency fix: use the statically imported propagate() as in
            // the constructor, instead of Throwables.propagate().
            throw propagate(e);
        }
    }
}
| florentw/bench | cluster-jms/src/main/java/io/amaze/bench/cluster/jms/JMSLeaderClusterClientFactory.java | Java | apache-2.0 | 4,603 |
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.httpd.restapi;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.math.RoundingMode.CEILING;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import static java.nio.charset.StandardCharsets.UTF_8;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_CONFLICT;
import static javax.servlet.http.HttpServletResponse.SC_CREATED;
import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN;
import static javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
import static javax.servlet.http.HttpServletResponse.SC_METHOD_NOT_ALLOWED;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED;
import static javax.servlet.http.HttpServletResponse.SC_NO_CONTENT;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static javax.servlet.http.HttpServletResponse.SC_PRECONDITION_FAILED;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import com.google.common.io.BaseEncoding;
import com.google.common.math.IntMath;
import com.google.common.net.HttpHeaders;
import com.google.gerrit.audit.AuditService;
import com.google.gerrit.audit.HttpAuditEvent;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.extensions.registration.DynamicItem;
import com.google.gerrit.extensions.registration.DynamicMap;
import com.google.gerrit.extensions.restapi.AcceptsCreate;
import com.google.gerrit.extensions.restapi.AcceptsDelete;
import com.google.gerrit.extensions.restapi.AcceptsPost;
import com.google.gerrit.extensions.restapi.AuthException;
import com.google.gerrit.extensions.restapi.BadRequestException;
import com.google.gerrit.extensions.restapi.BinaryResult;
import com.google.gerrit.extensions.restapi.CacheControl;
import com.google.gerrit.extensions.restapi.DefaultInput;
import com.google.gerrit.extensions.restapi.IdString;
import com.google.gerrit.extensions.restapi.MethodNotAllowedException;
import com.google.gerrit.extensions.restapi.PreconditionFailedException;
import com.google.gerrit.extensions.restapi.RawInput;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.ResourceNotFoundException;
import com.google.gerrit.extensions.restapi.Response;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.extensions.restapi.RestCollection;
import com.google.gerrit.extensions.restapi.RestModifyView;
import com.google.gerrit.extensions.restapi.RestReadView;
import com.google.gerrit.extensions.restapi.RestResource;
import com.google.gerrit.extensions.restapi.RestView;
import com.google.gerrit.extensions.restapi.TopLevelResource;
import com.google.gerrit.extensions.restapi.UnprocessableEntityException;
import com.google.gerrit.httpd.WebSession;
import com.google.gerrit.server.AccessPath;
import com.google.gerrit.server.AnonymousUser;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.OptionUtil;
import com.google.gerrit.server.OutputFormat;
import com.google.gerrit.server.account.CapabilityUtils;
import com.google.gerrit.util.http.RequestUtil;
import com.google.gson.ExclusionStrategy;
import com.google.gson.FieldAttributes;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.MalformedJsonException;
import com.google.gwtexpui.server.CacheHeaders;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.util.Providers;
import org.eclipse.jgit.util.TemporaryBuffer;
import org.eclipse.jgit.util.TemporaryBuffer.Heap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.sql.Timestamp;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.zip.GZIPOutputStream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Front controller for the REST API: resolves the URL path to a resource,
 * dispatches to the matching {@code RestView}, and writes the reply
 * (JSON by default, see {@link #replyJson}).
 */
public class RestApiServlet extends HttpServlet {
  private static final long serialVersionUID = 1L;
  private static final Logger log = LoggerFactory
      .getLogger(RestApiServlet.class);

  /** MIME type used for a JSON response body. */
  private static final String JSON_TYPE = "application/json";

  /** MIME type of an HTML form submission body. */
  private static final String FORM_TYPE = "application/x-www-form-urlencoded";

  /**
   * Garbage prefix inserted before JSON output to prevent XSSI.
   * <p>
   * This prefix is ")]}'\n" and is designed to prevent a web browser from
   * executing the response body if the resource URI were to be referenced using
   * a <script src="...> HTML tag from another web site. Clients using the
   * HTTP interface will need to always strip the first line of response data to
   * remove this magic header.
   */
  public static final byte[] JSON_MAGIC;
  static {
    JSON_MAGIC = ")]}'\n".getBytes(UTF_8);
  }

  /** Request-scoped services shared by every RestApiServlet instance. */
  public static class Globals {
    final Provider<CurrentUser> currentUser;
    final DynamicItem<WebSession> webSession;
    final Provider<ParameterParser> paramParser;
    final AuditService auditService;

    @Inject
    Globals(Provider<CurrentUser> currentUser,
        DynamicItem<WebSession> webSession,
        Provider<ParameterParser> paramParser,
        AuditService auditService) {
      this.currentUser = currentUser;
      this.webSession = webSession;
      this.paramParser = paramParser;
      this.auditService = auditService;
    }
  }

  private final Globals globals;
  // Root collection the first path segment is resolved against.
  private final Provider<RestCollection<RestResource, RestResource>> members;

  public RestApiServlet(Globals globals,
      RestCollection<? extends RestResource, ? extends RestResource> members) {
    this(globals, Providers.of(members));
  }

  public RestApiServlet(Globals globals,
      Provider<? extends RestCollection<? extends RestResource, ? extends RestResource>> members) {
    // Narrowing the wildcard provider is safe here: the servlet only reads
    // from the collection; it never inserts resources into it.
    @SuppressWarnings("unchecked")
    Provider<RestCollection<RestResource, RestResource>> n =
        (Provider<RestCollection<RestResource, RestResource>>) checkNotNull((Object) members);
    this.globals = globals;
    this.members = n;
  }
/**
 * Central dispatch for every REST request.
 * <p>
 * Walks the URL path segment by segment through nested
 * {@code RestCollection}s, resolves the final view, parses the request
 * body (for modify views), invokes the view, and renders the result.
 * All REST-level errors are translated to HTTP status codes here, and an
 * audit event is always dispatched in the finally block.
 */
@Override
protected final void service(HttpServletRequest req, HttpServletResponse res)
    throws ServletException, IOException {
  long auditStartTs = TimeUtil.nowMs();
  // Defensive headers: discourage browsers from rendering/sniffing replies.
  res.setHeader("Content-Disposition", "attachment");
  res.setHeader("X-Content-Type-Options", "nosniff");
  int status = SC_OK;
  Object result = null;
  Multimap<String, String> params = LinkedHashMultimap.create();
  Object inputRequestBody = null;
  try {
    checkUserSession(req);
    List<IdString> path = splitPath(req);
    RestCollection<RestResource, RestResource> rc = members.get();
    CapabilityUtils.checkRequiresCapability(globals.currentUser,
        null, rc.getClass());
    RestResource rsrc = TopLevelResource.INSTANCE;
    ViewData viewData = new ViewData(null, null);
    // Resolve the first path segment against the root collection.
    if (path.isEmpty()) {
      if (isGetOrHead(req)) {
        viewData = new ViewData(null, rc.list());
      } else if (rc instanceof AcceptsPost && "POST".equals(req.getMethod())) {
        @SuppressWarnings("unchecked")
        AcceptsPost<RestResource> ac = (AcceptsPost<RestResource>) rc;
        viewData = new ViewData(null, ac.post(rsrc));
      } else {
        throw new MethodNotAllowedException();
      }
    } else {
      IdString id = path.remove(0);
      try {
        rsrc = rc.parse(rsrc, id);
        if (path.isEmpty()) {
          checkPreconditions(req);
        }
      } catch (ResourceNotFoundException e) {
        // Unknown member: PUT/POST may create it if the collection allows.
        if (rc instanceof AcceptsCreate
            && path.isEmpty()
            && ("POST".equals(req.getMethod())
                || "PUT".equals(req.getMethod()))) {
          @SuppressWarnings("unchecked")
          AcceptsCreate<RestResource> ac = (AcceptsCreate<RestResource>) rc;
          viewData = new ViewData(null, ac.create(rsrc, id));
          status = SC_CREATED;
        } else {
          throw e;
        }
      }
      if (viewData.view == null) {
        viewData = view(rsrc, rc, req.getMethod(), path);
      }
    }
    checkRequiresCapability(viewData);
    // Descend through nested collections until a leaf view is reached.
    while (viewData.view instanceof RestCollection<?,?>) {
      @SuppressWarnings("unchecked")
      RestCollection<RestResource, RestResource> c =
          (RestCollection<RestResource, RestResource>) viewData.view;
      if (path.isEmpty()) {
        if (isGetOrHead(req)) {
          viewData = new ViewData(null, c.list());
        } else if (c instanceof AcceptsPost && "POST".equals(req.getMethod())) {
          @SuppressWarnings("unchecked")
          AcceptsPost<RestResource> ac = (AcceptsPost<RestResource>) c;
          viewData = new ViewData(null, ac.post(rsrc));
        } else if (c instanceof AcceptsDelete && "DELETE".equals(req.getMethod())) {
          @SuppressWarnings("unchecked")
          AcceptsDelete<RestResource> ac = (AcceptsDelete<RestResource>) c;
          viewData = new ViewData(null, ac.delete(rsrc, null));
        } else {
          throw new MethodNotAllowedException();
        }
        break;
      } else {
        IdString id = path.remove(0);
        try {
          rsrc = c.parse(rsrc, id);
          checkPreconditions(req);
          viewData = new ViewData(null, null);
        } catch (ResourceNotFoundException e) {
          if (c instanceof AcceptsCreate
              && path.isEmpty()
              && ("POST".equals(req.getMethod())
                  || "PUT".equals(req.getMethod()))) {
            @SuppressWarnings("unchecked")
            AcceptsCreate<RestResource> ac = (AcceptsCreate<RestResource>) c;
            viewData = new ViewData(viewData.pluginName, ac.create(rsrc, id));
            status = SC_CREATED;
          } else if (c instanceof AcceptsDelete
              && path.isEmpty()
              && "DELETE".equals(req.getMethod())) {
            @SuppressWarnings("unchecked")
            AcceptsDelete<RestResource> ac = (AcceptsDelete<RestResource>) c;
            viewData = new ViewData(viewData.pluginName, ac.delete(rsrc, id));
            status = SC_NO_CONTENT;
          } else {
            throw e;
          }
        }
        if (viewData.view == null) {
          viewData = view(rsrc, c, req.getMethod(), path);
        }
      }
      checkRequiresCapability(viewData);
    }
    // Honor conditional GET/HEAD before doing any real work.
    if (notModified(req, rsrc)) {
      res.sendError(SC_NOT_MODIFIED);
      return;
    }
    Multimap<String, String> config = LinkedHashMultimap.create();
    ParameterParser.splitQueryString(req.getQueryString(), config, params);
    if (!globals.paramParser.get().parse(viewData.view, params, req, res)) {
      // Parser already wrote an error reply.
      return;
    }
    if (viewData.view instanceof RestModifyView<?, ?>) {
      @SuppressWarnings("unchecked")
      RestModifyView<RestResource, Object> m =
          (RestModifyView<RestResource, Object>) viewData.view;
      inputRequestBody = parseRequest(req, inputType(m));
      result = m.apply(rsrc, inputRequestBody);
    } else if (viewData.view instanceof RestReadView<?>) {
      result = ((RestReadView<RestResource>) viewData.view).apply(rsrc);
    } else {
      throw new ResourceNotFoundException();
    }
    // Views may wrap their result to control status code and caching.
    if (result instanceof Response) {
      @SuppressWarnings("rawtypes")
      Response<?> r = (Response) result;
      status = r.statusCode();
      configureCaching(req, res, rsrc, r.caching());
    } else if (result instanceof Response.Redirect) {
      CacheHeaders.setNotCacheable(res);
      res.sendRedirect(((Response.Redirect) result).location());
      return;
    } else {
      CacheHeaders.setNotCacheable(res);
    }
    res.setStatus(status);
    if (result != Response.none()) {
      result = Response.unwrap(result);
      if (result instanceof BinaryResult) {
        replyBinaryResult(req, res, (BinaryResult) result);
      } else {
        replyJson(req, res, config, result);
      }
    }
  } catch (MalformedJsonException e) {
    replyError(req, res, status = SC_BAD_REQUEST,
        "Invalid " + JSON_TYPE + " in request", e);
  } catch (JsonParseException e) {
    replyError(req, res, status = SC_BAD_REQUEST,
        "Invalid " + JSON_TYPE + " in request", e);
  } catch (BadRequestException e) {
    replyError(req, res, status = SC_BAD_REQUEST, messageOr(e, "Bad Request"),
        e.caching(), e);
  } catch (AuthException e) {
    replyError(req, res, status = SC_FORBIDDEN, messageOr(e, "Forbidden"),
        e.caching(), e);
  } catch (AmbiguousViewException e) {
    replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Ambiguous"), e);
  } catch (ResourceNotFoundException e) {
    replyError(req, res, status = SC_NOT_FOUND, messageOr(e, "Not Found"),
        e.caching(), e);
  } catch (MethodNotAllowedException e) {
    replyError(req, res, status = SC_METHOD_NOT_ALLOWED,
        messageOr(e, "Method Not Allowed"), e.caching(), e);
  } catch (ResourceConflictException e) {
    replyError(req, res, status = SC_CONFLICT, messageOr(e, "Conflict"),
        e.caching(), e);
  } catch (PreconditionFailedException e) {
    replyError(req, res, status = SC_PRECONDITION_FAILED,
        messageOr(e, "Precondition Failed"), e.caching(), e);
  } catch (UnprocessableEntityException e) {
    // 422 has no constant in javax.servlet's HttpServletResponse.
    replyError(req, res, status = 422, messageOr(e, "Unprocessable Entity"),
        e.caching(), e);
  } catch (Exception e) {
    status = SC_INTERNAL_SERVER_ERROR;
    handleException(e, req, res);
  } finally {
    // Always audit, including error paths.
    globals.auditService.dispatch(new HttpAuditEvent(globals.webSession.get()
        .getSessionId(), globals.currentUser.get(), req.getRequestURI(),
        auditStartTs, params, req.getMethod(), inputRequestBody, status,
        result));
  }
}
/** Returns the throwable's message, or {@code defaultMessage} if it has none. */
private static String messageOr(Throwable t, String defaultMessage) {
  String msg = t.getMessage();
  return Strings.isNullOrEmpty(msg) ? defaultMessage : msg;
}
/**
 * Evaluates conditional request headers against the resolved resource.
 * Returns true when a 304 Not Modified response should be sent.
 */
private static boolean notModified(HttpServletRequest req, RestResource rsrc) {
  // Conditional requests only apply to read methods.
  if (!isGetOrHead(req)) {
    return false;
  }
  if (rsrc instanceof RestResource.HasETag) {
    String requested = req.getHeader(HttpHeaders.IF_NONE_MATCH);
    if (requested != null) {
      String current = ((RestResource.HasETag) rsrc).getETag();
      return requested.equals(current);
    }
  }
  if (rsrc instanceof RestResource.HasLastModified) {
    Timestamp lastModified =
        ((RestResource.HasLastModified) rsrc).getLastModified();
    long since = req.getDateHeader(HttpHeaders.IF_MODIFIED_SINCE);
    // HTTP times are in seconds, database may have millisecond precision.
    return since / 1000L == lastModified.getTime() / 1000L;
  }
  return false;
}
/**
 * Applies the view-supplied {@link CacheControl} policy to the response.
 * Only GET/HEAD replies may ever be cached; all other methods are always
 * marked not cacheable.
 * <p>
 * The unused {@code <T>} type parameter of the original declaration has
 * been removed; no caller supplied an explicit type argument.
 */
private static void configureCaching(HttpServletRequest req,
    HttpServletResponse res, RestResource rsrc, CacheControl c) {
  if (!isGetOrHead(req)) {
    CacheHeaders.setNotCacheable(res);
    return;
  }
  switch (c.getType()) {
    case PRIVATE:
      addResourceStateHeaders(res, rsrc);
      CacheHeaders.setCacheablePrivate(res,
          c.getAge(), c.getUnit(),
          c.isMustRevalidate());
      break;
    case PUBLIC:
      addResourceStateHeaders(res, rsrc);
      CacheHeaders.setCacheable(req, res,
          c.getAge(), c.getUnit(),
          c.isMustRevalidate());
      break;
    case NONE:
    default:
      CacheHeaders.setNotCacheable(res);
      break;
  }
}
/**
 * Advertises cache validators (ETag, Last-Modified) for the resource so
 * clients can issue conditional requests later.
 */
private static void addResourceStateHeaders(
    HttpServletResponse res, RestResource rsrc) {
  if (rsrc instanceof RestResource.HasETag) {
    String etag = ((RestResource.HasETag) rsrc).getETag();
    res.setHeader(HttpHeaders.ETAG, etag);
  }
  if (rsrc instanceof RestResource.HasLastModified) {
    Timestamp lastModified =
        ((RestResource.HasLastModified) rsrc).getLastModified();
    res.setDateHeader(HttpHeaders.LAST_MODIFIED, lastModified.getTime());
  }
}
/**
 * Rejects the request if the client asserted (via {@code If-None-Match: *})
 * that the resource must not yet exist — it was just resolved, so it does.
 */
private void checkPreconditions(HttpServletRequest req)
    throws PreconditionFailedException {
  String ifNoneMatch = req.getHeader("If-None-Match");
  if ("*".equals(ifNoneMatch)) {
    throw new PreconditionFailedException("Resource already exists");
  }
}
/**
 * Determines the declared input type of a modify view, failing fast when
 * the view's generic signature cannot be resolved.
 */
private static Type inputType(RestModifyView<RestResource, Object> m) {
  Type in = extractInputType(m.getClass());
  if (in != null) {
    return in;
  }
  throw new IllegalStateException(String.format(
      "View %s does not correctly implement %s",
      m.getClass(), RestModifyView.class.getSimpleName()));
}
@SuppressWarnings("rawtypes")
private static Type extractInputType(Class clazz) {
for (Type t : clazz.getGenericInterfaces()) {
if (t instanceof ParameterizedType
&& ((ParameterizedType) t).getRawType() == RestModifyView.class) {
return ((ParameterizedType) t).getActualTypeArguments()[1];
}
}
if (clazz.getSuperclass() != null) {
Type i = extractInputType(clazz.getSuperclass());
if (i != null) {
return i;
}
}
for (Class t : clazz.getInterfaces()) {
Type i = extractInputType(t);
if (i != null) {
return i;
}
}
return null;
}
/**
 * Parses the request body into an instance of {@code type}.
 * <p>
 * Supported shapes: a JSON object or bare JSON string; a raw upload when
 * the input type declares a {@code RawInput} field; no body at all
 * (yields {@code null} for DELETE, otherwise a default instance);
 * {@code text/plain}; or an HTML form encoded body converted to JSON.
 *
 * @throws BadRequestException if the body cannot be interpreted.
 */
private Object parseRequest(HttpServletRequest req, Type type)
    throws IOException, BadRequestException, SecurityException,
    IllegalArgumentException, NoSuchMethodException, IllegalAccessException,
    InstantiationException, InvocationTargetException, MethodNotAllowedException {
  if (isType(JSON_TYPE, req.getContentType())) {
    BufferedReader br = req.getReader();
    try {
      JsonReader json = new JsonReader(br);
      // Lenient mode tolerates minor formatting mistakes from clients.
      json.setLenient(true);
      JsonToken first;
      try {
        first = json.peek();
      } catch (EOFException e) {
        throw new BadRequestException("Expected JSON object");
      }
      if (first == JsonToken.STRING) {
        // A bare string populates the @DefaultInput field, if present.
        return parseString(json.nextString(), type);
      }
      return OutputFormat.JSON.newGson().fromJson(json, type);
    } finally {
      br.close();
    }
  } else if (("PUT".equals(req.getMethod()) || "POST".equals(req.getMethod()))
      && acceptsRawInput(type)) {
    return parseRawInput(req, type);
  } else if ("DELETE".equals(req.getMethod()) && hasNoBody(req)) {
    return null;
  } else if (hasNoBody(req)) {
    return createInstance(type);
  } else if (isType("text/plain", req.getContentType())) {
    BufferedReader br = req.getReader();
    try {
      char[] tmp = new char[256];
      StringBuilder sb = new StringBuilder();
      int n;
      while (0 < (n = br.read(tmp))) {
        sb.append(tmp, 0, n);
      }
      return parseString(sb.toString(), type);
    } finally {
      br.close();
    }
  } else if ("POST".equals(req.getMethod())
      && isType(FORM_TYPE, req.getContentType())) {
    return OutputFormat.JSON.newGson().fromJson(
        ParameterParser.formToJson(req),
        type);
  } else {
    throw new BadRequestException("Expected Content-Type: " + JSON_TYPE);
  }
}
/** True when the request carries no usable body. */
private static boolean hasNoBody(HttpServletRequest req) {
  int len = req.getContentLength();
  String type = req.getContentType();
  if (len <= 0 && type == null) {
    return true;
  }
  // An empty HTML form submission also counts as "no body".
  return len == 0 && isType(FORM_TYPE, type);
}
@SuppressWarnings("rawtypes")
private static boolean acceptsRawInput(Type type) {
if (type instanceof Class) {
for (Field f : ((Class) type).getDeclaredFields()) {
if (f.getType() == RawInput.class) {
return true;
}
}
}
return false;
}
/**
 * Instantiates {@code type} and injects a {@code RawInput} wrapper around
 * the request's body stream into its first {@code RawInput} field.
 *
 * @throws MethodNotAllowedException if no {@code RawInput} field exists
 *     (callers normally pre-check via {@code acceptsRawInput}).
 */
private Object parseRawInput(final HttpServletRequest req, Type type)
    throws SecurityException, NoSuchMethodException,
    IllegalArgumentException, InstantiationException, IllegalAccessException,
    InvocationTargetException, MethodNotAllowedException {
  Object obj = createInstance(type);
  for (Field f : obj.getClass().getDeclaredFields()) {
    if (f.getType() == RawInput.class) {
      f.setAccessible(true);
      // The wrapper is lazy: the servlet input stream is only consumed
      // when the view actually reads it.
      f.set(obj, new RawInput() {
        @Override
        public String getContentType() {
          return req.getContentType();
        }

        @Override
        public long getContentLength() {
          return req.getContentLength();
        }

        @Override
        public InputStream getInputStream() throws IOException {
          return req.getInputStream();
        }
      });
      return obj;
    }
  }
  throw new MethodNotAllowedException();
}
/**
 * Converts a bare string body into an instance of {@code type}: either the
 * string itself, or a new instance whose {@code @DefaultInput} String field
 * holds the value.
 *
 * @throws BadRequestException if the type cannot absorb a bare string.
 */
private Object parseString(String value, Type type)
    throws BadRequestException, SecurityException, NoSuchMethodException,
    IllegalArgumentException, IllegalAccessException, InstantiationException,
    InvocationTargetException {
  // Plain string inputs need no wrapper object.
  if (type == String.class) {
    return value;
  }
  Object input = createInstance(type);
  Field[] fields = input.getClass().getDeclaredFields();
  if (fields.length == 0 && Strings.isNullOrEmpty(value)) {
    // The input type carries no data; an empty string is acceptable.
    return input;
  }
  // Store the value into the field marked @DefaultInput, if any.
  for (Field field : fields) {
    boolean isDefault = field.getAnnotation(DefaultInput.class) != null;
    if (isDefault && field.getType() == String.class) {
      field.setAccessible(true);
      field.set(input, value);
      return input;
    }
  }
  throw new BadRequestException("Expected JSON object");
}
/**
 * Reflectively constructs an instance of {@code type} through its no-arg
 * constructor, which may be non-public.
 */
private static Object createInstance(Type type)
    throws NoSuchMethodException, InstantiationException,
    IllegalAccessException, InvocationTargetException {
  if (!(type instanceof Class)) {
    throw new InstantiationException("Cannot make " + type);
  }
  @SuppressWarnings("unchecked")
  Class<Object> clazz = (Class<Object>) type;
  Constructor<Object> ctor = clazz.getDeclaredConstructor();
  // Input DTOs often declare only a private constructor; force access.
  ctor.setAccessible(true);
  return ctor.newInstance();
}
/**
 * Serializes {@code result} as JSON — prefixed with {@link #JSON_MAGIC} to
 * defeat XSSI — and writes it as the response body.
 *
 * @param config query options controlling formatting (pp, fields, ...).
 */
public static void replyJson(@Nullable HttpServletRequest req,
    HttpServletResponse res,
    Multimap<String, String> config,
    Object result)
    throws IOException {
  // Buffer the whole document so Content-Length can be set accurately.
  TemporaryBuffer.Heap buf = heap(Integer.MAX_VALUE);
  buf.write(JSON_MAGIC);
  Writer w = new BufferedWriter(new OutputStreamWriter(buf, UTF_8));
  Gson gson = newGson(config, req);
  if (result instanceof JsonElement) {
    gson.toJson((JsonElement) result, w);
  } else {
    gson.toJson(result, w);
  }
  w.write('\n');
  w.flush();
  replyBinaryResult(req, res, asBinaryResult(buf)
      .setContentType(JSON_TYPE)
      .setCharacterEncoding(UTF_8.name()));
}
/** Builds a Gson instance honoring the request's formatting options. */
private static Gson newGson(Multimap<String, String> config,
    @Nullable HttpServletRequest req) {
  // Start from the compact format, then layer on request-driven options.
  GsonBuilder builder = OutputFormat.JSON_COMPACT.newGsonBuilder();
  enablePrettyPrint(builder, config, req);
  enablePartialGetFields(builder, config);
  return builder.create();
}
/**
 * Turns on pretty printing when requested by {@code pp}/{@code prettyPrint}
 * query options, defaulting to pretty output for clients that do not
 * explicitly accept JSON (i.e. browsers).
 */
private static void enablePrettyPrint(GsonBuilder gb,
    Multimap<String, String> config,
    @Nullable HttpServletRequest req) {
  String pp = Iterables.getFirst(config.get("pp"), null);
  if (pp == null) {
    pp = Iterables.getFirst(config.get("prettyPrint"), null);
  }
  if (pp == null && req != null) {
    pp = acceptsJson(req) ? "0" : "1";
  }
  if ("1".equals(pp) || "true".equals(pp)) {
    gb.setPrettyPrinting();
  }
}
/**
 * Implements partial responses: when the {@code fields} query option is
 * present, registers an exclusion strategy that drops every field whose
 * JSON name is not in the requested set.
 */
private static void enablePartialGetFields(GsonBuilder gb,
    Multimap<String, String> config) {
  final Set<String> want = Sets.newHashSet();
  for (String p : config.get("fields")) {
    Iterables.addAll(want, OptionUtil.splitOptionValue(p));
  }
  if (!want.isEmpty()) {
    gb.addSerializationExclusionStrategy(new ExclusionStrategy() {
      // Cache of Java field name -> JSON name translations.
      private final Map<String, String> names = Maps.newHashMap();

      @Override
      public boolean shouldSkipField(FieldAttributes field) {
        String name = names.get(field.getName());
        if (name == null) {
          // Names are supplied by Gson in terms of Java source.
          // Translate and cache the JSON lower_case_style used.
          try {
            name =
                FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES.translateName(
                    field.getDeclaringClass().getDeclaredField(field.getName()));
            names.put(field.getName(), name);
          } catch (SecurityException | NoSuchFieldException e) {
            // Field cannot be resolved reflectively; omit it.
            return true;
          }
        }
        return !want.contains(name);
      }

      @Override
      public boolean shouldSkipClass(Class<?> clazz) {
        return false;
      }
    });
  }
}
/**
 * Streams a {@link BinaryResult} to the client, applying base64 and/or
 * gzip encodings as appropriate, and always closing the original result.
 */
static void replyBinaryResult(
    @Nullable HttpServletRequest req,
    HttpServletResponse res,
    BinaryResult bin) throws IOException {
  // Keep a handle on the original so it is closed even after re-wrapping.
  final BinaryResult appResult = bin;
  try {
    if (bin.getAttachmentName() != null) {
      res.setHeader(
          "Content-Disposition",
          "attachment; filename=\"" + bin.getAttachmentName() + "\"");
    }
    if (bin.isBase64()) {
      bin = stackBase64(res, bin);
    }
    if (bin.canGzip() && acceptsGzip(req)) {
      bin = stackGzip(res, bin);
    }

    res.setContentType(bin.getContentType());
    long len = bin.getContentLength();
    if (0 <= len && len < Integer.MAX_VALUE) {
      res.setContentLength((int) len);
    } else if (0 <= len) {
      // Lengths beyond int range must be set via the raw header.
      res.setHeader("Content-Length", Long.toString(len));
    }

    // HEAD requests get headers only, no body.
    if (req == null || !"HEAD".equals(req.getMethod())) {
      OutputStream dst = res.getOutputStream();
      try {
        bin.writeTo(dst);
      } finally {
        dst.close();
      }
    }
  } finally {
    appResult.close();
  }
}
/**
 * Wraps {@code src} so it is emitted base64 encoded. Small payloads
 * (&lt;= 7 MiB) are pre-encoded in memory so the content length is known;
 * larger or unknown-length payloads are encoded on the fly.
 */
private static BinaryResult stackBase64(HttpServletResponse res,
    final BinaryResult src) throws IOException {
  BinaryResult b64;
  long len = src.getContentLength();
  if (0 <= len && len <= (7 << 20)) {
    b64 = base64(src);
  } else {
    b64 = new BinaryResult() {
      @Override
      public void writeTo(OutputStream out) throws IOException {
        OutputStream e = BaseEncoding.base64().encodingStream(
            new OutputStreamWriter(out, ISO_8859_1));
        src.writeTo(e);
        e.flush();
      }
    };
  }
  // Advertise the original encoding/type in informational headers.
  res.setHeader("X-FYI-Content-Encoding", "base64");
  res.setHeader("X-FYI-Content-Type", src.getContentType());
  return b64.setContentType("text/plain").setCharacterEncoding("ISO-8859-1");
}
/**
 * Wraps {@code src} so it is emitted gzip compressed. Mid-sized payloads
 * (256 B – 10 MiB) are compressed eagerly so compression can be skipped
 * when it does not actually shrink the data; others compress on the fly.
 */
private static BinaryResult stackGzip(HttpServletResponse res,
    final BinaryResult src) throws IOException {
  BinaryResult gz;
  long len = src.getContentLength();
  if (256 <= len && len <= (10 << 20)) {
    gz = compress(src);
    if (len <= gz.getContentLength()) {
      // Compression did not help; send the original uncompressed.
      return src;
    }
  } else {
    gz = new BinaryResult() {
      @Override
      public void writeTo(OutputStream out) throws IOException {
        GZIPOutputStream gz = new GZIPOutputStream(out);
        src.writeTo(gz);
        // finish() flushes the gzip trailer without closing the
        // caller-owned underlying stream.
        gz.finish();
        gz.flush();
      }
    };
  }
  res.setHeader("Content-Encoding", "gzip");
  return gz.setContentType(src.getContentType());
}
/**
 * Resolves the next path segment (the "projection") to a view on the
 * collection. Projections of the form {@code plugin~view} are looked up in
 * that plugin only; unqualified names try the core "gerrit" namespace
 * first, then every plugin, failing if more than one plugin matches.
 *
 * @throws AmbiguousViewException if multiple plugins supply the view.
 */
private ViewData view(
    RestResource rsrc,
    RestCollection<RestResource, RestResource> rc,
    String method, List<IdString> path) throws AmbiguousViewException,
    RestApiException {
  DynamicMap<RestView<RestResource>> views = rc.views();
  final IdString projection = path.isEmpty()
      ? IdString.fromUrl("/")
      : path.remove(0);
  if (!path.isEmpty()) {
    // If there are path components still remaining after this projection
    // is chosen, look for the projection based upon GET as the method as
    // the client thinks it is a nested collection.
    method = "GET";
  } else if ("HEAD".equals(method)) {
    method = "GET";
  }

  List<String> p = splitProjection(projection);
  if (p.size() == 2) {
    // Explicit "plugin~view" projection: restrict lookup to that plugin.
    String viewname = p.get(1);
    if (Strings.isNullOrEmpty(viewname)) {
      viewname = "/";
    }
    RestView<RestResource> view =
        views.get(p.get(0), method + "." + viewname);
    if (view != null) {
      return new ViewData(p.get(0), view);
    }
    // Fall back to the GET view if it can accept a POST.
    view = views.get(p.get(0), "GET." + viewname);
    if (view != null) {
      if (view instanceof AcceptsPost && "POST".equals(method)) {
        @SuppressWarnings("unchecked")
        AcceptsPost<RestResource> ap = (AcceptsPost<RestResource>) view;
        return new ViewData(p.get(0), ap.post(rsrc));
      }
    }
    throw new ResourceNotFoundException(projection);
  }

  // Unqualified projection: core namespace first.
  String name = method + "." + p.get(0);
  RestView<RestResource> core = views.get("gerrit", name);
  if (core != null) {
    return new ViewData(null, core);
  } else {
    core = views.get("gerrit", "GET." + p.get(0));
    if (core instanceof AcceptsPost && "POST".equals(method)) {
      @SuppressWarnings("unchecked")
      AcceptsPost<RestResource> ap = (AcceptsPost<RestResource>) core;
      return new ViewData(null, ap.post(rsrc));
    }
  }

  // Then scan all plugins; exactly one match is required.
  Map<String, RestView<RestResource>> r = Maps.newTreeMap();
  for (String plugin : views.plugins()) {
    RestView<RestResource> action = views.get(plugin, name);
    if (action != null) {
      r.put(plugin, action);
    }
  }

  if (r.size() == 1) {
    Map.Entry<String, RestView<RestResource>> entry =
        Iterables.getOnlyElement(r.entrySet());
    return new ViewData(entry.getKey(), entry.getValue());
  } else if (r.isEmpty()) {
    throw new ResourceNotFoundException(projection);
  } else {
    throw new AmbiguousViewException(String.format(
        "Projection %s is ambiguous: %s",
        name,
        Joiner.on(", ").join(
            Iterables.transform(r.keySet(), new Function<String, String>() {
              @Override
              public String apply(String in) {
                return in + "~" + projection;
              }
            }))));
  }
}
/**
 * Splits the still-encoded path info into URL-decoded segments, dropping
 * the empty segment produced by a trailing '/'.
 */
private static List<IdString> splitPath(HttpServletRequest req) {
  String path = RequestUtil.getEncodedPathInfo(req);
  if (Strings.isNullOrEmpty(path)) {
    return Collections.emptyList();
  }
  List<IdString> segments = Lists.newArrayList();
  for (String part : Splitter.on('/').split(path)) {
    segments.add(IdString.fromUrl(part));
  }
  int last = segments.size() - 1;
  if (last >= 0 && segments.get(last).isEmpty()) {
    segments.remove(last);
  }
  return segments;
}
/** Splits a projection into at most two parts: optional plugin name, view name. */
private static List<String> splitProjection(IdString projection) {
  List<String> parts = Lists.newArrayListWithCapacity(2);
  for (String s : Splitter.on('~').limit(2).split(projection.get())) {
    parts.add(s);
  }
  return parts;
}
/**
 * Verifies the caller may perform this request: state-changing methods
 * require a signed-in user over an authenticated (/a/) access path.
 */
private void checkUserSession(HttpServletRequest req)
    throws AuthException {
  CurrentUser user = globals.currentUser.get();
  if (isStateChange(req)) {
    if (user instanceof AnonymousUser) {
      throw new AuthException("Authentication required");
    }
    if (!globals.webSession.get().isAccessPathOk(AccessPath.REST_API)) {
      throw new AuthException("Invalid authentication method. In order to authenticate, prefix the REST endpoint URL with /a/ (e.g. http://example.com/a/projects/).");
    }
  }
  user.setAccessPath(AccessPath.REST_API);
}
/** True for the read-only HTTP verbs GET and HEAD. */
private static boolean isGetOrHead(HttpServletRequest req) {
  String method = req.getMethod();
  return "GET".equals(method) || "HEAD".equals(method);
}
/** True for any verb other than GET/HEAD, i.e. a mutation. */
private static boolean isStateChange(HttpServletRequest req) {
  return !isGetOrHead(req);
}
private void checkRequiresCapability(ViewData viewData) throws AuthException {
CapabilityUtils.checkRequiresCapability(globals.currentUser,
viewData.pluginName, viewData.view.getClass());
}
/**
 * Last-resort handler for unexpected errors: logs the full request line
 * and, when nothing has been written yet, replies with a generic 500.
 */
private static void handleException(Throwable err, HttpServletRequest req,
    HttpServletResponse res) throws IOException {
  StringBuilder uri = new StringBuilder(req.getRequestURI());
  if (!Strings.isNullOrEmpty(req.getQueryString())) {
    uri.append('?').append(req.getQueryString());
  }
  log.error(String.format("Error in %s %s", req.getMethod(), uri), err);

  // Once committed the status line cannot be changed; only reply if not.
  if (!res.isCommitted()) {
    res.reset();
    replyError(req, res, SC_INTERNAL_SERVER_ERROR, "Internal server error", err);
  }
}
/** Sends an error reply with the default (non-cacheable) cache policy. */
public static void replyError(HttpServletRequest req, HttpServletResponse res,
    int statusCode, String msg, @Nullable Throwable err) throws IOException {
  replyError(req, res, statusCode, msg, CacheControl.NONE, err);
}
/**
 * Sends an error reply: records the cause for request tracing, applies the
 * given cache policy, sets the status, and writes the message as the body.
 */
public static void replyError(HttpServletRequest req,
    HttpServletResponse res, int statusCode, String msg,
    CacheControl c, @Nullable Throwable err) throws IOException {
  if (err != null) {
    // Expose the root cause to the container's request logging.
    RequestUtil.setErrorTraceAttribute(req, err);
  }
  configureCaching(req, res, null, c);
  res.setStatus(statusCode);
  replyText(req, res, msg);
}
/**
 * Writes a plain text reply. Text that might be interpreted as HTML is
 * wrapped as a JSON string so a browser can never execute it.
 */
static void replyText(@Nullable HttpServletRequest req,
    HttpServletResponse res, String text) throws IOException {
  if ((req == null || isGetOrHead(req)) && isMaybeHTML(text)) {
    replyJson(req, res, ImmutableMultimap.of("pp", "0"), new JsonPrimitive(text));
  } else {
    String body = text.endsWith("\n") ? text : text + "\n";
    replyBinaryResult(req, res,
        BinaryResult.create(body).setContentType("text/plain"));
  }
}
// Matches characters that could begin HTML markup or an entity reference.
private static final Pattern IS_HTML = Pattern.compile("[<&]");

/** Conservative check: any '<' or '&' is treated as potential HTML. */
private static boolean isMaybeHTML(String text) {
  return IS_HTML.matcher(text).find();
}
/** True when the client's Accept header includes the JSON MIME type. */
private static boolean acceptsJson(HttpServletRequest req) {
  if (req == null) {
    return false;
  }
  return isType(JSON_TYPE, req.getHeader(HttpHeaders.ACCEPT));
}
/** True when the client advertises gzip in Accept-Encoding. */
private static boolean acceptsGzip(HttpServletRequest req) {
  if (req == null) {
    return false;
  }
  String accepts = req.getHeader(HttpHeaders.ACCEPT_ENCODING);
  return accepts != null && accepts.contains("gzip");
}
/**
 * Loose MIME type match: accepts an exact match, a leading entry of a
 * comma-separated list, or any token of a parameterized header value.
 */
private static boolean isType(String expect, String given) {
  if (given == null) {
    return false;
  }
  if (expect.equals(given) || given.startsWith(expect + ",")) {
    return true;
  }
  // Tokenize on spaces, commas and semicolons (header parameters).
  for (String part : given.split("[ ,;][ ,;]*")) {
    if (expect.equals(part)) {
      return true;
    }
  }
  return false;
}
/**
 * Eagerly base64-encodes {@code bin} into an in-memory buffer so the
 * encoded length is known. The buffer is sized at 4/3 of the input,
 * rounded up.
 */
private static BinaryResult base64(BinaryResult bin)
    throws IOException {
  int max = 4 * IntMath.divide((int) bin.getContentLength(), 3, CEILING);
  TemporaryBuffer.Heap buf = heap(max);
  OutputStream encoded = BaseEncoding.base64().encodingStream(
      new OutputStreamWriter(buf, ISO_8859_1));
  bin.writeTo(encoded);
  encoded.close();
  return asBinaryResult(buf);
}
/** Eagerly gzips {@code bin} into an in-memory buffer (bounded at 20 MiB). */
private static BinaryResult compress(BinaryResult bin)
    throws IOException {
  TemporaryBuffer.Heap out = heap(20 << 20);
  GZIPOutputStream gzip = new GZIPOutputStream(out);
  bin.writeTo(gzip);
  gzip.close();
  return asBinaryResult(out).setContentType(bin.getContentType());
}
@SuppressWarnings("resource")
private static BinaryResult asBinaryResult(final TemporaryBuffer.Heap buf) {
return new BinaryResult() {
@Override
public void writeTo(OutputStream os) throws IOException {
buf.writeTo(os, null);
}
}.setContentLength(buf.length());
}
/** Creates an in-memory buffer holding at most {@code max} bytes. */
private static Heap heap(int max) {
  return new TemporaryBuffer.Heap(max);
}
@SuppressWarnings("serial")
private static class AmbiguousViewException extends Exception {
AmbiguousViewException(String message) {
super(message);
}
}
/** Pairs a resolved REST view with the name of the plugin that supplied it. */
private static class ViewData {
  // NOTE(review): both fields appear to be set only in the constructor and
  // could be final — confirm no reassignment elsewhere in the servlet.
  String pluginName;
  RestView<RestResource> view;

  ViewData(String pluginName, RestView<RestResource> view) {
    this.pluginName = pluginName;
    this.view = view;
  }
}
}
| Overruler/gerrit | gerrit-httpd/src/main/java/com/google/gerrit/httpd/restapi/RestApiServlet.java | Java | apache-2.0 | 37,717 |
/*
* Copyright (c) 2014 TIKINOU LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mythtv.services.api.v027.beans;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* <b>Auto-generated file, do not modify manually !!!!</b>
*
* @author Sebastien Astie
*/
@JsonIgnoreProperties( ignoreUnknown = true )
public class TitleInfo {

    // Program title as returned by the MythTV services API.
    @JsonProperty( "Title" )
    private String title;

    // Internet reference id used for metadata lookups
    // (semantics defined by the backend; not validated here).
    @JsonProperty( "Inetref" )
    private String inetref;

    /**
     * @return the title
     */
    public String getTitle() {
        return title;
    }

    /**
     * @param title the title to set
     */
    public void setTitle( String title ) {
        this.title = title;
    }

    /**
     * @return the inetref
     */
    public String getInetref() {
        return inetref;
    }

    /**
     * @param inetref the inetref to set
     */
    public void setInetref( String inetref ) {
        this.inetref = inetref;
    }

}
/* globals describe, it, expect, hot, cold, time, rxTestScheduler, expectObservable, expectSubscriptions */
var Rx = require('../../dist/cjs/Rx.KitchenSink');
var Observable = Rx.Observable;
describe('Observable.prototype.windowCount', function () {
  // Marble tests: the spacing inside hot()/cold() diagrams encodes virtual
  // time, so each expected inner window is written as a cold observable
  // aligned with the moment it is emitted by the outer observable.
  it('should emit windows with count 2 and skip 1', function () {
    var source = hot('^-a--b--c--d--|');
    var subs =       '^             !';
    var expected =   'u-v--x--y--z--|';
    var u = cold(    '--a--(b|)      ');
    var v = cold(      '---b--(c|)   ');
    var x = cold(         '---c--(d|)');
    var y = cold(            '---d--|');
    var z = cold(               '---|');
    var values = { u: u, v: v, x: x, y: y, z: z };

    var result = source.windowCount(2, 1);

    expectObservable(result).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  // skip defaults to the count when not given (non-overlapping windows).
  it('should emit windows with count 2, and skip unspecified', function () {
    var source = hot('--a--b--c--d--e--f--|');
    var subs =       '^                   !';
    var expected =   'x----y-----z-----w--|';
    var x = cold(    '--a--(b|)           ');
    var y = cold(         '---c--(d|)     ');
    var z = cold(               '---e--(f|)');
    var w = cold(                     '---|');
    var values = { x: x, y: y, z: z, w: w };

    var result = source.windowCount(2);

    expectObservable(result).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  it('should return empty if source is empty', function () {
    var source = cold('|');
    var subs =        '(^!)';
    var expected =    '(w|)';
    var w = cold('|');
    var values = { w: w };

    var result = source.windowCount(2, 1);

    expectObservable(result).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  it('should return Never if source if Never', function () {
    var source = cold('-');
    var subs =        '^';
    var expected =    'w';
    var w = cold('-');
    var expectedValues = { w: w };

    var result = source.windowCount(2, 1);

    expectObservable(result).toBe(expected, expectedValues);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  it('should propagate error from a just-throw source', function () {
    var source = cold('#');
    var subs =        '(^!)';
    var expected =    '(w#)';
    var w = cold('#');
    var expectedValues = { w: w };

    var result = source.windowCount(2, 1);

    expectObservable(result).toBe(expected, expectedValues);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  // An error on the source is forwarded to every window still open.
  it('should raise error if source raises error', function () {
    var source = hot('--a--b--c--d--e--f--#');
    var subs =       '^                   !';
    var expected =   'u-v--w--x--y--z--q--#';
    var u = cold(    '--a--b--(c|)         ');
    var v = cold(      '---b--c--(d|)      ');
    var w = cold(         '---c--d--(e|)   ');
    var x = cold(            '---d--e--(f|)');
    var y = cold(               '---e--f--#');
    var z = cold(                  '---f--#');
    var q = cold(                     '---#');
    var values = { u: u, v: v, w: w, x: x, y: y, z: z, q: q };

    var result = source.windowCount(3, 1);

    expectObservable(result).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  it('should dispose of inner windows once outer is unsubscribed early', function () {
    var source = hot('^-a--b--c--d--|');
    var subs =       '^        !     ';
    var expected =   'w-x--y--z-     ';
    var w = cold(    '--a--(b|)      ');
    var x = cold(      '---b--(c|)   ');
    var y = cold(         '---c-     ');
    var z = cold(            '--     ');
    var unsub =      '         !     ';
    var values = { w: w, x: x, y: y, z: z };

    var result = source.windowCount(2, 1);

    expectObservable(result, unsub).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });

  // After unsubscription the window Subjects must be disposed: a late
  // subscribe on a captured window is scheduled on rxTestScheduler and
  // must throw.
  it('should dispose window Subjects if the outer is unsubscribed early', function () {
    var source = hot('--a--b--c--d--e--f--g--h--|');
    var sourceSubs = '^        !                 ';
    var expected =   'x---------                 ';
    var x = cold(    '--a--b--c-                 ');
    var unsub =      '         !                 ';
    var late = time( '---------------|           ');
    var values = { x: x };

    var window;
    var result = source.windowCount(10, 10)
      .do(function (w) { window = w; });

    expectObservable(result, unsub).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(sourceSubs);
    rxTestScheduler.schedule(function () {
      expect(function () {
        window.subscribe();
      }).toThrowError('Cannot subscribe to a disposed Subject.');
    }, late);
  });

  it('should not break unsubscription chains when result is unsubscribed explicitly', function () {
    var source = hot('^-a--b--c--d--|');
    var subs =       '^        !     ';
    var expected =   'w-x--y--z-     ';
    var w = cold(    '--a--(b|)      ');
    var x = cold(      '---b--(c|)   ');
    var y = cold(         '---c-     ');
    var z = cold(            '--     ');
    var unsub =      '         !     ';
    var values = { w: w, x: x, y: y, z: z };

    // mergeMap wrappers on both sides exercise the unsubscription chain.
    var result = source
      .mergeMap(function (i) { return Observable.of(i); })
      .windowCount(2, 1)
      .mergeMap(function (i) { return Observable.of(i); });

    expectObservable(result, unsub).toBe(expected, values);
    expectSubscriptions(source.subscriptions).toBe(subs);
  });
});
/*
* This file is open source software, licensed to you under the terms
* of the Apache License, Version 2.0 (the "License"). See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. You may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (C) 2014 Cloudius Systems, Ltd.
*/
#pragma once
#include <memory>
#include <seastar/net/config.hh>
#include <seastar/net/net.hh>
#include <seastar/core/sstring.hh>
#include <seastar/util/program-options.hh>
namespace seastar {
namespace net {
/// DPDK configuration.
struct dpdk_options : public program_options::option_group {
    /// DPDK Port Index.
    ///
    /// Default: 0.
    program_options::value<unsigned> dpdk_port_index;
    /// \brief Enable HW Flow Control (on / off).
    ///
    /// Accepted values are the strings "on" and "off". Default: \p on.
    program_options::value<std::string> hw_fc;
    /// \cond internal
    dpdk_options(program_options::option_group* parent_group);
    /// \endcond
};
}
/// \cond internal
#ifdef SEASTAR_HAVE_DPDK
/// Creates a DPDK-backed network device driving the given port.
///
/// \param port_idx   DPDK port index to drive.
/// \param num_queues number of queue pairs to configure.
/// \param use_lro    enable large receive offload.
/// \param enable_fc  enable hardware flow control.
std::unique_ptr<net::device> create_dpdk_net_device(
                                    uint16_t port_idx = 0,
                                    uint16_t num_queues = 1,
                                    bool use_lro = true,
                                    bool enable_fc = true);

/// Creates a DPDK-backed network device from a parsed hardware configuration.
std::unique_ptr<net::device> create_dpdk_net_device(
                                    const net::hw_config& hw_cfg);

namespace dpdk {
/**
 * @return Number of bytes needed for mempool objects of each QP.
 */
uint32_t qp_mempool_obj_size(bool hugetlbfs_membackend);
}
/// \endcond
#endif // SEASTAR_HAVE_DPDK
}
| scylladb/seastar | include/seastar/net/dpdk.hh | C++ | apache-2.0 | 2,086 |
package ec2
import (
"encoding/base64"
"encoding/xml"
"fmt"
"net/url"
"strconv"
"strings"
"github.com/dynport/gocloud/aws"
)
// NewFromEnv builds an EC2 client configured from the standard AWS
// environment variables (via aws.NewFromEnv).
func NewFromEnv() *Client {
	return &Client{
		aws.NewFromEnv(),
	}
}
// Client talks to the AWS EC2 Query API; it embeds the generic signing
// client which supplies credentials, region and request signing.
type Client struct {
	*aws.Client
}
// Endpoint returns the EC2 API endpoint URL, qualified with the client's
// region when one is configured (e.g. https://eu-west-1.ec2.amazonaws.com).
func (client *Client) Endpoint() string {
	host := "ec2.amazonaws.com"
	if region := client.Client.Region; region != "" {
		host = region + "." + host
	}
	return "https://" + host
}
const (
	// API_VERSIONS_EC2 is the EC2 Query API version sent with every request.
	API_VERSIONS_EC2 = "2013-08-15"
	// CANONICAL_OWNER_ID is the AWS account id owning official Ubuntu images.
	CANONICAL_OWNER_ID = "099720109477"
	// SELF_OWNER_ID selects images owned by the calling account.
	SELF_OWNER_ID = "self"
	// Image-name glob patterns for DescribeImages filtering.
	UBUNTU_ALL           = "ubuntu/images/*"
	UBUNTU_PREFIX        = "ubuntu-*"
	UBUNTU_RARING_PREFIX = "ubuntu-raring*"
	UBUNTU_TRUSTY_PREFIX = "ubuntu-trusty*"
	UBUNTU_SAUCY_PREFIX  = "ubuntu-saucy*"

	ImagePrefixRaringAmd64 = "ubuntu-raring-13.04-amd64*"
)
// ImageFilter narrows image lookups by owner, name pattern and/or ids.
type ImageFilter struct {
	Owner    string
	Name     string
	ImageIds []string
}

// ImageList is a sortable slice of images (see Len/Swap/Less below).
type ImageList []*Image

// InstanceList is a convenience alias for a slice of instances.
type InstanceList []*Instance
// Len implements sort.Interface.
func (list ImageList) Len() int {
	return len(list)
}

// Swap implements sort.Interface.
func (list ImageList) Swap(a, b int) {
	list[a], list[b] = list[b], list[a]
}

// Less implements sort.Interface; note it orders by Name descending,
// so the lexicographically newest image names sort first.
func (list ImageList) Less(a, b int) bool {
	return list[a].Name > list[b].Name
}
// RunInstancesConfig describes an EC2 RunInstances launch request.
// Zero-valued fields are omitted from the request (see Values).
type RunInstancesConfig struct {
	ImageId                string                    `json:",omitempty"`
	MinCount               int                       `json:",omitempty"`
	MaxCount               int                       `json:",omitempty"`
	InstanceType           string                    `json:",omitempty"`
	AvailabilityZone       string                    `json:",omitempty"`
	KeyName                string                    `json:",omitempty"`
	SecurityGroups         []string                  `json:",omitempty"`
	SubnetId               string                    `json:",omitempty"`
	NetworkInterfaces      []*CreateNetworkInterface `json:",omitempty"`
	BlockDeviceMappings    []*BlockDeviceMapping     `json:",omitempty"`
	UserData               string                    `json:",omitempty"`
	IamInstanceProfileName string                    `json:",omitempty"`
	EbsOptimized           bool                      `json:",omitempty"`
}
// Values serialises the launch configuration into RunInstances query
// parameters. MinCount and MaxCount default to 1; ImageId is mandatory.
// Note: this method mutates config (it writes the defaulted counts back).
func (config *RunInstancesConfig) Values() (url.Values, error) {
	values := url.Values{}
	if config.MinCount == 0 {
		config.MinCount = 1
	}
	if config.MaxCount == 0 {
		config.MaxCount = 1
	}
	if config.ImageId == "" {
		return nil, fmt.Errorf("ImageId must be provided")
	}
	values.Add("MinCount", strconv.Itoa(config.MinCount))
	values.Add("MaxCount", strconv.Itoa(config.MaxCount))
	values.Add("ImageId", config.ImageId)

	if config.EbsOptimized {
		values.Add("EbsOptimized", "true")
	}

	if config.UserData != "" {
		// User data must be base64 encoded on the wire.
		values.Add("UserData", b64.EncodeToString([]byte(config.UserData)))
	}

	if config.IamInstanceProfileName != "" {
		values.Add("IamInstanceProfile.Name", config.IamInstanceProfileName)
	}

	if config.InstanceType != "" {
		values.Add("InstanceType", config.InstanceType)
	}
	if config.KeyName != "" {
		values.Add("KeyName", config.KeyName)
	}
	if config.AvailabilityZone != "" {
		values.Add("Placement.AvailabilityZone", config.AvailabilityZone)
	}
	if len(config.NetworkInterfaces) > 0 {
		// When explicit NICs are given, subnet/security groups are carried
		// on the NICs and the top-level SubnetId/SecurityGroups are ignored.
		// NOTE(review): these list indices are 0-based while other calls in
		// this file number list parameters from 1 (the documented AWS Query
		// API convention) — confirm EC2 accepts NetworkInterface.0.* /
		// SecurityGroupId.0 before relying on multiple entries.
		for i, nic := range config.NetworkInterfaces {
			idx := strconv.Itoa(i)
			values.Add("NetworkInterface."+idx+".DeviceIndex", idx)
			values.Add("NetworkInterface."+idx+".AssociatePublicIpAddress", "true")
			values.Add("NetworkInterface."+idx+".SubnetId", nic.SubnetId)

			for i, sg := range nic.SecurityGroupIds {
				values.Add("NetworkInterface."+idx+".SecurityGroupId."+strconv.Itoa(i), sg)
			}
		}
	} else {
		for i, sg := range config.SecurityGroups {
			values.Add("SecurityGroupId."+strconv.Itoa(i+1), sg)
		}
		values.Add("SubnetId", config.SubnetId)
	}

	for i, bdm := range config.BlockDeviceMappings {
		// NOTE(review): 0-based list index here as well — see note above.
		prefix := fmt.Sprintf("BlockDeviceMapping.%d", i)
		if bdm.DeviceName == "" {
			return nil, fmt.Errorf("DeviceName must be set for all BlockDeviceMappings")
		}
		values.Add(prefix+".DeviceName", bdm.DeviceName)
		if ebs := bdm.Ebs; ebs != nil {
			prefix := prefix + ".Ebs"
			if ebs.VolumeSize > 0 {
				values.Add(prefix+".VolumeSize", strconv.Itoa(ebs.VolumeSize))
			}
			if ebs.Iops > 0 {
				values.Add(prefix+".Iops", strconv.Itoa(ebs.Iops))
			}
			if ebs.DeleteOnTermination {
				values.Add(prefix+".DeleteOnTermination", "true")
			}
			if ebs.Encrypted {
				values.Add(prefix+".Encrypted", "true")
			}
			if ebs.SnapshotId != "" {
				values.Add(prefix+".SnapshotId", ebs.SnapshotId)
			}
			if ebs.VolumeType != "" {
				values.Add(prefix+".VolumeType", ebs.VolumeType)
			}
		}
	}
	return values, nil
}
// AddPublicIp moves the configured subnet/security groups onto a network
// interface that requests a public IP address. Requires SubnetId to be set.
//
// NOTE(review): despite the name, this REPLACES any existing
// NetworkInterfaces with the single new one, yet the new DeviceIndex is
// computed from the pre-existing slice length — confirm whether append
// semantics were intended when NetworkInterfaces is non-empty.
func (config *RunInstancesConfig) AddPublicIp() error {
	if config.SubnetId == "" {
		return fmt.Errorf("SubnetId must be set")
	}
	nic := &CreateNetworkInterface{
		DeviceIndex: len(config.NetworkInterfaces), AssociatePublicIpAddress: true, SubnetId: config.SubnetId,
		SecurityGroupIds: config.SecurityGroups,
	}
	config.NetworkInterfaces = []*CreateNetworkInterface{nic}
	return nil
}
// queryForAction renders the query string common to all EC2 calls:
// the pinned API version plus the named action (keys are sorted by Encode).
func queryForAction(action string) string {
	values := url.Values{
		"Version": {API_VERSIONS_EC2},
		"Action":  {action},
	}
	return values.Encode()
}
// DescribeTags fetches the tags visible to the account via the
// DescribeTags action.
func (client *Client) DescribeTags() (tags TagList, e error) {
	query := queryForAction("DescribeTags")
	raw, e := client.DoSignedRequest("GET", client.Endpoint(), query, nil)
	if e != nil {
		return tags, e
	}
	rsp := &DescribeTagsResponse{}
	e = xml.Unmarshal(raw.Content, rsp)
	if e != nil {
		return tags, e
	}
	return rsp.Tags, e
}
// CreateTags attaches each key/value tag to every resource in resourceIds.
//
// AWS Query API list parameters are 1-based, so ResourceId.N now counts
// from 1 (previously it started at 0, inconsistent with the Tag.N counter
// below and with the other list parameters built in this package).
func (client *Client) CreateTags(resourceIds []string, tags map[string]string) error {
	values := &url.Values{}
	for i, id := range resourceIds {
		values.Add("ResourceId."+strconv.Itoa(i+1), id)
	}
	tagsCount := 1
	for k, v := range tags {
		prefix := fmt.Sprintf("Tag.%d.", tagsCount)
		values.Add(prefix+"Key", k)
		values.Add(prefix+"Value", v)
		tagsCount++
	}
	query := queryForAction("CreateTags") + "&" + values.Encode()
	_, e := client.DoSignedRequest("POST", client.Endpoint(), query, nil)
	if e != nil {
		return e
	}
	return nil
}
// TerminateInstances terminates the given EC2 instances.
//
// InstanceId.N list parameters are numbered from 1 per the AWS Query API
// convention (previously numbered from 0).
func (client *Client) TerminateInstances(ids []string) (*aws.Response, error) {
	query := queryForAction("TerminateInstances")
	for i, id := range ids {
		query += fmt.Sprintf("&InstanceId.%d=%s", i+1, id)
	}
	// NOTE(review): the EC2 Query API is normally driven with GET/POST;
	// confirm DoSignedRequest("DELETE", ...) is intended here.
	return client.DoSignedRequest("DELETE", client.Endpoint(), query, nil)
}
// Error is a single error entry in an EC2 error response document.
type Error struct {
	Code    string `xml:"Code"`
	Message string `xml:"Message"`
}

// ErrorResponse mirrors the XML document EC2 returns on failure; its root
// element ("Response") is distinct from success responses, which is how
// callers detect an error payload.
type ErrorResponse struct {
	XMLName   xml.Name `xml:"Response"`
	RequestID string   `xml:"RequestID"`
	Errors    []*Error `xml:"Errors>Error"`
}

// ErrorStrings flattens all errors into one "Code: Message, ..." string.
func (er *ErrorResponse) ErrorStrings() string {
	out := []string{}
	for _, e := range er.Errors {
		out = append(out, fmt.Sprintf("%s: %s", e.Code, e.Message))
	}
	return strings.Join(out, ", ")
}
// RunInstancesResponse mirrors the XML returned by a successful
// RunInstances call.
type RunInstancesResponse struct {
	XMLName       xml.Name    `xml:"RunInstancesResponse"`
	RequestId     string      `xml:"requestId"`
	ReservationId string      `xml:"reservationId"`
	OwnerId       string      `xml:"ownerId"`
	Instances     []*Instance `xml:"instancesSet>item"`
}

// b64 is the encoding applied to the UserData parameter.
var b64 = base64.StdEncoding
// RunInstances launches the instances described by config and returns the
// instances EC2 reports back.
func (client *Client) RunInstances(config *RunInstancesConfig) (list InstanceList, e error) {
	values, e := config.Values()
	if e != nil {
		return nil, e
	}
	query := queryForAction("RunInstances") + "&" + values.Encode()
	raw, e := client.DoSignedRequest("POST", client.Endpoint(), query, nil)
	if e != nil {
		return list, e
	}
	// Error payloads use a distinct root element ("Response"), so a
	// successful unmarshal here means AWS rejected the request.
	er := &ErrorResponse{}
	if e := xml.Unmarshal(raw.Content, er); e == nil {
		// Use a constant format string: the AWS error text may itself
		// contain '%' (fmt.Errorf(er.ErrorStrings()) trips `go vet`).
		return nil, fmt.Errorf("%s", er.ErrorStrings())
	}
	rsp := &RunInstancesResponse{}
	e = xml.Unmarshal(raw.Content, rsp)
	if e != nil {
		return list, e
	}
	return InstanceList(rsp.Instances), nil
}
// DescribeInstancesOptions restricts DescribeInstances by instance id
// and/or arbitrary EC2 filters; the zero value selects everything.
type DescribeInstancesOptions struct {
	InstanceIds []string
	Filters     []*Filter
}
// DescribeInstancesWithOptions lists instances matching the given options;
// a nil options value lists everything.
func (client *Client) DescribeInstancesWithOptions(options *DescribeInstancesOptions) (instances []*Instance, e error) {
	if options == nil {
		options = &DescribeInstancesOptions{}
	}
	values := url.Values{"Version": {API_VERSIONS_EC2}, "Action": {"DescribeInstances"}}
	if len(options.InstanceIds) > 0 {
		// InstanceId.N is 1-based per the AWS Query API convention.
		for i, id := range options.InstanceIds {
			values.Add("InstanceId."+strconv.Itoa(i+1), id)
		}
	}
	applyFilters(values, options.Filters)
	raw, e := client.DoSignedRequest("GET", client.Endpoint(), values.Encode(), nil)
	if e != nil {
		return instances, e
	}
	rsp := &DescribeInstancesResponse{}
	e = xml.Unmarshal(raw.Content, rsp)
	if e != nil {
		// Include the raw payload to aid debugging malformed responses.
		e = fmt.Errorf("%s: %s", e.Error(), string(raw.Content))
		return instances, e
	}
	return rsp.Instances(), nil
}
// DescribeInstances lists all instances, unfiltered.
func (client *Client) DescribeInstances() (instances []*Instance, e error) {
	return client.DescribeInstancesWithOptions(nil)
}
| dynport/gocloud | aws/ec2/client.go | GO | apache-2.0 | 8,627 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.websocket;
import java.io.InputStream;
import java.io.Reader;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.nio.ByteBuffer;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import javax.websocket.CloseReason.CloseCode;
import javax.websocket.CloseReason.CloseCodes;
import javax.websocket.Decoder;
import javax.websocket.Decoder.Binary;
import javax.websocket.Decoder.BinaryStream;
import javax.websocket.Decoder.Text;
import javax.websocket.Decoder.TextStream;
import javax.websocket.DeploymentException;
import javax.websocket.Encoder;
import javax.websocket.EndpointConfig;
import javax.websocket.MessageHandler;
import javax.websocket.PongMessage;
import javax.websocket.Session;
import org.apache.tomcat.util.res.StringManager;
import org.apache.tomcat.websocket.pojo.PojoMessageHandlerWholeBinary;
import org.apache.tomcat.websocket.pojo.PojoMessageHandlerWholeText;
/**
* Utility class for internal use only within the
* {@link org.apache.tomcat.websocket} package.
*/
public class Util {

    private static final StringManager sm =
            StringManager.getManager(Constants.PACKAGE_NAME);

    // Pool of PRNGs used by generateMask(); SecureRandom is not thread-safe,
    // so instances are checked out and returned around each use.
    private static final Queue<SecureRandom> randoms =
            new ConcurrentLinkedQueue<>();

    private Util() {
        // Hide default constructor
    }

    /** Returns {@code true} if the opcode marks a control frame (bit 0x08 set). */
    static boolean isControl(byte opCode) {
        return (opCode & 0x08) > 0;
    }

    /** Returns {@code true} if the opcode marks a text data frame. */
    static boolean isText(byte opCode) {
        return opCode == Constants.OPCODE_TEXT;
    }

    /**
     * Maps a numeric close code received on the wire to a {@link CloseCode}.
     * Codes 3000-4999 (registered / private use) are treated as a normal
     * closure; codes that must not appear in a close frame, or that are
     * unknown, map to {@link CloseCodes#PROTOCOL_ERROR}.
     */
    static CloseCode getCloseCode(int code) {
        if (code > 2999 && code < 5000) {
            return CloseCodes.NORMAL_CLOSURE;
        }
        switch (code) {
        case 1000:
            return CloseCodes.NORMAL_CLOSURE;
        case 1001:
            return CloseCodes.GOING_AWAY;
        case 1002:
            return CloseCodes.PROTOCOL_ERROR;
        case 1003:
            return CloseCodes.CANNOT_ACCEPT;
        case 1004:
            // Should not be used in a close frame
            // return CloseCodes.RESERVED;
            return CloseCodes.PROTOCOL_ERROR;
        case 1005:
            // Should not be used in a close frame
            // return CloseCodes.NO_STATUS_CODE;
            return CloseCodes.PROTOCOL_ERROR;
        case 1006:
            // Should not be used in a close frame
            // return CloseCodes.CLOSED_ABNORMALLY;
            return CloseCodes.PROTOCOL_ERROR;
        case 1007:
            return CloseCodes.NOT_CONSISTENT;
        case 1008:
            return CloseCodes.VIOLATED_POLICY;
        case 1009:
            return CloseCodes.TOO_BIG;
        case 1010:
            return CloseCodes.NO_EXTENSION;
        case 1011:
            return CloseCodes.UNEXPECTED_CONDITION;
        case 1012:
            // Not in RFC6455
            // return CloseCodes.SERVICE_RESTART;
            return CloseCodes.PROTOCOL_ERROR;
        case 1013:
            // Not in RFC6455
            // return CloseCodes.TRY_AGAIN_LATER;
            return CloseCodes.PROTOCOL_ERROR;
        case 1015:
            // Should not be used in a close frame
            // return CloseCodes.TLS_HANDSHAKE_FAILURE;
            return CloseCodes.PROTOCOL_ERROR;
        default:
            return CloseCodes.PROTOCOL_ERROR;
        }
    }

    /** Generates the 4-byte random mask applied to client-to-server frames. */
    static byte[] generateMask() {
        // SecureRandom is not thread-safe so need to make sure only one thread
        // uses it at a time. In theory, the pool could grow to the same size
        // as the number of request processing threads. In reality it will be
        // a lot smaller.

        // Get a SecureRandom from the pool
        SecureRandom sr = randoms.poll();

        // If one isn't available, generate a new one
        if (sr == null) {
            try {
                sr = SecureRandom.getInstance("SHA1PRNG");
            } catch (NoSuchAlgorithmException e) {
                // Fall back to platform default
                sr = new SecureRandom();
            }
        }

        // Generate the mask
        byte[] result = new byte[4];
        sr.nextBytes(result);

        // Put the SecureRandom back in the pool
        randoms.add(sr);

        return result;
    }

    /** Resolves the message type a {@link MessageHandler} implementation handles. */
    static Class<?> getMessageType(MessageHandler listener) {
        return Util.getGenericType(MessageHandler.class,
                listener.getClass()).getClazz();
    }

    /** Resolves the target type a {@link Decoder} implementation decodes to. */
    public static Class<?> getDecoderType(Class<? extends Decoder> decoder) {
        return Util.getGenericType(Decoder.class, decoder).getClazz();
    }

    /** Resolves the source type an {@link Encoder} implementation encodes from. */
    static Class<?> getEncoderType(Class<? extends Encoder> encoder) {
        return Util.getGenericType(Encoder.class, encoder).getClazz();
    }

    /**
     * Walks the type hierarchy of {@code clazz} to determine the actual type
     * argument it supplies for the generic interface {@code type}, resolving
     * type variables and array dimensions along the way. Returns {@code null}
     * if the type cannot be determined.
     */
    private static <T> TypeResult getGenericType(Class<T> type,
            Class<? extends T> clazz) {

        // Look to see if this class implements the interface of interest

        // Get all the interfaces
        Type[] interfaces = clazz.getGenericInterfaces();
        for (Type iface : interfaces) {
            // Only need to check interfaces that use generics
            if (iface instanceof ParameterizedType) {
                ParameterizedType pi = (ParameterizedType) iface;
                // Look for the interface of interest
                if (pi.getRawType() instanceof Class) {
                    if (type.isAssignableFrom((Class<?>) pi.getRawType())) {
                        return getTypeParameter(
                                clazz, pi.getActualTypeArguments()[0]);
                    }
                }
            }
        }

        // Interface not found on this class. Look at the superclass.
        @SuppressWarnings("unchecked")
        Class<? extends T> superClazz =
                (Class<? extends T>) clazz.getSuperclass();

        TypeResult superClassTypeResult = getGenericType(type, superClazz);
        int dimension = superClassTypeResult.getDimension();
        if (superClassTypeResult.getIndex() == -1 && dimension == 0) {
            // Superclass implements interface and defines explicit type for
            // the interface of interest
            return superClassTypeResult;
        }

        if (superClassTypeResult.getIndex() > -1) {
            // Superclass implements interface and defines unknown type for
            // the interface of interest
            // Map that unknown type to the generic types defined in this class
            ParameterizedType superClassType =
                    (ParameterizedType) clazz.getGenericSuperclass();
            TypeResult result = getTypeParameter(clazz,
                    superClassType.getActualTypeArguments()[
                            superClassTypeResult.getIndex()]);
            result.incrementDimension(superClassTypeResult.getDimension());
            if (result.getClazz() != null && result.getDimension() > 0) {
                superClassTypeResult = result;
            } else {
                return result;
            }
        }

        if (superClassTypeResult.getDimension() > 0) {
            // Reconstruct the array class (e.g. "[Ljava.lang.String;") for
            // the resolved element type and dimension.
            StringBuilder className = new StringBuilder();
            for (int i = 0; i < dimension; i++) {
                className.append('[');
            }
            className.append('L');
            className.append(superClassTypeResult.getClazz().getCanonicalName());
            className.append(';');

            Class<?> arrayClazz;
            try {
                arrayClazz = Class.forName(className.toString());
            } catch (ClassNotFoundException e) {
                throw new IllegalArgumentException(e);
            }

            return new TypeResult(arrayClazz, -1, 0);
        }

        // Error will be logged further up the call stack
        return null;
    }

    /*
     * For a generic parameter, return either the Class used or if the type
     * is unknown, the index for the type in definition of the class
     */
    private static TypeResult getTypeParameter(Class<?> clazz, Type argType) {
        if (argType instanceof Class<?>) {
            return new TypeResult((Class<?>) argType, -1, 0);
        } else if (argType instanceof ParameterizedType) {
            return new TypeResult((Class<?>)((ParameterizedType) argType).getRawType(), -1, 0);
        } else if (argType instanceof GenericArrayType) {
            // Resolve the element type, then record one extra array dimension.
            Type arrayElementType = ((GenericArrayType) argType).getGenericComponentType();
            TypeResult result = getTypeParameter(clazz, arrayElementType);
            result.incrementDimension(1);
            return result;
        } else {
            TypeVariable<?>[] tvs = clazz.getTypeParameters();
            for (int i = 0; i < tvs.length; i++) {
                if (tvs[i].equals(argType)) {
                    return new TypeResult(null, i, 0);
                }
            }
            return null;
        }
    }

    /** True for primitive types and their standard wrapper classes. */
    public static boolean isPrimitive(Class<?> clazz) {
        if (clazz.isPrimitive()) {
            return true;
        } else if(clazz.equals(Boolean.class) ||
                clazz.equals(Byte.class) ||
                clazz.equals(Character.class) ||
                clazz.equals(Double.class) ||
                clazz.equals(Float.class) ||
                clazz.equals(Integer.class) ||
                clazz.equals(Long.class) ||
                clazz.equals(Short.class)) {
            return true;
        }
        return false;
    }

    /**
     * Converts a String to the given primitive/wrapper type (or returns it
     * unchanged for String).
     *
     * @throws IllegalArgumentException if the type is not supported
     */
    public static Object coerceToType(Class<?> type, String value) {
        if (type.equals(String.class)) {
            return value;
        } else if (type.equals(boolean.class) || type.equals(Boolean.class)) {
            return Boolean.valueOf(value);
        } else if (type.equals(byte.class) || type.equals(Byte.class)) {
            return Byte.valueOf(value);
        } else if (value.length() == 1 &&
                (type.equals(char.class) || type.equals(Character.class))) {
            return Character.valueOf(value.charAt(0));
        } else if (type.equals(double.class) || type.equals(Double.class)) {
            return Double.valueOf(value);
        } else if (type.equals(float.class) || type.equals(Float.class)) {
            return Float.valueOf(value);
        } else if (type.equals(int.class) || type.equals(Integer.class)) {
            return Integer.valueOf(value);
        } else if (type.equals(long.class) || type.equals(Long.class)) {
            return Long.valueOf(value);
        } else if (type.equals(short.class) || type.equals(Short.class)) {
            return Short.valueOf(value);
        } else {
            throw new IllegalArgumentException(sm.getString(
                    "util.invalidType", value, type.getName()));
        }
    }

    /**
     * Instantiates each decoder class (failing deployment if one is invalid)
     * and pairs it with the type it decodes to.
     */
    public static List<DecoderEntry> getDecoders(
            Class<? extends Decoder>[] decoderClazzes)
                    throws DeploymentException{

        List<DecoderEntry> result = new ArrayList<>();
        for (Class<? extends Decoder> decoderClazz : decoderClazzes) {
            // Need to instantiate decoder to ensure it is valid and that
            // deployment can be failed if it is not
            @SuppressWarnings("unused")
            Decoder instance;
            try {
                instance = decoderClazz.newInstance();
            } catch (InstantiationException | IllegalAccessException e) {
                throw new DeploymentException(
                        sm.getString("pojoMethodMapping.invalidDecoder",
                                decoderClazz.getName()), e);
            }
            DecoderEntry entry = new DecoderEntry(
                    Util.getDecoderType(decoderClazz), decoderClazz);
            result.add(entry);
        }

        return result;
    }

    /**
     * Determines how a {@link MessageHandler} should be wired into the frame
     * handling code: directly for the natively supported types
     * (String, ByteBuffer, PongMessage), via a wrapping POJO handler for
     * byte[], InputStream and Reader, or via matching decoders for
     * application types.
     *
     * @throws IllegalArgumentException if no usable mapping can be found
     */
    public static Set<MessageHandlerResult> getMessageHandlers(
            MessageHandler listener, EndpointConfig endpointConfig,
            Session session) {
        Class<?> target = Util.getMessageType(listener);

        // Will never be more than 2 types
        Set<MessageHandlerResult> results = new HashSet<>(2);

        // Simple cases - handlers already accepts one of the types expected by
        // the frame handling code
        if (String.class.isAssignableFrom(target)) {
            MessageHandlerResult result =
                    new MessageHandlerResult(listener,
                            MessageHandlerResultType.TEXT);
            results.add(result);
        } else if (ByteBuffer.class.isAssignableFrom(target)) {
            MessageHandlerResult result =
                    new MessageHandlerResult(listener,
                            MessageHandlerResultType.BINARY);
            results.add(result);
        } else if (PongMessage.class.isAssignableFrom(target)) {
            MessageHandlerResult result =
                    new MessageHandlerResult(listener,
                            MessageHandlerResultType.PONG);
            results.add(result);
        // Relatively simple cases - handler needs wrapping but no decoder to
        // convert it to one of the types expected by the frame handling code
        } else if (byte[].class.isAssignableFrom(target)) {
            MessageHandlerResult result = new MessageHandlerResult(
                    new PojoMessageHandlerWholeBinary(listener,
                            getOnMessageMethod(listener), session,
                            endpointConfig, null, new Object[1], 0, true, -1,
                            false, -1),
                    MessageHandlerResultType.BINARY);
            results.add(result);
        } else if (InputStream.class.isAssignableFrom(target)) {
            MessageHandlerResult result = new MessageHandlerResult(
                    new PojoMessageHandlerWholeBinary(listener,
                            getOnMessageMethod(listener), session,
                            endpointConfig, null, new Object[1], 0, true, -1,
                            true, -1),
                    MessageHandlerResultType.BINARY);
            results.add(result);
        } else if (Reader.class.isAssignableFrom(target)) {
            MessageHandlerResult result = new MessageHandlerResult(
                    new PojoMessageHandlerWholeText(listener,
                            getOnMessageMethod(listener), session,
                            endpointConfig, null, new Object[1], 0, true, -1,
                            -1),
                    MessageHandlerResultType.TEXT);
            results.add(result);
        } else {
            // More complex case - listener that requires a decoder
            DecoderMatch decoderMatch;
            try {
                List<Class<? extends Decoder>> decoders =
                        endpointConfig.getDecoders();
                @SuppressWarnings("unchecked")
                List<DecoderEntry> decoderEntries = getDecoders(
                        decoders.toArray(new Class[decoders.size()]));
                decoderMatch = new DecoderMatch(target, decoderEntries);
            } catch (DeploymentException e) {
                throw new IllegalArgumentException(e);
            }
            Method m = getOnMessageMethod(listener);
            if (decoderMatch.getBinaryDecoders().size() > 0) {
                MessageHandlerResult result = new MessageHandlerResult(
                        new PojoMessageHandlerWholeBinary(listener, m, session,
                                endpointConfig,
                                decoderMatch.getBinaryDecoders(), new Object[1],
                                0, false, -1, false, -1),
                        MessageHandlerResultType.BINARY);
                results.add(result);
            }
            if (decoderMatch.getTextDecoders().size() > 0) {
                MessageHandlerResult result = new MessageHandlerResult(
                        new PojoMessageHandlerWholeText(listener, m, session,
                                endpointConfig,
                                decoderMatch.getTextDecoders(), new Object[1],
                                0, false, -1, -1),
                        MessageHandlerResultType.TEXT);
                results.add(result);
            }
        }

        if (results.size() == 0) {
            throw new IllegalArgumentException(
                    sm.getString("wsSession.unknownHandler", listener, target));
        }

        return results;
    }

    /** Looks up the handler's onMessage(Object) method via reflection. */
    private static Method getOnMessageMethod(MessageHandler listener) {
        try {
            return listener.getClass().getMethod("onMessage", Object.class);
        } catch (NoSuchMethodException | SecurityException e) {
            throw new IllegalArgumentException(
                    sm.getString("util.invalidMessageHandler"), e);
        }
    }

    /**
     * Partitions the decoders that can produce {@code target} into text and
     * binary decoders, honouring the rule that a stream decoder consumes the
     * whole message and therefore terminates the matching process.
     */
    public static class DecoderMatch {

        private final List<Class<? extends Decoder>> textDecoders =
                new ArrayList<>();
        private final List<Class<? extends Decoder>> binaryDecoders =
                new ArrayList<>();

        public DecoderMatch(Class<?> target, List<DecoderEntry> decoderEntries) {
            for (DecoderEntry decoderEntry : decoderEntries) {
                if (decoderEntry.getClazz().isAssignableFrom(target)) {
                    if (Binary.class.isAssignableFrom(
                            decoderEntry.getDecoderClazz())) {
                        binaryDecoders.add(decoderEntry.getDecoderClazz());
                        // willDecode() method means this decoder may or may not
                        // decode a message so need to carry on checking for
                        // other matches
                    } else if (BinaryStream.class.isAssignableFrom(
                            decoderEntry.getDecoderClazz())) {
                        binaryDecoders.add(decoderEntry.getDecoderClazz());
                        // Stream decoders have to process the message so no
                        // more decoders can be matched
                        break;
                    } else if (Text.class.isAssignableFrom(
                            decoderEntry.getDecoderClazz())) {
                        textDecoders.add(decoderEntry.getDecoderClazz());
                        // willDecode() method means this decoder may or may not
                        // decode a message so need to carry on checking for
                        // other matches
                    } else if (TextStream.class.isAssignableFrom(
                            decoderEntry.getDecoderClazz())) {
                        textDecoders.add(decoderEntry.getDecoderClazz());
                        // Stream decoders have to process the message so no
                        // more decoders can be matched
                        break;
                    } else {
                        throw new IllegalArgumentException(
                                sm.getString("util.unknownDecoderType"));
                    }
                }
            }
        }

        public List<Class<? extends Decoder>> getTextDecoders() {
            return textDecoders;
        }

        public List<Class<? extends Decoder>> getBinaryDecoders() {
            return binaryDecoders;
        }

        public boolean hasMatches() {
            return (textDecoders.size() > 0) || (binaryDecoders.size() > 0);
        }
    }

    /**
     * Result of resolving a generic type parameter: either a concrete class
     * (index == -1), or the index of an unresolved type variable on the
     * declaring class; dimension counts array levels accumulated so far.
     */
    private static class TypeResult {
        private final Class<?> clazz;
        private final int index;
        private int dimension;

        public TypeResult(Class<?> clazz, int index, int dimension) {
            this.clazz= clazz;
            this.index = index;
            this.dimension = dimension;
        }

        public Class<?> getClazz() {
            return clazz;
        }

        public int getIndex() {
            return index;
        }

        public int getDimension() {
            return dimension;
        }

        public void incrementDimension(int inc) {
            dimension += inc;
        }
    }
}
| wenzhucjy/tomcat_source | tomcat-8.0.9-sourcecode/java/org/apache/tomcat/websocket/Util.java | Java | apache-2.0 | 21,751 |
package jepperscore.scraper.common.rcon;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
/**
 * This interface provides the functions for talking to a game's RCON
 * (remote console) server.
 *
 * @author Chuck
 */
public interface RconClient {
    /**
     * Sends a command to the RCON server.
     *
     * @param command The command to send.
     * @return The response lines from the command, or {@code null} if no
     *         response is available (per {@link CheckForNull}).
     */
    @CheckForNull
    String[] sendCommand(@Nonnull String command);

    /**
     * Disconnects any open RCON session.
     */
    void disconnect();
}
| SiphonSquirrel/jepperscore | scrapers/scraper-common/src/main/java/jepperscore/scraper/common/rcon/RconClient.java | Java | apache-2.0 | 518 |
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.coeus.common.budget.framework.period;
import org.kuali.rice.krad.rules.rule.BusinessRule;
public interface SaveBudgetPeriodRule extends BusinessRule {
    /**
     * Rule invoked upon saving a budget period on a
     * <code>{@link org.kuali.coeus.common.budget.framework.core.BudgetDocument}</code>.
     *
     * @param saveBudgetPeriodEvent event carrying the budget period being saved
     * @return boolean true if the save passes all business rules
     */
    public boolean processSaveBudgetPeriodBusinessRules(SaveBudgetPeriodEvent saveBudgetPeriodEvent);
}
| blackcathacker/kc.preclean | coeus-code/src/main/java/org/kuali/coeus/common/budget/framework/period/SaveBudgetPeriodRule.java | Java | apache-2.0 | 1,082 |
package io.github.rcarlosdasilva.weixin.model.request.media;
import io.github.rcarlosdasilva.weixin.common.ApiAddress;
import io.github.rcarlosdasilva.weixin.model.request.base.BasicWeixinRequest;
/**
 * Request model for fetching a temporary high-quality audio media file from
 * the Weixin API.
 */
public class MediaGetTemporaryWithHqAudioRequest extends BasicWeixinRequest {

    private String mediaId;

    /** Binds this request to the temporary HQ-audio download endpoint. */
    public MediaGetTemporaryWithHqAudioRequest() {
        this.path = ApiAddress.URL_MEDIA_TEMPORARY_GET_HQ_AUDIO;
    }

    public void setMediaId(String mediaId) {
        this.mediaId = mediaId;
    }

    /**
     * Renders the complete request URL, including the access token and the
     * media id query parameters.
     */
    @Override
    public String toString() {
        return this.path
            + "?access_token=" + this.accessToken
            + "&media_id=" + this.mediaId;
    }
}
| rcarlosdasilva/weixin | src/main/java/io/github/rcarlosdasilva/weixin/model/request/media/MediaGetTemporaryWithHqAudioRequest.java | Java | apache-2.0 | 721 |
<?php
/**
*
* Publishing and Managing State
* ======
*
* After applying changes, the Editor queues a worker to publish mail, feed,
* and notifications, and to perform other background work like updating search
* indexes. This allows it to do this work without impacting performance for
* users.
*
* When work is moved to the daemons, the Editor state is serialized by
* @{method:getWorkerState}, then reloaded in a daemon process by
* @{method:loadWorkerState}. **This is fragile.**
*
* State is not persisted into the daemons by default, because we can not send
* arbitrary objects into the queue. This means the default behavior of any
* state properties is to reset to their defaults without warning prior to
* publishing.
*
* The easiest way to avoid this is to keep Editors stateless: the overwhelming
* majority of Editors can be written statelessly. If you need to maintain
* state, you can either:
*
* - not require state to exist during publishing; or
* - pass state to the daemons by implementing @{method:getCustomWorkerState}
* and @{method:loadCustomWorkerState}.
*
* This architecture isn't ideal, and we may eventually split this class into
* "Editor" and "Publisher" parts to make it more robust. See T6367 for some
* discussion and context.
*
* @task mail Sending Mail
* @task feed Publishing Feed Stories
* @task search Search Index
* @task files Integration with Files
* @task workers Managing Workers
*/
abstract class PhabricatorApplicationTransactionEditor
extends PhabricatorEditor {
private $contentSource;
private $object;
private $xactions;
private $isNewObject;
private $mentionedPHIDs;
private $continueOnNoEffect;
private $continueOnMissingFields;
private $parentMessageID;
private $heraldAdapter;
private $heraldTranscript;
private $subscribers;
private $unmentionablePHIDMap = array();
private $applicationEmail;
private $isPreview;
private $isHeraldEditor;
private $isInverseEdgeEditor;
private $actingAsPHID;
private $disableEmail;
private $heraldEmailPHIDs = array();
private $heraldForcedEmailPHIDs = array();
private $heraldHeader;
private $mailToPHIDs = array();
private $mailCCPHIDs = array();
private $feedNotifyPHIDs = array();
private $feedRelatedPHIDs = array();
/**
* Get the class name for the application this editor is a part of.
*
* Uninstalling the application will disable the editor.
*
* @return string Editor's application class name.
*/
abstract public function getEditorApplicationClass();
/**
* Get a description of the objects this editor edits, like "Differential
* Revisions".
*
* @return string Human readable description of edited objects.
*/
abstract public function getEditorObjectsDescription();
/**
 * Attribute edits to this PHID instead of the actor's own PHID.
 *
 * @return this
 */
public function setActingAsPHID($acting_as_phid) {
  $this->actingAsPHID = $acting_as_phid;
  return $this;
}

/**
 * PHID edits are attributed to: an explicit override if one was set,
 * otherwise the actor's PHID.
 */
public function getActingAsPHID() {
  if ($this->actingAsPHID) {
    return $this->actingAsPHID;
  }
  return $this->getActor()->getPHID();
}

/**
 * When the editor tries to apply transactions that have no effect, should
 * it raise an exception (default) or drop them and continue?
 *
 * Generally, you will set this flag for edits coming from "Edit" interfaces,
 * and leave it cleared for edits coming from "Comment" interfaces, so the
 * user will get a useful error if they try to submit a comment that does
 * nothing (e.g., empty comment with a status change that has already been
 * performed by another user).
 *
 * @param bool True to drop transactions without effect and continue.
 * @return this
 */
public function setContinueOnNoEffect($continue) {
  $this->continueOnNoEffect = $continue;
  return $this;
}

public function getContinueOnNoEffect() {
  return $this->continueOnNoEffect;
}

/**
 * When the editor tries to apply transactions which don't populate all of
 * an object's required fields, should it raise an exception (default) or
 * drop them and continue?
 *
 * For example, if a user adds a new required custom field (like "Severity")
 * to a task, all existing tasks won't have it populated. When users
 * manually edit existing tasks, it's usually desirable to have them provide
 * a severity. However, other operations (like batch editing just the
 * owner of a task) will fail by default.
 *
 * By setting this flag for edit operations which apply to specific fields
 * (like the priority, batch, and merge editors in Maniphest), these
 * operations can continue to function even if an object is outdated.
 *
 * @param bool True to continue when transactions don't completely satisfy
 *             all required fields.
 * @return this
 */
public function setContinueOnMissingFields($continue_on_missing_fields) {
  $this->continueOnMissingFields = $continue_on_missing_fields;
  return $this;
}

public function getContinueOnMissingFields() {
  return $this->continueOnMissingFields;
}

/**
 * Not strictly necessary, but reply handlers ideally set this value to
 * make email threading work better.
 */
public function setParentMessageID($parent_message_id) {
  $this->parentMessageID = $parent_message_id;
  return $this;
}

public function getParentMessageID() {
  return $this->parentMessageID;
}
/**
 * True if the edited object had no PHID when the edit started (see
 * applyTransactions(), which computes this).
 */
public function getIsNewObject() {
  return $this->isNewObject;
}

protected function getMentionedPHIDs() {
  return $this->mentionedPHIDs;
}

/**
 * In preview mode the editor validates and renders transactions without
 * saving anything.
 *
 * @return this
 */
public function setIsPreview($is_preview) {
  $this->isPreview = $is_preview;
  return $this;
}

public function getIsPreview() {
  return $this->isPreview;
}

/**
 * Marks this editor as applying inverse edge transactions; such editors
 * skip the actual edge writes and Herald (see applyTransactions()).
 *
 * @return this
 */
public function setIsInverseEdgeEditor($is_inverse_edge_editor) {
  $this->isInverseEdgeEditor = $is_inverse_edge_editor;
  return $this;
}

public function getIsInverseEdgeEditor() {
  return $this->isInverseEdgeEditor;
}

/**
 * Marks this editor as the one applying Herald-generated transactions, so
 * it does not recursively trigger Herald again.
 *
 * @return this
 */
public function setIsHeraldEditor($is_herald_editor) {
  $this->isHeraldEditor = $is_herald_editor;
  return $this;
}

public function getIsHeraldEditor() {
  return $this->isHeraldEditor;
}

/**
 * Prevent this editor from generating email when applying transactions.
 *
 * @param bool True to disable email.
 * @return this
 */
public function setDisableEmail($disable_email) {
  $this->disableEmail = $disable_email;
  return $this;
}

public function getDisableEmail() {
  return $this->disableEmail;
}

/**
 * Map of PHIDs which should not be treated as mentions when processing
 * remarkup in this edit.
 *
 * @return this
 */
public function setUnmentionablePHIDMap(array $map) {
  $this->unmentionablePHIDMap = $map;
  return $this;
}

public function getUnmentionablePHIDMap() {
  return $this->unmentionablePHIDMap;
}

/**
 * Hook: subclasses may return false to suppress mention extraction for a
 * particular object/transaction set.
 */
protected function shouldEnableMentions(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return true;
}

public function setApplicationEmail(
  PhabricatorMetaMTAApplicationEmail $email) {
  $this->applicationEmail = $email;
  return $this;
}

public function getApplicationEmail() {
  return $this->applicationEmail;
}
/**
 * List the transaction types this editor supports, derived from the
 * interfaces the edited object implements. Subclasses extend this with
 * application-specific types.
 *
 * @return list<const> Supported transaction type constants.
 */
public function getTransactionTypes() {
  $object = $this->object;

  $supported = array();

  if ($object instanceof PhabricatorSubscribableInterface) {
    $supported[] = PhabricatorTransactions::TYPE_SUBSCRIBERS;
  }

  if ($object instanceof PhabricatorCustomFieldInterface) {
    $supported[] = PhabricatorTransactions::TYPE_CUSTOMFIELD;
  }

  if ($object instanceof HarbormasterBuildableInterface) {
    $supported[] = PhabricatorTransactions::TYPE_BUILDABLE;
  }

  if ($object instanceof PhabricatorTokenReceiverInterface) {
    $supported[] = PhabricatorTransactions::TYPE_TOKEN;
  }

  // Either projects or mentionables get edge transactions.
  $has_projects = ($object instanceof PhabricatorProjectInterface);
  $has_mentions = ($object instanceof PhabricatorMentionableInterface);
  if ($has_projects || $has_mentions) {
    $supported[] = PhabricatorTransactions::TYPE_EDGE;
  }

  if ($object instanceof PhabricatorSpacesInterface) {
    $supported[] = PhabricatorTransactions::TYPE_SPACE;
  }

  return $supported;
}
/**
 * Populate a transaction's old and new values from the current object
 * state. Some transactions (e.g. custom fields) arrive with the old value
 * already set and skip recomputation.
 */
private function adjustTransactionValues(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  if ($xaction->shouldGenerateOldValue()) {
    $old = $this->getTransactionOldValue($object, $xaction);
    $xaction->setOldValue($old);
  }

  $new = $this->getTransactionNewValue($object, $xaction);
  $xaction->setNewValue($new);
}
/**
 * Compute the "old value" for a transaction from the object's current
 * state. Unknown types are delegated to getCustomTransactionOldValue().
 */
private function getTransactionOldValue(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_SUBSCRIBERS:
      // $this->subscribers is loaded before values are adjusted.
      return array_values($this->subscribers);
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
      return $object->getViewPolicy();
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
      return $object->getEditPolicy();
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
      return $object->getJoinPolicy();
    case PhabricatorTransactions::TYPE_SPACE:
      $space_phid = $object->getSpacePHID();
      if ($space_phid === null) {
        if ($this->getIsNewObject()) {
          // In this case, just return `null` so we know this is the initial
          // transaction and it should be hidden.
          return null;
        }

        // Existing object with no space: fall back to the default space.
        $default_space = PhabricatorSpacesNamespaceQuery::getDefaultSpace();
        if ($default_space) {
          $space_phid = $default_space->getPHID();
        }
      }

      return $space_phid;
    case PhabricatorTransactions::TYPE_EDGE:
      $edge_type = $xaction->getMetadataValue('edge:type');
      if (!$edge_type) {
        throw new Exception(
          pht(
            "Edge transaction has no '%s'!",
            'edge:type'));
      }

      // Objects which have not been saved yet have no edges.
      $old_edges = array();
      if ($object->getPHID()) {
        $edge_src = $object->getPHID();

        $old_edges = id(new PhabricatorEdgeQuery())
          ->withSourcePHIDs(array($edge_src))
          ->withEdgeTypes(array($edge_type))
          ->needEdgeData(true)
          ->execute();

        $old_edges = $old_edges[$edge_src][$edge_type];
      }
      return $old_edges;
    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      // NOTE: Custom fields have their old value pre-populated when they are
      // built by PhabricatorCustomFieldList.
      return $xaction->getOldValue();
    case PhabricatorTransactions::TYPE_COMMENT:
      return null;
    default:
      return $this->getCustomTransactionOldValue($object, $xaction);
  }
}
/**
 * Compute the effective "new value" for a transaction, normalizing input
 * where needed. Unknown types are delegated to
 * getCustomTransactionNewValue().
 */
private function getTransactionNewValue(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_SUBSCRIBERS:
      // Expand +/-/= style subscriber specifications into a flat list.
      return $this->getPHIDTransactionNewValue($xaction);
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
    case PhabricatorTransactions::TYPE_BUILDABLE:
    case PhabricatorTransactions::TYPE_TOKEN:
    case PhabricatorTransactions::TYPE_INLINESTATE:
      // These types take their new value verbatim.
      return $xaction->getNewValue();
    case PhabricatorTransactions::TYPE_SPACE:
      $space_phid = $xaction->getNewValue();
      if (!strlen($space_phid)) {
        // If an install has no Spaces or the Spaces controls are not visible
        // to the viewer, we might end up with the empty string here instead
        // of a strict `null`, because some controller just used `getStr()`
        // to read the space PHID from the request.
        // Just make this work like callers might reasonably expect so we
        // don't need to handle this specially in every EditController.
        return $this->getActor()->getDefaultSpacePHID();
      } else {
        return $space_phid;
      }
    case PhabricatorTransactions::TYPE_EDGE:
      return $this->getEdgeTransactionNewValue($xaction);
    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      // Custom fields interpret their own transaction payloads.
      $field = $this->getCustomFieldForTransaction($object, $xaction);
      return $field->getNewValueFromApplicationTransactions($xaction);
    case PhabricatorTransactions::TYPE_COMMENT:
      return null;
    default:
      return $this->getCustomTransactionNewValue($object, $xaction);
  }
}
/**
 * Hook: compute old values for application-specific transaction types.
 * Subclasses supporting custom types must override; the default throws.
 */
protected function getCustomTransactionOldValue(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  throw new Exception(pht('Capability not supported!'));
}

/**
 * Hook: compute new values for application-specific transaction types.
 * Subclasses supporting custom types must override; the default throws.
 */
protected function getCustomTransactionNewValue(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  throw new Exception(pht('Capability not supported!'));
}
/**
 * Decide whether applying a transaction would actually change anything.
 * No-op transactions may be dropped or rejected (see
 * setContinueOnNoEffect()).
 */
protected function transactionHasEffect(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_COMMENT:
      // A comment has an effect exactly when it carries comment text.
      return $xaction->hasComment();
    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      $field = $this->getCustomFieldForTransaction($object, $xaction);
      return $field->getApplicationTransactionHasEffect($xaction);
    case PhabricatorTransactions::TYPE_EDGE:
      // A straight value comparison here doesn't always get the right
      // result, because newly added edges aren't fully populated. Instead,
      // compare the changes in a more granular way.
      $old = $xaction->getOldValue();
      $new = $xaction->getNewValue();

      $old_dst = array_keys($old);
      $new_dst = array_keys($new);

      // NOTE: For now, we don't consider edge reordering to be a change.
      // We have very few order-dependent edges and effectively no order
      // oriented UI. This might change in the future.
      sort($old_dst);
      sort($new_dst);

      if ($old_dst !== $new_dst) {
        // We've added or removed edges, so this transaction definitely
        // has an effect.
        return true;
      }

      // We haven't added or removed edges, but we might have changed
      // edge data.
      foreach ($old as $key => $old_value) {
        $new_value = $new[$key];
        if ($old_value['data'] !== $new_value['data']) {
          return true;
        }
      }

      return false;
  }

  // Default: any difference between old and new value is an effect.
  return ($xaction->getOldValue() !== $xaction->getNewValue());
}
/**
 * Hook: return true if this editor needs applyInitialEffects() to run
 * before transaction values are adjusted (default: no).
 */
protected function shouldApplyInitialEffects(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return false;
}

/**
 * Hook: perform early setup work inside the database transaction. Only
 * called when shouldApplyInitialEffects() returns true, so the default
 * implementation just throws.
 */
protected function applyInitialEffects(
  PhabricatorLiskDAO $object,
  array $xactions) {
  throw new PhutilMethodNotImplementedException();
}
/**
 * Apply a transaction's effects on the object itself (before $object->save()).
 * Built-in types route to applyBuiltinInternalTransaction(); everything
 * else goes to the applyCustomInternalTransaction() hook.
 */
private function applyInternalEffects(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      $field = $this->getCustomFieldForTransaction($object, $xaction);
      return $field->applyApplicationTransactionInternalEffects($xaction);
    case PhabricatorTransactions::TYPE_BUILDABLE:
    case PhabricatorTransactions::TYPE_TOKEN:
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
    case PhabricatorTransactions::TYPE_SUBSCRIBERS:
    case PhabricatorTransactions::TYPE_INLINESTATE:
    case PhabricatorTransactions::TYPE_EDGE:
    case PhabricatorTransactions::TYPE_SPACE:
    case PhabricatorTransactions::TYPE_COMMENT:
      return $this->applyBuiltinInternalTransaction($object, $xaction);
  }

  return $this->applyCustomInternalTransaction($object, $xaction);
}
/**
 * Apply a transaction's effects on the world outside the object (run after
 * the object has been saved): subscription writes, custom field storage,
 * edge writes, etc.
 */
private function applyExternalEffects(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_SUBSCRIBERS:
      $subeditor = id(new PhabricatorSubscriptionsEditor())
        ->setObject($object)
        ->setActor($this->requireActor());

      // Diff the old and new subscriber sets and write only the changes.
      $old_map = array_fuse($xaction->getOldValue());
      $new_map = array_fuse($xaction->getNewValue());

      $subeditor->unsubscribe(
        array_keys(
          array_diff_key($old_map, $new_map)));

      $subeditor->subscribeExplicit(
        array_keys(
          array_diff_key($new_map, $old_map)));

      $subeditor->save();

      // for the rest of these edits, subscribers should include those just
      // added as well as those just removed.
      $subscribers = array_unique(array_merge(
        $this->subscribers,
        $xaction->getOldValue(),
        $xaction->getNewValue()));
      $this->subscribers = $subscribers;
      return $this->applyBuiltinExternalTransaction($object, $xaction);

    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      $field = $this->getCustomFieldForTransaction($object, $xaction);
      return $field->applyApplicationTransactionExternalEffects($xaction);
    case PhabricatorTransactions::TYPE_EDGE:
    case PhabricatorTransactions::TYPE_BUILDABLE:
    case PhabricatorTransactions::TYPE_TOKEN:
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
    case PhabricatorTransactions::TYPE_INLINESTATE:
    case PhabricatorTransactions::TYPE_SPACE:
    case PhabricatorTransactions::TYPE_COMMENT:
      return $this->applyBuiltinExternalTransaction($object, $xaction);
  }

  return $this->applyCustomExternalTransaction($object, $xaction);
}
/**
 * Hook: apply internal effects for application-specific transaction types.
 * Subclasses defining custom types must override; the default throws.
 */
protected function applyCustomInternalTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  $type = $xaction->getTransactionType();
  throw new Exception(
    pht(
      "Transaction type '%s' is missing an internal apply implementation!",
      $type));
}

/**
 * Hook: apply external effects for application-specific transaction types.
 * Subclasses defining custom types must override; the default throws.
 */
protected function applyCustomExternalTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  $type = $xaction->getTransactionType();
  throw new Exception(
    pht(
      "Transaction type '%s' is missing an external apply implementation!",
      $type));
}
/**
 * @{class:PhabricatorTransactions} provides many built-in transactions
 * which should not require much - if any - code in specific applications.
 *
 * This method is a hook for the exceedingly-rare cases where you may need
 * to do **additional** work for built-in transactions. Developers should
 * extend this method, making sure to return the parent implementation
 * regardless of handling any transactions.
 *
 * See also @{method:applyBuiltinExternalTransaction}.
 */
protected function applyBuiltinInternalTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  // Policy and space changes write straight onto the object; other builtin
  // types have no internal (pre-save) effect.
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
      $object->setViewPolicy($xaction->getNewValue());
      break;
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
      $object->setEditPolicy($xaction->getNewValue());
      break;
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
      $object->setJoinPolicy($xaction->getNewValue());
      break;
    case PhabricatorTransactions::TYPE_SPACE:
      $object->setSpacePHID($xaction->getNewValue());
      break;
  }
}
/**
 * See @{method:applyBuiltinInternalTransaction}.
 *
 * Handles the post-save side of built-in transactions; currently only edge
 * transactions require external work (writing edge rows).
 */
protected function applyBuiltinExternalTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_EDGE:
      if ($this->getIsInverseEdgeEditor()) {
        // If we're writing an inverse edge transaction, don't actually
        // do anything. The initiating editor on the other side of the
        // transaction will take care of the edge writes.
        break;
      }

      $old = $xaction->getOldValue();
      $new = $xaction->getNewValue();
      $src = $object->getPHID();
      $const = $xaction->getMetadataValue('edge:type');

      $type = PhabricatorEdgeType::getByConstant($const);
      if ($type->shouldWriteInverseTransactions()) {
        $this->applyInverseEdgeTransactions(
          $object,
          $xaction,
          $type->getInverseEdgeConstant());
      }

      // Stamp each new edge with this object as its source.
      foreach ($new as $dst_phid => $edge) {
        $new[$dst_phid]['src'] = $src;
      }

      $editor = new PhabricatorEdgeEditor();

      // Remove edges which are gone, or whose data changed (they will be
      // re-added with the new data below).
      foreach ($old as $dst_phid => $edge) {
        if (!empty($new[$dst_phid])) {
          if ($old[$dst_phid]['data'] === $new[$dst_phid]['data']) {
            continue;
          }
        }
        $editor->removeEdge($src, $const, $dst_phid);
      }

      // Add edges which are new, or whose data changed.
      foreach ($new as $dst_phid => $edge) {
        if (!empty($old[$dst_phid])) {
          if ($old[$dst_phid]['data'] === $new[$dst_phid]['data']) {
            continue;
          }
        }

        $data = array(
          'data' => $edge['data'],
        );

        $editor->addEdge($src, $const, $dst_phid, $data);
      }

      $editor->save();
      break;
  }
}
/**
 * Fill in a transaction's common values, like author and content source.
 */
protected function populateTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  $actor = $this->getActor();

  // TODO: This needs to be more sophisticated once we have meta-policies.
  $xaction->setViewPolicy(PhabricatorPolicies::POLICY_PUBLIC);

  if ($actor->isOmnipotent()) {
    // Omnipotent actors (e.g. daemons) have no PHID to use as an edit
    // policy, so lock edits down entirely.
    $xaction->setEditPolicy(PhabricatorPolicies::POLICY_NOONE);
  } else {
    $xaction->setEditPolicy($this->getActingAsPHID());
  }

  $xaction->setAuthorPHID($this->getActingAsPHID());
  $xaction->setContentSource($this->getContentSource());
  $xaction->attachViewer($actor);
  $xaction->attachObject($object);

  // New objects have no PHID yet; it is set after save in
  // applyTransactions().
  if ($object->getPHID()) {
    $xaction->setObjectPHID($object->getPHID());
  }

  return $xaction;
}
/**
 * Hook: invoked after internal effects are applied but before the object
 * is saved. May adjust and must return the transaction list.
 */
protected function didApplyInternalEffects(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return $xactions;
}

/**
 * Hook: invoked after external effects are applied, while the database
 * transaction is still open. May adjust and must return the transaction
 * list.
 */
protected function applyFinalEffects(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return $xactions;
}
/**
 * Sets where this edit came from (web, Conduit, daemon, etc.); recorded on
 * every transaction (see populateTransaction()).
 *
 * @return this
 */
public function setContentSource(PhabricatorContentSource $content_source) {
  $this->contentSource = $content_source;
  return $this;
}

/** Convenience: derive the content source from a web request. */
public function setContentSourceFromRequest(AphrontRequest $request) {
  return $this->setContentSource(
    PhabricatorContentSource::newFromRequest($request));
}

/**
 * Convenience: mark this edit as originating from Conduit.
 * NOTE(review): $request is currently unused; the source carries no
 * request-specific parameters.
 */
public function setContentSourceFromConduitRequest(
  ConduitAPIRequest $request) {

  $content_source = PhabricatorContentSource::newForSource(
    PhabricatorContentSource::SOURCE_CONDUIT,
    array());

  return $this->setContentSource($content_source);
}

public function getContentSource() {
  return $this->contentSource;
}
/**
 * Core entry point: validate, adjust, filter and apply a set of
 * transactions to an object, then run Herald and queue a background worker
 * to publish mail/feed/notifications.
 *
 * Stages: attach + expand + combine -> populate -> (non-preview) validate,
 * read-lock, initial effects -> adjust old/new values -> filter no-ops ->
 * capability checks -> sort -> save object + transactions -> external
 * effects -> Herald -> precompute publish state -> queue publish worker.
 *
 * @return list<PhabricatorApplicationTransaction> Applied transactions.
 */
final public function applyTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {

  $this->object = $object;
  $this->xactions = $xactions;
  $this->isNewObject = ($object->getPHID() === null);

  $this->validateEditParameters($object, $xactions);

  $actor = $this->requireActor();

  // NOTE: Some transaction expansion requires that the edited object be
  // attached.
  foreach ($xactions as $xaction) {
    $xaction->attachObject($object);
    $xaction->attachViewer($actor);
  }

  $xactions = $this->expandTransactions($object, $xactions);
  $xactions = $this->expandSupportTransactions($object, $xactions);
  $xactions = $this->combineTransactions($xactions);

  foreach ($xactions as $xaction) {
    $xaction = $this->populateTransaction($object, $xaction);
  }

  $is_preview = $this->getIsPreview();
  $read_locking = false;
  $transaction_open = false;

  if (!$is_preview) {
    // Validate per-type, then across all transactions.
    $errors = array();
    $type_map = mgroup($xactions, 'getTransactionType');
    foreach ($this->getTransactionTypes() as $type) {
      $type_xactions = idx($type_map, $type, array());
      $errors[] = $this->validateTransaction($object, $type, $type_xactions);
    }

    $errors[] = $this->validateAllTransactions($object, $xactions);
    $errors = array_mergev($errors);

    $continue_on_missing = $this->getContinueOnMissingFields();
    foreach ($errors as $key => $error) {
      if ($continue_on_missing && $error->getIsMissingFieldError()) {
        unset($errors[$key]);
      }
    }

    if ($errors) {
      throw new PhabricatorApplicationTransactionValidationException($errors);
    }

    $file_phids = $this->extractFilePHIDs($object, $xactions);

    if ($object->getID()) {
      foreach ($xactions as $xaction) {
        // If any of the transactions require a read lock, hold one and
        // reload the object. We need to do this fairly early so that the
        // call to `adjustTransactionValues()` (which populates old values)
        // is based on the synchronized state of the object, which may differ
        // from the state when it was originally loaded.

        if ($this->shouldReadLock($object, $xaction)) {
          $object->openTransaction();
          $object->beginReadLocking();
          $transaction_open = true;
          $read_locking = true;
          $object->reload();
          break;
        }
      }
    }

    if ($this->shouldApplyInitialEffects($object, $xactions)) {
      if (!$transaction_open) {
        $object->openTransaction();
        $transaction_open = true;
      }
    }
  }

  if ($this->shouldApplyInitialEffects($object, $xactions)) {
    $this->applyInitialEffects($object, $xactions);
  }

  foreach ($xactions as $xaction) {
    $this->adjustTransactionValues($object, $xaction);
  }

  // Drop transactions with no effect; this may end the edit entirely.
  $xactions = $this->filterTransactions($object, $xactions);

  if (!$xactions) {
    if ($read_locking) {
      $object->endReadLocking();
      $read_locking = false;
    }
    if ($transaction_open) {
      $object->killTransaction();
      $transaction_open = false;
    }
    return array();
  }

  // Now that we've merged, filtered, and combined transactions, check for
  // required capabilities.
  foreach ($xactions as $xaction) {
    $this->requireCapabilities($object, $xaction);
  }

  $xactions = $this->sortTransactions($xactions);

  if ($is_preview) {
    $this->loadHandles($xactions);
    return $xactions;
  }

  $comment_editor = id(new PhabricatorApplicationTransactionCommentEditor())
    ->setActor($actor)
    ->setActingAsPHID($this->getActingAsPHID())
    ->setContentSource($this->getContentSource());

  if (!$transaction_open) {
    $object->openTransaction();
  }

  foreach ($xactions as $xaction) {
    $this->applyInternalEffects($object, $xaction);
  }

  $xactions = $this->didApplyInternalEffects($object, $xactions);

  try {
    $object->save();
  } catch (AphrontDuplicateKeyQueryException $ex) {
    $object->killTransaction();

    // This callback has an opportunity to throw a better exception,
    // so execution may end here.
    $this->didCatchDuplicateKeyException($object, $xactions, $ex);

    throw $ex;
  }

  foreach ($xactions as $xaction) {
    $xaction->setObjectPHID($object->getPHID());
    if ($xaction->getComment()) {
      // Comments are saved through the comment editor so comment versions
      // are handled properly.
      $xaction->setPHID($xaction->generatePHID());
      $comment_editor->applyEdit($xaction, $xaction->getComment());
    } else {
      $xaction->save();
    }
  }

  if ($file_phids) {
    $this->attachFiles($object, $file_phids);
  }

  foreach ($xactions as $xaction) {
    $this->applyExternalEffects($object, $xaction);
  }

  $xactions = $this->applyFinalEffects($object, $xactions);

  if ($read_locking) {
    $object->endReadLocking();
    $read_locking = false;
  }

  $object->saveTransaction();

  // Now that we've completely applied the core transaction set, try to apply
  // Herald rules. Herald rules are allowed to either take direct actions on
  // the database (like writing flags), or take indirect actions (like saving
  // some targets for CC when we generate mail a little later), or return
  // transactions which we'll apply normally using another Editor.

  // First, check if *this* is a sub-editor which is itself applying Herald
  // rules: if it is, stop working and return so we don't descend into
  // madness.

  // Otherwise, we're not a Herald editor, so process Herald rules (possibly
  // using a Herald editor to apply resulting transactions) and then send out
  // mail, notifications, and feed updates about everything.

  if ($this->getIsHeraldEditor()) {
    // We are the Herald editor, so stop work here and return the updated
    // transactions.
    return $xactions;
  } else if ($this->getIsInverseEdgeEditor()) {
    // If we're applying inverse edge transactions, don't trigger Herald.
    // From a product perspective, the current set of inverse edges (most
    // often, mentions) aren't things users would expect to trigger Herald.
    // From a technical perspective, objects loaded by the inverse editor may
    // not have enough data to execute rules. At least for now, just stop
    // Herald from executing when applying inverse edges.
  } else if ($this->shouldApplyHeraldRules($object, $xactions)) {
    // We are not the Herald editor, so try to apply Herald rules.
    $herald_xactions = $this->applyHeraldRules($object, $xactions);

    if ($herald_xactions) {
      $xscript_id = $this->getHeraldTranscript()->getID();
      foreach ($herald_xactions as $herald_xaction) {
        $herald_xaction->setMetadataValue('herald:transcriptID', $xscript_id);
      }

      // NOTE: We're acting as the omnipotent user because rules deal with
      // their own policy issues. We use a synthetic author PHID (the
      // Herald application) as the author of record, so that transactions
      // will render in a reasonable way ("Herald assigned this task ...").
      $herald_actor = PhabricatorUser::getOmnipotentUser();
      $herald_phid = id(new PhabricatorHeraldApplication())->getPHID();

      // TODO: It would be nice to give transactions a more specific source
      // which points at the rule which generated them. You can figure this
      // out from transcripts, but it would be cleaner if you didn't have to.
      $herald_source = PhabricatorContentSource::newForSource(
        PhabricatorContentSource::SOURCE_HERALD,
        array());

      $herald_editor = newv(get_class($this), array())
        ->setContinueOnNoEffect(true)
        ->setContinueOnMissingFields(true)
        ->setParentMessageID($this->getParentMessageID())
        ->setIsHeraldEditor(true)
        ->setActor($herald_actor)
        ->setActingAsPHID($herald_phid)
        ->setContentSource($herald_source);

      $herald_xactions = $herald_editor->applyTransactions(
        $object,
        $herald_xactions);

      // Merge the new transactions into the transaction list: we want to
      // send email and publish feed stories about them, too.
      $xactions = array_merge($xactions, $herald_xactions);
    }

    // If Herald did not generate transactions, we may still need to handle
    // "Send an Email" rules.
    $adapter = $this->getHeraldAdapter();
    $this->heraldEmailPHIDs = $adapter->getEmailPHIDs();
    $this->heraldForcedEmailPHIDs = $adapter->getForcedEmailPHIDs();
  }

  $this->didApplyTransactions($xactions);

  if ($object instanceof PhabricatorCustomFieldInterface) {
    // Maybe this makes more sense to move into the search index itself? For
    // now I'm putting it here since I think we might end up with things that
    // need it to be up to date once the next page loads, but if we don't go
    // there we we could move it into search once search moves to the daemons.

    // It now happens in the search indexer as well, but the search indexer is
    // always daemonized, so the logic above still potentially holds. We could
    // possibly get rid of this. The major motivation for putting it in the
    // indexer was to enable reindexing to work.

    $fields = PhabricatorCustomField::getObjectFields(
      $object,
      PhabricatorCustomField::ROLE_APPLICATIONSEARCH);
    $fields->readFieldsFromStorage($object);
    $fields->rebuildIndexes($object);
  }

  $herald_xscript = $this->getHeraldTranscript();
  if ($herald_xscript) {
    $herald_header = $herald_xscript->getXHeraldRulesHeader();
    $herald_header = HeraldTranscript::saveXHeraldRulesHeader(
      $object->getPHID(),
      $herald_header);
  } else {
    $herald_header = HeraldTranscript::loadXHeraldRulesHeader(
      $object->getPHID());
  }
  $this->heraldHeader = $herald_header;

  // We're going to compute some of the data we'll use to publish these
  // transactions here, before queueing a worker.
  //
  // Primarily, this is more correct: we want to publish the object as it
  // exists right now. The worker may not execute for some time, and we want
  // to use the current To/CC list, not respect any changes which may occur
  // between now and when the worker executes.
  //
  // As a secondary benefit, this tends to reduce the amount of state that
  // Editors need to pass into workers.
  $object = $this->willPublish($object, $xactions);

  if (!$this->getDisableEmail()) {
    if ($this->shouldSendMail($object, $xactions)) {
      $this->mailToPHIDs = $this->getMailTo($object);
      $this->mailCCPHIDs = $this->getMailCC($object);
    }
  }

  if ($this->shouldPublishFeedStory($object, $xactions)) {
    $this->feedRelatedPHIDs = $this->getFeedRelatedPHIDs($object, $xactions);
    $this->feedNotifyPHIDs = $this->getFeedNotifyPHIDs($object, $xactions);
  }

  PhabricatorWorker::scheduleTask(
    'PhabricatorApplicationTransactionPublishWorker',
    array(
      'objectPHID' => $object->getPHID(),
      'actorPHID' => $this->getActingAsPHID(),
      'xactionPHIDs' => mpull($xactions, 'getPHID'),
      'state' => $this->getWorkerState(),
    ),
    array(
      'objectPHID' => $object->getPHID(),
      'priority' => PhabricatorWorker::PRIORITY_ALERTS,
    ));

  return $xactions;
}
/**
 * Hook invoked when applying transactions hits a duplicate key exception.
 * Subclasses may override this to handle the collision (for example, to
 * raise a more specific application-level error); the default implementation
 * does nothing.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @param Exception The duplicate key exception which was caught.
 * @return void
 */
protected function didCatchDuplicateKeyException(
  PhabricatorLiskDAO $object,
  array $xactions,
  Exception $ex) {
  return;
}
/**
 * Publish the effects of an applied transaction group: build mail, queue
 * search indexing, and publish feed stories.
 *
 * @param PhabricatorLiskDAO Object the transactions were applied to.
 * @param list<PhabricatorApplicationTransaction> Applied transactions.
 * @return list<PhabricatorApplicationTransaction> The same transactions.
 */
public function publishTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {

  // Hook for edges or other properties that may need (re-)loading
  $object = $this->willPublish($object, $xactions);

  $messages = array();
  if (!$this->getDisableEmail()) {
    if ($this->shouldSendMail($object, $xactions)) {
      $messages = $this->buildMail($object, $xactions);
    }
  }

  if ($this->supportsSearch()) {
    id(new PhabricatorSearchIndexer())
      ->queueDocumentForIndexing(
        $object->getPHID(),
        $this->getSearchContextParameter($object, $xactions));
  }

  if ($this->shouldPublishFeedStory($object, $xactions)) {
    // Collect everyone who is receiving mail, so the feed story can treat
    // them as already notified.
    $mailed = array();
    foreach ($messages as $mail) {
      foreach ($mail->buildRecipientList() as $phid) {
        $mailed[$phid] = true;
      }
    }

    $this->publishFeedStory($object, $xactions, $mailed);
  }

  // NOTE: This actually sends the mail. We do this last to reduce the chance
  // that we send some mail, hit an exception, then send the mail again when
  // retrying.
  foreach ($messages as $mail) {
    $mail->save();
  }

  return $xactions;
}
/**
 * Hook invoked after transactions have been applied. Subclasses may override
 * this to perform follow-up work; the default implementation does nothing.
 *
 * @param list<PhabricatorApplicationTransaction> Applied transactions.
 * @return void
 */
protected function didApplyTransactions(array $xactions) {
  // Hook for subclasses.
  return;
}
/**
 * Determine if the editor should hold a read lock on the object while
 * applying a transaction.
 *
 * If the editor does not hold a lock, two editors may read an object at the
 * same time, then apply their changes without any synchronization. For most
 * transactions, this does not matter much. However, it is important for some
 * transactions. For example, if an object has a transaction count on it, both
 * editors may read the object with `count = 23`, then independently update it
 * and save the object with `count = 24` twice. This will produce the wrong
 * state: the object really has 25 transactions, but the count is only 24.
 *
 * Generally, transactions fall into one of four buckets:
 *
 *   - Append operations: Actions like adding a comment to an object purely
 *     add information to its state, and do not depend on the current object
 *     state in any way. These transactions never need to hold locks.
 *   - Overwrite operations: Actions like changing the title or description
 *     of an object replace the current value with a new value, so the end
 *     state is consistent without a lock. We currently do not lock these
 *     transactions, although we may in the future.
 *   - Edge operations: Edge and subscription operations have internal
 *     synchronization which limits the damage race conditions can cause.
 *     We do not currently lock these transactions, although we may in the
 *     future.
 *   - Update operations: Actions like incrementing a count on an object.
 *     These operations generally should use locks, unless it is not
 *     important that the state remain consistent in the presence of races.
 *
 * @param   PhabricatorLiskDAO        Object being updated.
 * @param   PhabricatorApplicationTransaction Transaction being applied.
 * @return  bool                      True to synchronize the edit with a lock.
 */
protected function shouldReadLock(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  // Default: no locking. Subclasses opt in per transaction type, per the
  // buckets described above.
  return false;
}
/**
 * Load and attach the object handles each transaction requires for
 * rendering, from the point of view of the current actor.
 */
private function loadHandles(array $xactions) {
  // Keyed like $xactions: each entry is the list of PHIDs that transaction
  // needs handles for.
  $required_phids = mpull($xactions, 'getRequiredHandlePHIDs');

  $all_phids = array_mergev($required_phids);
  if ($all_phids) {
    $handles = id(new PhabricatorHandleQuery())
      ->setViewer($this->requireActor())
      ->withPHIDs($all_phids)
      ->execute();
  } else {
    $handles = array();
  }

  // Hand each transaction exactly the handles it asked for.
  foreach ($xactions as $key => $xaction) {
    $xaction->setHandles(array_select_keys($handles, $required_phids[$key]));
  }
}
/**
 * Cache the object's current subscriber PHIDs (as a map of phid => phid) on
 * the editor. Objects which are not subscribable, or which do not exist yet,
 * get an empty subscriber list.
 */
private function loadSubscribers(PhabricatorLiskDAO $object) {
  $this->subscribers = array();

  if (!($object instanceof PhabricatorSubscribableInterface)) {
    return;
  }

  $phid = $object->getPHID();
  if (!$phid) {
    return;
  }

  $subscriber_phids = PhabricatorSubscribersQuery::loadSubscribersForPHID(
    $phid);
  $this->subscribers = array_fuse($subscriber_phids);
}
/**
 * Sanity-check the editor configuration and incoming transactions before
 * they are applied. Transactions must be fresh: unsaved, with no ID/PHID,
 * no object/author/comment PHIDs, a zero comment version, an old value only
 * when one is not auto-generated, and a type this editor supports.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions to validate.
 * @return void
 * @throws PhutilInvalidStateException If no content source has been set.
 * @throws PhabricatorApplicationTransactionStructureException If any
 *   transaction is malformed.
 */
private function validateEditParameters(
  PhabricatorLiskDAO $object,
  array $xactions) {

  if (!$this->getContentSource()) {
    throw new PhutilInvalidStateException('setContentSource');
  }

  // Do a bunch of sanity checks that the incoming transactions are fresh.
  // They should be unsaved and have only "transactionType" and "newValue"
  // set.
  $types = array_fill_keys($this->getTransactionTypes(), true);

  assert_instances_of($xactions, 'PhabricatorApplicationTransaction');
  foreach ($xactions as $xaction) {
    if ($xaction->getPHID() || $xaction->getID()) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht('You can not apply transactions which already have IDs/PHIDs!'));
    }

    if ($xaction->getObjectPHID()) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'You can not apply transactions which already have %s!',
          'objectPHIDs'));
    }

    if ($xaction->getAuthorPHID()) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'You can not apply transactions which already have %s!',
          'authorPHIDs'));
    }

    if ($xaction->getCommentPHID()) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'You can not apply transactions which already have %s!',
          'commentPHIDs'));
    }

    if ($xaction->getCommentVersion() !== 0) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'You can not apply transactions which already have '.
          'commentVersions!'));
    }

    // A transaction either generates its old value automatically, or the
    // caller must have supplied one explicitly -- never both, never neither.
    $expect_value = !$xaction->shouldGenerateOldValue();
    $has_value = $xaction->hasOldValue();

    if ($expect_value && !$has_value) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'This transaction is supposed to have an %s set, but it does not!',
          'oldValue'));
    }

    if ($has_value && !$expect_value) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'This transaction should generate its %s automatically, '.
          'but has already had one set!',
          'oldValue'));
    }

    $type = $xaction->getTransactionType();
    if (empty($types[$type])) {
      throw new PhabricatorApplicationTransactionStructureException(
        $xaction,
        pht(
          'Transaction has type "%s", but that transaction type is not '.
          'supported by this editor (%s).',
          $type,
          get_class($this)));
    }
  }
}
/**
 * Enforce the policy capabilities an actor needs in order to apply a
 * transaction to an existing object: comments require CAN_VIEW, while
 * policy and space changes require CAN_EDIT. Newly created objects are
 * exempt from these checks.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param PhabricatorApplicationTransaction Transaction being applied.
 * @return void
 */
protected function requireCapabilities(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {

  if ($this->getIsNewObject()) {
    return;
  }

  $actor = $this->requireActor();
  switch ($xaction->getTransactionType()) {
    case PhabricatorTransactions::TYPE_COMMENT:
      PhabricatorPolicyFilter::requireCapability(
        $actor,
        $object,
        PhabricatorPolicyCapability::CAN_VIEW);
      break;
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
    case PhabricatorTransactions::TYPE_JOIN_POLICY:
    case PhabricatorTransactions::TYPE_SPACE:
      PhabricatorPolicyFilter::requireCapability(
        $actor,
        $object,
        PhabricatorPolicyCapability::CAN_EDIT);
      break;
  }
}
/**
 * Build a transaction which subscribes users mentioned in remarkup blocks,
 * if the object is subscribable and any valid mentions exist.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @param list<list<string>> Remarkup blocks, keyed like the transactions.
 * @return PhabricatorApplicationTransaction|null A subscribers transaction,
 *   or null if there is no one to subscribe.
 */
private function buildSubscribeTransaction(
  PhabricatorLiskDAO $object,
  array $xactions,
  array $blocks) {

  if (!($object instanceof PhabricatorSubscribableInterface)) {
    return null;
  }

  if ($this->shouldEnableMentions($object, $xactions)) {
    $texts = array_mergev($blocks);
    $phids = PhabricatorMarkupEngine::extractPHIDsFromMentions(
      $this->getActor(),
      $texts);
  } else {
    $phids = array();
  }

  $this->mentionedPHIDs = $phids;

  if ($object->getPHID()) {
    // Don't try to subscribe already-subscribed mentions: we want to generate
    // a dialog about an action having no effect if the user explicitly adds
    // existing CCs, but not if they merely mention existing subscribers.
    $phids = array_diff($phids, $this->subscribers);
  }

  if ($phids) {
    $users = id(new PhabricatorPeopleQuery())
      ->setViewer($this->getActor())
      ->withPHIDs($phids)
      ->execute();
    $users = mpull($users, null, 'getPHID');

    foreach ($phids as $key => $phid) {
      $user = idx($users, $phid);

      // Don't subscribe mentioned users the query could not load. Previously
      // this read $users[$phid] unconditionally, which is an undefined index
      // when a mentioned PHID does not correspond to a loadable user.
      if (!$user) {
        unset($phids[$key]);
        continue;
      }

      // Do not subscribe mentioned users
      // who do not have VIEW Permissions
      if ($object instanceof PhabricatorPolicyInterface
        && !PhabricatorPolicyFilter::hasCapability(
        $user,
        $object,
        PhabricatorPolicyCapability::CAN_VIEW)
      ) {
        unset($phids[$key]);
      } else {
        if ($object->isAutomaticallySubscribed($phid)) {
          unset($phids[$key]);
        }
      }
    }
    $phids = array_values($phids);
  }
  // No else here to properly return null should we unset all subscriber
  if (!$phids) {
    return null;
  }

  $xaction = newv(get_class(head($xactions)), array());
  $xaction->setTransactionType(PhabricatorTransactions::TYPE_SUBSCRIBERS);
  $xaction->setNewValue(array('+' => $phids));

  return $xaction;
}
/**
 * Extract the remarkup blocks from a transaction (used for mention
 * extraction and related processing). Subclasses may override this to
 * expose additional remarkup fields.
 *
 * @param PhabricatorApplicationTransaction Transaction to read.
 * @return list<string> Remarkup blocks.
 */
protected function getRemarkupBlocksFromTransaction(
  PhabricatorApplicationTransaction $transaction) {
  return $transaction->getRemarkupBlocks();
}
/**
 * Attempt to merge two transactions of the same type into one. Subscriber
 * transactions always merge; edge transactions merge only when they modify
 * the same edge type. Everything else does not merge.
 *
 * @param PhabricatorApplicationTransaction First transaction.
 * @param PhabricatorApplicationTransaction Second transaction.
 * @return PhabricatorApplicationTransaction|null Merged transaction, or
 *   null if the transactions can not be merged.
 */
protected function mergeTransactions(
  PhabricatorApplicationTransaction $u,
  PhabricatorApplicationTransaction $v) {

  $type = $u->getTransactionType();

  if ($type == PhabricatorTransactions::TYPE_SUBSCRIBERS) {
    return $this->mergePHIDOrEdgeTransactions($u, $v);
  }

  if ($type == PhabricatorTransactions::TYPE_EDGE) {
    $u_type = $u->getMetadataValue('edge:type');
    $v_type = $v->getMetadataValue('edge:type');
    if ($u_type == $v_type) {
      return $this->mergePHIDOrEdgeTransactions($u, $v);
    }
    return null;
  }

  // By default, do not merge the transactions.
  return null;
}
/**
 * Optionally expand transactions which imply other effects. For example,
 * resigning from a revision in Differential implies removing yourself as
 * a reviewer.
 */
private function expandTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {

  $results = array();
  foreach ($xactions as $xaction) {
    // Each transaction may expand into one or more transactions; collect
    // them all, flattened, in order.
    $expanded = $this->expandTransaction($object, $xaction);
    $results = array_merge($results, array_values($expanded));
  }

  return $results;
}
/**
 * Hook to expand a single transaction into one or more transactions. The
 * default implementation returns the transaction unchanged.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param PhabricatorApplicationTransaction Transaction to expand.
 * @return list<PhabricatorApplicationTransaction> Expanded transactions.
 */
protected function expandTransaction(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {
  return array($xaction);
}
/**
 * Compute only the support transactions (implicit CCs, mention
 * subscriptions, remarkup-derived edges) which applying the given
 * transaction would add, excluding the transaction itself.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param PhabricatorApplicationTransaction Transaction to expand.
 * @return list<PhabricatorApplicationTransaction> Support transactions only.
 */
public function getExpandedSupportTransactions(
  PhabricatorLiskDAO $object,
  PhabricatorApplicationTransaction $xaction) {

  $expanded = $this->expandSupportTransactions($object, array($xaction));

  // Nothing was added beyond the original transaction.
  if (count($expanded) == 1) {
    return array();
  }

  // Strip the original transaction out of the result (strict search compares
  // object identity), leaving only the added support transactions.
  $index = array_search($xaction, $expanded, true);
  if ($index !== false) {
    unset($expanded[$index]);
  }

  return $expanded;
}
/**
 * Add implicit "support" transactions to an edit: an implicit CC for the
 * acting user, subscriptions for mentioned users, and transactions derived
 * from remarkup blocks (such as object mention edges).
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @return list<PhabricatorApplicationTransaction> Transactions plus any
 *   support transactions.
 */
private function expandSupportTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {
  $this->loadSubscribers($object);

  $xactions = $this->applyImplicitCC($object, $xactions);

  // Collect remarkup blocks per transaction, keyed like $xactions.
  $blocks = array();
  foreach ($xactions as $key => $xaction) {
    $blocks[$key] = $this->getRemarkupBlocksFromTransaction($xaction);
  }

  $subscribe_xaction = $this->buildSubscribeTransaction(
    $object,
    $xactions,
    $blocks);
  if ($subscribe_xaction) {
    $xactions[] = $subscribe_xaction;
  }

  // TODO: For now, this is just a placeholder.
  $engine = PhabricatorMarkupEngine::getEngine('extract');
  $engine->setConfig('viewer', $this->requireActor());

  $block_xactions = $this->expandRemarkupBlockTransactions(
    $object,
    $xactions,
    $blocks,
    $engine);

  foreach ($block_xactions as $xaction) {
    $xactions[] = $xaction;
  }

  return $xactions;
}
/**
 * Expand transactions derived from remarkup content: custom subclass
 * expansions plus "mentions object" edges for objects referenced in the
 * remarkup blocks.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @param list<list<string>> Remarkup blocks, keyed like the transactions.
 * @param PhutilMarkupEngine Engine used to extract mentioned objects.
 * @return list<PhabricatorApplicationTransaction> Derived transactions.
 */
private function expandRemarkupBlockTransactions(
  PhabricatorLiskDAO $object,
  array $xactions,
  $blocks,
  PhutilMarkupEngine $engine) {

  $block_xactions = $this->expandCustomRemarkupBlockTransactions(
    $object,
    $xactions,
    $blocks,
    $engine);

  // Render every block through the engine and accumulate the PHIDs of all
  // mentioned objects from the engine metadata.
  $mentioned_phids = array();
  if ($this->shouldEnableMentions($object, $xactions)) {
    foreach ($blocks as $key => $xaction_blocks) {
      foreach ($xaction_blocks as $block) {
        $engine->markupText($block);
        $mentioned_phids += $engine->getTextMetadata(
          PhabricatorObjectRemarkupRule::KEY_MENTIONED_OBJECTS,
          array());
      }
    }
  }

  if (!$mentioned_phids) {
    return $block_xactions;
  }

  $mentioned_objects = id(new PhabricatorObjectQuery())
    ->setViewer($this->getActor())
    ->withPHIDs($mentioned_phids)
    ->execute();

  // Keep only objects which can actually receive mention edges, are not in
  // the unmentionable map, and are not the edited object itself.
  $mentionable_phids = array();
  if ($this->shouldEnableMentions($object, $xactions)) {
    foreach ($mentioned_objects as $mentioned_object) {
      if ($mentioned_object instanceof PhabricatorMentionableInterface) {
        $mentioned_phid = $mentioned_object->getPHID();
        if (idx($this->getUnmentionablePHIDMap(), $mentioned_phid)) {
          continue;
        }
        // don't let objects mention themselves
        if ($object->getPHID() && $mentioned_phid == $object->getPHID()) {
          continue;
        }
        $mentionable_phids[$mentioned_phid] = $mentioned_phid;
      }
    }
  }

  if ($mentionable_phids) {
    $edge_type = PhabricatorObjectMentionsObjectEdgeType::EDGECONST;
    $block_xactions[] = newv(get_class(head($xactions)), array())
      ->setIgnoreOnNoEffect(true)
      ->setTransactionType(PhabricatorTransactions::TYPE_EDGE)
      ->setMetadataValue('edge:type', $edge_type)
      ->setNewValue(array('+' => $mentionable_phids));
  }

  return $block_xactions;
}
/**
 * Hook for subclasses to generate additional transactions from remarkup
 * blocks. The default implementation generates none.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @param list<list<string>> Remarkup blocks, keyed like the transactions.
 * @param PhutilMarkupEngine Engine available for processing the blocks.
 * @return list<PhabricatorApplicationTransaction> Derived transactions.
 */
protected function expandCustomRemarkupBlockTransactions(
  PhabricatorLiskDAO $object,
  array $xactions,
  $blocks,
  PhutilMarkupEngine $engine) {
  return array();
}
/**
 * Attempt to combine similar transactions into a smaller number of total
 * transactions. For example, two transactions which edit the title of an
 * object can be merged into a single edit.
 *
 * Comments attached to transactions which merge away are preserved by
 * re-emitting them as standalone comment transactions.
 *
 * @param list<PhabricatorApplicationTransaction> Transactions to combine.
 * @return list<PhabricatorApplicationTransaction> Combined transactions.
 */
private function combineTransactions(array $xactions) {
  $stray_comments = array();

  $result = array();
  // Maps transaction type to the list of $result keys holding transactions
  // of that type, so each new transaction only tries to merge with its own
  // type.
  $types = array();
  foreach ($xactions as $key => $xaction) {
    $type = $xaction->getTransactionType();
    if (isset($types[$type])) {
      foreach ($types[$type] as $other_key) {
        $merged = $this->mergeTransactions($result[$other_key], $xaction);
        if ($merged) {
          $result[$other_key] = $merged;

          // If either side's comment did not survive the merge, save it so
          // it can be restored as a standalone comment below.
          if ($xaction->getComment() &&
              ($xaction->getComment() !== $merged->getComment())) {
            $stray_comments[] = $xaction->getComment();
          }

          if ($result[$other_key]->getComment() &&
              ($result[$other_key]->getComment() !== $merged->getComment())) {
            $stray_comments[] = $result[$other_key]->getComment();
          }

          // Move on to the next transaction.
          continue 2;
        }
      }
    }
    $result[$key] = $xaction;
    $types[$type][] = $key;
  }

  // If we merged any comments away, restore them.
  foreach ($stray_comments as $comment) {
    $xaction = newv(get_class(head($result)), array());
    $xaction->setTransactionType(PhabricatorTransactions::TYPE_COMMENT);
    $xaction->setComment($comment);
    $result[] = $xaction;
  }

  return array_values($result);
}
/**
 * Merge two PHID-list or edge transactions into $u, combining their
 * '+' / '-' / '=' operation lists. For edge transactions, operations on the
 * same destination PHID have their edge data merged via @{method:mergeEdgeData}.
 *
 * @param PhabricatorApplicationTransaction Transaction merged into (mutated).
 * @param PhabricatorApplicationTransaction Transaction merged from.
 * @return PhabricatorApplicationTransaction The merged transaction ($u).
 */
protected function mergePHIDOrEdgeTransactions(
  PhabricatorApplicationTransaction $u,
  PhabricatorApplicationTransaction $v) {

  $result = $u->getNewValue();
  foreach ($v->getNewValue() as $key => $value) {
    if ($u->getTransactionType() == PhabricatorTransactions::TYPE_EDGE) {
      if (empty($result[$key])) {
        $result[$key] = $value;
      } else {
        // We're merging two lists of edge adds, sets, or removes. Merge
        // them by merging individual PHIDs within them.
        $merged = $result[$key];

        foreach ($value as $dst => $v_spec) {
          if (empty($merged[$dst])) {
            $merged[$dst] = $v_spec;
          } else {
            // Two transactions are trying to perform the same operation on
            // the same edge. Normalize the edge data and then merge it. This
            // allows transactions to specify how data merges execute in a
            // precise way.

            $u_spec = $merged[$dst];

            if (!is_array($u_spec)) {
              $u_spec = array('dst' => $u_spec);
            }
            if (!is_array($v_spec)) {
              $v_spec = array('dst' => $v_spec);
            }

            $ux_data = idx($u_spec, 'data', array());
            $vx_data = idx($v_spec, 'data', array());

            $merged_data = $this->mergeEdgeData(
              $u->getMetadataValue('edge:type'),
              $ux_data,
              $vx_data);

            $u_spec['data'] = $merged_data;
            $merged[$dst] = $u_spec;
          }
        }

        $result[$key] = $merged;
      }
    } else {
      // PHID-list transaction: union the PHID lists, with $v's entries first.
      $result[$key] = array_merge($value, idx($result, $key, array()));
    }
  }
  $u->setNewValue($result);

  // When combining an "ignore" transaction with a normal transaction, make
  // sure we don't propagate the "ignore" flag.
  if (!$v->getIgnoreOnNoEffect()) {
    $u->setIgnoreOnNoEffect(false);
  }

  return $u;
}
/**
 * Merge edge data from two operations on the same edge. By default, keys in
 * the second operation ($v) win over keys in the first ($u) via array union.
 *
 * @param const Edge type constant being merged.
 * @param dict Edge data from the first operation.
 * @param dict Edge data from the second operation.
 * @return dict Merged edge data.
 */
protected function mergeEdgeData($type, array $u, array $v) {
  return $v + $u;
}
/**
 * Compute the effective new value of a PHID-list transaction (for example,
 * a subscriber list) by applying its '+' (add), '-' (remove) and '=' (set)
 * operations to the old value.
 *
 * @param PhabricatorApplicationTransaction Transaction being applied.
 * @param list<phid>|null Optional explicit old value; defaults to the
 *   transaction's own old value.
 * @return list<phid> Resulting list of PHIDs.
 * @throws Exception If the new value contains unrecognized keys.
 */
protected function getPHIDTransactionNewValue(
  PhabricatorApplicationTransaction $xaction,
  $old = null) {

  if ($old !== null) {
    $old = array_fuse($old);
  } else {
    $old = array_fuse($xaction->getOldValue());
  }

  $new = $xaction->getNewValue();
  $new_add = idx($new, '+', array());
  unset($new['+']);
  $new_rem = idx($new, '-', array());
  unset($new['-']);
  $new_set = idx($new, '=', null);
  if ($new_set !== null) {
    $new_set = array_fuse($new_set);
  }
  unset($new['=']);

  if ($new) {
    // BUGFIX: the second placeholder was previously "'%'", leaving this
    // pht() with three conversions for four arguments.
    throw new Exception(
      pht(
        "Invalid '%s' value for PHID transaction. Value should contain only ".
        "keys '%s' (add PHIDs), '%s' (remove PHIDs) and '%s' (set PHIDS).",
        'new',
        '+',
        '-',
        '='));
  }

  $result = array();

  // Keep old values which survive a '=' (set) operation, if one is present.
  foreach ($old as $phid) {
    if ($new_set !== null && empty($new_set[$phid])) {
      continue;
    }
    $result[$phid] = $phid;
  }

  if ($new_set !== null) {
    foreach ($new_set as $phid) {
      $result[$phid] = $phid;
    }
  }

  foreach ($new_add as $phid) {
    $result[$phid] = $phid;
  }

  // Removals apply last, so '-' wins over both '+' and '='.
  foreach ($new_rem as $phid) {
    unset($result[$phid]);
  }

  return array_values($result);
}
/**
 * Compute the effective new value of an edge transaction by applying its
 * '+' (add), '-' (remove) and '=' (set) operations to the old edge list.
 * Every surviving edge is normalized into a full edge specification.
 *
 * @param PhabricatorApplicationTransaction Transaction being applied.
 * @return dict<phid, dict> Map of destination PHID to edge specification.
 * @throws Exception If the new value contains unrecognized keys or invalid
 *   edge lists.
 */
protected function getEdgeTransactionNewValue(
  PhabricatorApplicationTransaction $xaction) {

  $new = $xaction->getNewValue();
  $new_add = idx($new, '+', array());
  unset($new['+']);
  $new_rem = idx($new, '-', array());
  unset($new['-']);
  $new_set = idx($new, '=', null);
  unset($new['=']);

  if ($new) {
    throw new Exception(
      pht(
        "Invalid '%s' value for Edge transaction. Value should contain only ".
        "keys '%s' (add edges), '%s' (remove edges) and '%s' (set edges).",
        'new',
        '+',
        '-',
        '='));
  }

  $old = $xaction->getOldValue();

  // Validate the structure of each operation list before applying anything.
  $lists = array($new_set, $new_add, $new_rem);
  foreach ($lists as $list) {
    $this->checkEdgeList($list);
  }

  $result = array();

  // Keep old edges which survive a '=' (set) operation, if one is present.
  foreach ($old as $dst_phid => $edge) {
    if ($new_set !== null && empty($new_set[$dst_phid])) {
      continue;
    }
    $result[$dst_phid] = $this->normalizeEdgeTransactionValue(
      $xaction,
      $edge,
      $dst_phid);
  }

  if ($new_set !== null) {
    foreach ($new_set as $dst_phid => $edge) {
      $result[$dst_phid] = $this->normalizeEdgeTransactionValue(
        $xaction,
        $edge,
        $dst_phid);
    }
  }

  foreach ($new_add as $dst_phid => $edge) {
    $result[$dst_phid] = $this->normalizeEdgeTransactionValue(
      $xaction,
      $edge,
      $dst_phid);
  }

  // Removals apply last, so '-' wins over both '+' and '='.
  foreach ($new_rem as $dst_phid => $edge) {
    unset($result[$dst_phid]);
  }

  return $result;
}
/**
 * Validate one '+', '-' or '=' operation list from an edge transaction:
 * keys must be recognizable destination PHIDs, and values must be either
 * the PHID itself or an edge specification array.
 *
 * @param dict|null Operation list to validate (may be empty or null).
 * @return void
 * @throws Exception If the list is malformed.
 */
private function checkEdgeList($list) {
  if (!$list) {
    return;
  }

  foreach ($list as $dst_phid => $spec) {
    $is_unknown_phid =
      (phid_get_type($dst_phid) ===
        PhabricatorPHIDConstants::PHID_TYPE_UNKNOWN);
    if ($is_unknown_phid) {
      throw new Exception(
        pht(
          "Edge transactions must have destination PHIDs as in edge ".
          "lists (found key '%s').",
          $dst_phid));
    }

    if (!is_array($spec) && $spec !== $dst_phid) {
      throw new Exception(
        pht(
          "Edge transactions must have PHIDs or edge specs as values ".
          "(found value '%s').",
          $spec));
    }
  }
}
/**
 * Normalize one edge value from an edge transaction into a full edge
 * specification dictionary with 'dst', 'type' and 'data' keys, validating
 * that any explicit edge type matches the transaction's edge type.
 *
 * @param PhabricatorApplicationTransaction Transaction the edge belongs to.
 * @param phid|dict Either the destination PHID or an edge specification.
 * @param phid Destination PHID for the edge.
 * @return dict Normalized edge specification.
 * @throws Exception If the edge value or its type is invalid.
 */
private function normalizeEdgeTransactionValue(
  PhabricatorApplicationTransaction $xaction,
  $edge,
  $dst_phid) {
  if (!is_array($edge)) {
    if ($edge != $dst_phid) {
      throw new Exception(
        pht(
          'Transaction edge data must either be the edge PHID or an edge '.
          'specification dictionary.'));
    }
    $edge = array();
  } else {
    // Only allow recognized edge specification keys.
    foreach ($edge as $key => $value) {
      switch ($key) {
        case 'src':
        case 'dst':
        case 'type':
        case 'data':
        case 'dateCreated':
        case 'dateModified':
        case 'seq':
        case 'dataID':
          break;
        default:
          throw new Exception(
            pht(
              'Transaction edge specification contains unexpected key "%s".',
              $key));
      }
    }
  }

  $edge['dst'] = $dst_phid;

  // The edge's type must agree with the transaction's edge type; fill it in
  // if it was omitted.
  $edge_type = $xaction->getMetadataValue('edge:type');
  if (empty($edge['type'])) {
    $edge['type'] = $edge_type;
  } else {
    if ($edge['type'] != $edge_type) {
      $this_type = $edge['type'];
      throw new Exception(
        pht(
          "Edge transaction includes edge of type '%s', but ".
          "transaction is of type '%s'. Each edge transaction ".
          "must alter edges of only one type.",
          $this_type,
          $edge_type));
    }
  }

  if (!isset($edge['data'])) {
    $edge['data'] = array();
  }

  return $edge;
}
/**
 * Order transactions for display/application so that bare comments come
 * after all other actions.
 *
 * @param list<PhabricatorApplicationTransaction> Transactions to sort.
 * @return list<PhabricatorApplicationTransaction> Sorted transactions.
 */
protected function sortTransactions(array $xactions) {
  $actions = array();
  $comments = array();

  // Move bare comments to the end, so the actions precede them.
  foreach ($xactions as $xaction) {
    $is_comment =
      ($xaction->getTransactionType() ==
        PhabricatorTransactions::TYPE_COMMENT);
    if ($is_comment) {
      $comments[] = $xaction;
    } else {
      $actions[] = $xaction;
    }
  }

  return array_values(array_merge($actions, $comments));
}
/**
 * Filter out transactions which have no effect. Ignorable no-ops are
 * silently dropped; otherwise, a no-effect exception is raised unless the
 * editor is configured to continue (or is previewing), in which case
 * no-effect transactions with comments are downgraded to pure comments.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions to filter.
 * @return list<PhabricatorApplicationTransaction> Surviving transactions.
 * @throws PhabricatorApplicationTransactionNoEffectException If some
 *   transactions have no effect and the editor is not set to continue.
 */
protected function filterTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {

  $type_comment = PhabricatorTransactions::TYPE_COMMENT;

  $no_effect = array();
  $has_comment = false;
  $any_effect = false;
  foreach ($xactions as $key => $xaction) {
    if ($this->transactionHasEffect($object, $xaction)) {
      if ($xaction->getTransactionType() != $type_comment) {
        $any_effect = true;
      }
    } else if ($xaction->getIgnoreOnNoEffect()) {
      // Ignorable no-ops (like automatically generated mention edges) are
      // dropped without complaint.
      unset($xactions[$key]);
    } else {
      $no_effect[$key] = $xaction;
    }
    if ($xaction->hasComment()) {
      $has_comment = true;
    }
  }

  if (!$no_effect) {
    return $xactions;
  }

  if (!$this->getContinueOnNoEffect() && !$this->getIsPreview()) {
    throw new PhabricatorApplicationTransactionNoEffectException(
      $no_effect,
      $any_effect,
      $has_comment);
  }

  if (!$any_effect && !$has_comment) {
    // If we only have empty comment transactions, just drop them all.
    return array();
  }

  // Keep the comments from no-effect transactions by converting them into
  // plain comment transactions; drop the rest.
  foreach ($no_effect as $key => $xaction) {
    if ($xaction->getComment()) {
      $xaction->setTransactionType($type_comment);
      $xaction->setOldValue(null);
      $xaction->setNewValue(null);
    } else {
      unset($xactions[$key]);
    }
  }

  return $xactions;
}
/**
 * Hook for validating transactions. This callback will be invoked for each
 * available transaction type, even if an edit does not apply any transactions
 * of that type. This allows you to raise exceptions when required fields are
 * missing, by detecting that the object has no field value and there is no
 * transaction which sets one.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param string Transaction type to validate.
 * @param list<PhabricatorApplicationTransaction> Transactions of given type,
 *   which may be empty if the edit does not apply any transactions of the
 *   given type.
 * @return list<PhabricatorApplicationTransactionValidationError> List of
 *   validation errors.
 */
protected function validateTransaction(
  PhabricatorLiskDAO $object,
  $type,
  array $xactions) {

  $errors = array();
  switch ($type) {
    case PhabricatorTransactions::TYPE_VIEW_POLICY:
      $errors[] = $this->validatePolicyTransaction(
        $object,
        $xactions,
        $type,
        PhabricatorPolicyCapability::CAN_VIEW);
      break;
    case PhabricatorTransactions::TYPE_EDIT_POLICY:
      $errors[] = $this->validatePolicyTransaction(
        $object,
        $xactions,
        $type,
        PhabricatorPolicyCapability::CAN_EDIT);
      break;
    case PhabricatorTransactions::TYPE_SPACE:
      $errors[] = $this->validateSpaceTransactions(
        $object,
        $xactions,
        $type);
      break;
    case PhabricatorTransactions::TYPE_CUSTOMFIELD:
      // Group custom field transactions by field key, then let each editable
      // field validate the transactions which target it (fields with no
      // transactions still get a chance to complain about missing values).
      $groups = array();
      foreach ($xactions as $xaction) {
        $groups[$xaction->getMetadataValue('customfield:key')][] = $xaction;
      }

      $field_list = PhabricatorCustomField::getObjectFields(
        $object,
        PhabricatorCustomField::ROLE_EDIT);
      $field_list->setViewer($this->getActor());

      $role_xactions = PhabricatorCustomField::ROLE_APPLICATIONTRANSACTIONS;
      foreach ($field_list->getFields() as $field) {
        if (!$field->shouldEnableForRole($role_xactions)) {
          continue;
        }
        $errors[] = $field->validateApplicationTransactions(
          $this,
          $type,
          idx($groups, $field->getFieldKey(), array()));
      }
      break;
  }

  return array_mergev($errors);
}
/**
 * Validate view/edit policy transactions: the actor must not select a
 * policy which would lock them out of the given capability on the object.
 * For new objects with no policy transactions, the default policy must
 * also grant the actor the capability.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Policy transactions of the
 *   given type.
 * @param const Transaction type being validated.
 * @param const Policy capability the actor must retain.
 * @return list<PhabricatorApplicationTransactionValidationError> Errors.
 */
private function validatePolicyTransaction(
  PhabricatorLiskDAO $object,
  array $xactions,
  $transaction_type,
  $capability) {

  $actor = $this->requireActor();
  $errors = array();
  // Note $this->xactions is necessary; $xactions is $this->xactions of
  // $transaction_type
  $policy_object = $this->adjustObjectForPolicyChecks(
    $object,
    $this->xactions);

  // Make sure the user isn't editing away their ability to $capability this
  // object.
  foreach ($xactions as $xaction) {
    try {
      PhabricatorPolicyFilter::requireCapabilityWithForcedPolicy(
        $actor,
        $policy_object,
        $capability,
        $xaction->getNewValue());
    } catch (PhabricatorPolicyException $ex) {
      $errors[] = new PhabricatorApplicationTransactionValidationError(
        $transaction_type,
        pht('Invalid'),
        pht(
          'You can not select this %s policy, because you would no longer '.
          'be able to %s the object.',
          $capability,
          $capability),
        $xaction);
    }
  }

  if ($this->getIsNewObject()) {
    if (!$xactions) {
      // No policy transaction was supplied for a new object, so check that
      // the object's default policy does not exclude the actor.
      $has_capability = PhabricatorPolicyFilter::hasCapability(
        $actor,
        $policy_object,
        $capability);

      if (!$has_capability) {
        $errors[] = new PhabricatorApplicationTransactionValidationError(
          $transaction_type,
          pht('Invalid'),
          pht(
            'The selected %s policy excludes you. Choose a %s policy '.
            'which allows you to %s the object.',
            $capability,
            $capability,
            $capability));
      }
    }
  }

  return $errors;
}
/**
 * Validate Spaces transactions: if the install has spaces, the object must
 * be placed in a space the viewer can see, and may only be moved into an
 * active (non-archived) space.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Space transactions.
 * @param const Transaction type being validated.
 * @return list<PhabricatorApplicationTransactionValidationError> Errors.
 */
private function validateSpaceTransactions(
  PhabricatorLiskDAO $object,
  array $xactions,
  $transaction_type) {
  $errors = array();

  $actor = $this->getActor();

  $has_spaces = PhabricatorSpacesNamespaceQuery::getViewerSpacesExist($actor);
  $actor_spaces = PhabricatorSpacesNamespaceQuery::getViewerSpaces($actor);
  $active_spaces = PhabricatorSpacesNamespaceQuery::getViewerActiveSpaces(
    $actor);
  foreach ($xactions as $xaction) {
    $space_phid = $xaction->getNewValue();

    if ($space_phid === null) {
      if (!$has_spaces) {
        // The install doesn't have any spaces, so this is fine.
        continue;
      }

      // The install has some spaces, so every object needs to be put
      // in a valid space.
      $errors[] = new PhabricatorApplicationTransactionValidationError(
        $transaction_type,
        pht('Invalid'),
        pht('You must choose a space for this object.'),
        $xaction);
      continue;
    }

    // If the PHID isn't `null`, it needs to be a valid space that the
    // viewer can see.
    if (empty($actor_spaces[$space_phid])) {
      $errors[] = new PhabricatorApplicationTransactionValidationError(
        $transaction_type,
        pht('Invalid'),
        pht(
          'You can not shift this object in the selected space, because '.
          'the space does not exist or you do not have access to it.'),
        $xaction);
    } else if (empty($active_spaces[$space_phid])) {

      // It's OK to edit objects in an archived space, so just move on if
      // we aren't adjusting the value.
      $old_space_phid = $this->getTransactionOldValue($object, $xaction);
      if ($space_phid == $old_space_phid) {
        continue;
      }

      $errors[] = new PhabricatorApplicationTransactionValidationError(
        $transaction_type,
        pht('Archived'),
        pht(
          'You can not shift this object into the selected space, because '.
          'the space is archived. Objects can not be created inside (or '.
          'moved into) archived spaces.'),
        $xaction);
    }
  }

  return $errors;
}
/**
 * Build a copy of the object adjusted to reflect pending transactions, so
 * policy checks evaluate against the state the object will have after the
 * edit (for example, the new subscriber list or space).
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> All pending transactions.
 * @return PhabricatorLiskDAO Adjusted clone for policy evaluation.
 */
protected function adjustObjectForPolicyChecks(
  PhabricatorLiskDAO $object,
  array $xactions) {
  $copy = clone $object;

  foreach ($xactions as $xaction) {
    switch ($xaction->getTransactionType()) {
      case PhabricatorTransactions::TYPE_SUBSCRIBERS:
        // Feed the post-edit subscriber list to the subscribers policy rule
        // via a transaction hint, rather than mutating real state.
        $clone_xaction = clone $xaction;
        $clone_xaction->setOldValue(array_values($this->subscribers));
        $clone_xaction->setNewValue(
          $this->getPHIDTransactionNewValue(
            $clone_xaction));

        PhabricatorPolicyRule::passTransactionHintToRule(
          $copy,
          new PhabricatorSubscriptionsSubscribersPolicyRule(),
          array_fuse($clone_xaction->getNewValue()));

        break;
      case PhabricatorTransactions::TYPE_SPACE:
        $space_phid = $this->getTransactionNewValue($object, $xaction);
        $copy->setSpacePHID($space_phid);
        break;
    }
  }

  return $copy;
}
/**
 * Hook for subclasses to validate across all transactions at once (for
 * checks which depend on several transaction types together). The default
 * implementation reports no errors.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> All pending transactions.
 * @return list<PhabricatorApplicationTransactionValidationError> Errors.
 */
protected function validateAllTransactions(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return array();
}
/**
 * Check for a missing text field.
 *
 * A text field is missing if the object has no value and there are no
 * transactions which set a value, or if the transactions remove the value.
 * This method is intended to make implementing @{method:validateTransaction}
 * more convenient:
 *
 *   $missing = $this->validateIsEmptyTextField(
 *     $object->getName(),
 *     $xactions);
 *
 * This will return `true` if the net effect of the object and transactions
 * is an empty field.
 *
 * @param wild Current field value.
 * @param list<PhabricatorApplicationTransaction> Transactions editing the
 *   field.
 * @return bool True if the field will be an empty text field after edits.
 */
protected function validateIsEmptyTextField($field_value, array $xactions) {
  // If there are edits, the last one wins: the field ends up empty exactly
  // when the final transaction sets a value with no length.
  if ($xactions) {
    return !strlen(last($xactions)->getNewValue());
  }

  // No edits: the field is empty exactly when its current value has no
  // length.
  return !strlen($field_value);
}
/* -( Implicit CCs )------------------------------------------------------- */
/**
 * When a user interacts with an object, we might want to add them to CC.
 *
 * Only real users are added, only when at least one transaction implies a CC
 * (see @{method:shouldImplyCC}), and never when the user is automatically
 * subscribed, already subscribed, or has explicitly unsubscribed.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Transactions being applied.
 * @return list<PhabricatorApplicationTransaction> Transactions, possibly
 *   with a subscribe transaction prepended.
 */
final public function applyImplicitCC(
  PhabricatorLiskDAO $object,
  array $xactions) {

  if (!($object instanceof PhabricatorSubscribableInterface)) {
    // If the object isn't subscribable, we can't CC them.
    return $xactions;
  }

  $actor_phid = $this->getActingAsPHID();

  $type_user = PhabricatorPeopleUserPHIDType::TYPECONST;
  if (phid_get_type($actor_phid) != $type_user) {
    // Transactions by application actors like Herald, Harbormaster and
    // Diffusion should not CC the applications.
    return $xactions;
  }

  if ($object->isAutomaticallySubscribed($actor_phid)) {
    // If they're auto-subscribed, don't CC them.
    return $xactions;
  }

  $should_cc = false;
  foreach ($xactions as $xaction) {
    if ($this->shouldImplyCC($object, $xaction)) {
      $should_cc = true;
      break;
    }
  }

  if (!$should_cc) {
    // Only some types of actions imply a CC (like adding a comment).
    return $xactions;
  }

  if ($object->getPHID()) {
    if (isset($this->subscribers[$actor_phid])) {
      // If the user is already subscribed, don't implicitly CC them.
      return $xactions;
    }

    $unsub = PhabricatorEdgeQuery::loadDestinationPHIDs(
      $object->getPHID(),
      PhabricatorObjectHasUnsubscriberEdgeType::EDGECONST);
    $unsub = array_fuse($unsub);
    if (isset($unsub[$actor_phid])) {
      // If the user has previously unsubscribed from this object explicitly,
      // don't implicitly CC them.
      return $xactions;
    }
  }

  $xaction = newv(get_class(head($xactions)), array());
  $xaction->setTransactionType(PhabricatorTransactions::TYPE_SUBSCRIBERS);
  $xaction->setNewValue(array('+' => array($actor_phid)));

  // Prepend the CC so later subscriber transactions can merge into it.
  array_unshift($xactions, $xaction);

  return $xactions;
}
/* -( Sending Mail )------------------------------------------------------- */
/**
 * Determine whether this edit should generate email. Subclasses enable mail
 * by overriding this; the default is to send none.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Applied transactions.
 * @return bool True to build and send mail for this edit.
 * @task mail
 */
protected function shouldSendMail(
  PhabricatorLiskDAO $object,
  array $xactions) {
  return false;
}
/**
 * Build the mail messages for every mail target of this edit.
 *
 * Each target is rendered from the point of view of its own viewer, so the
 * editor's actor (and locale) are temporarily swapped while each message is
 * built and restored afterward -- even if building a message throws.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Applied transactions.
 * @return list<PhabricatorMetaMTAMail> Unsaved mail messages.
 * @task mail
 */
private function buildMail(
  PhabricatorLiskDAO $object,
  array $xactions) {

  $email_to = $this->mailToPHIDs;
  $email_cc = $this->mailCCPHIDs;
  $email_cc = array_merge($email_cc, $this->heraldEmailPHIDs);

  $targets = $this->buildReplyHandler($object)
    ->getMailTargets($email_to, $email_cc);

  // Set this explicitly before we start swapping out the effective actor.
  $this->setActingAsPHID($this->getActingAsPHID());

  $messages = array();

  foreach ($targets as $target) {
    $original_actor = $this->getActor();

    $viewer = $target->getViewer();
    $this->setActor($viewer);
    $locale = PhabricatorEnv::beginScopedLocale($viewer->getTranslation());

    $caught = null;
    $mail = null;
    try {
      // Reload handles for the new viewer.
      $this->loadHandles($xactions);

      $mail = $this->buildMailForTarget($object, $xactions, $target);
    } catch (Exception $ex) {
      $caught = $ex;
    }

    // Restore the actor and locale before potentially rethrowing, so the
    // editor is left in a consistent state.
    $this->setActor($original_actor);
    unset($locale);

    if ($caught) {
      // BUGFIX: this previously threw "$ex", which only worked because the
      // variable leaked out of the catch block; rethrow the explicitly
      // captured exception instead.
      throw $caught;
    }

    if ($mail) {
      $messages[] = $mail;
    }
  }

  return $messages;
}
/**
 * Build one mail message for a specific mail target, or return null if none
 * of the transactions are visible to that target's viewer.
 *
 * @param PhabricatorLiskDAO Object being edited.
 * @param list<PhabricatorApplicationTransaction> Applied transactions.
 * @param PhabricatorMailTarget Target (viewer + recipients) for the message.
 * @return PhabricatorMetaMTAMail|null Unsaved mail, or null to skip.
 */
private function buildMailForTarget(
  PhabricatorLiskDAO $object,
  array $xactions,
  PhabricatorMailTarget $target) {

  // Check if any of the transactions are visible for this viewer. If we
  // don't have any visible transactions, don't send the mail.

  $any_visible = false;
  foreach ($xactions as $xaction) {
    if (!$xaction->shouldHideForMail($xactions)) {
      $any_visible = true;
      break;
    }
  }

  if (!$any_visible) {
    return null;
  }

  $mail = $this->buildMailTemplate($object);
  $body = $this->buildMailBody($object, $xactions);

  $mail_tags = $this->getMailTags($object, $xactions);
  $action = $this->getMailAction($object, $xactions);

  if (PhabricatorEnv::getEnvConfig('metamta.email-preferences')) {
    $this->addEmailPreferenceSectionToMailBody(
      $body,
      $object,
      $xactions);
  }

  $mail
    ->setFrom($this->getActingAsPHID())
    ->setSubjectPrefix($this->getMailSubjectPrefix())
    ->setVarySubjectPrefix('['.$action.']')
    ->setThreadID($this->getMailThreadID($object), $this->getIsNewObject())
    ->setRelatedPHID($object->getPHID())
    ->setExcludeMailRecipientPHIDs($this->getExcludeMailRecipientPHIDs())
    ->setForceHeraldMailRecipientPHIDs($this->heraldForcedEmailPHIDs)
    ->setMailTags($mail_tags)
    ->setIsBulk(true)
    ->setBody($body->render())
    ->setHTMLBody($body->renderHTML());

  foreach ($body->getAttachments() as $attachment) {
    $mail->addAttachment($attachment);
  }

  if ($this->heraldHeader) {
    $mail->addHeader('X-Herald-Rules', $this->heraldHeader);
  }

  if ($object instanceof PhabricatorProjectInterface) {
    $this->addMailProjectMetadata($object, $mail);
  }

  if ($this->getParentMessageID()) {
    $mail->setParentMessageID($this->getParentMessageID());
  }

  // Let the target adjust To/CC and viewer-specific details before sending.
  return $target->willSendMail($mail);
}
/**
 * Add an "X-Phabricator-Projects" header listing the object's tagged
 * projects, so users can filter mail by project.
 *
 * @task mail
 */
private function addMailProjectMetadata(
PhabricatorLiskDAO $object,
PhabricatorMetaMTAMail $template) {
$project_phids = PhabricatorEdgeQuery::loadDestinationPHIDs(
$object->getPHID(),
PhabricatorProjectObjectHasProjectEdgeType::EDGECONST);
if (!$project_phids) {
return;
}
// TODO: This viewer isn't quite right. It would be slightly better to use
// the mail recipient, but that's not very easy given the way rendering
// works today.
$handles = id(new PhabricatorHandleQuery())
->setViewer($this->requireActor())
->withPHIDs($project_phids)
->execute();
$project_tags = array();
foreach ($handles as $handle) {
// Skip handles for objects which could not be fully loaded.
if (!$handle->isComplete()) {
continue;
}
$project_tags[] = '<'.$handle->getObjectName().'>';
}
if (!$project_tags) {
return;
}
$project_tags = implode(', ', $project_tags);
$template->addHeader('X-Phabricator-Projects', $project_tags);
}
/**
 * Identifier used to thread mail about this object; defaults to the
 * object PHID.
 *
 * @task mail
 */
protected function getMailThreadID(PhabricatorLiskDAO $object) {
return $object->getPHID();
}
/**
 * Pick the transaction whose action is strongest, used to summarize the
 * transaction group (for example, in mail subjects).
 *
 * @task mail
 */
protected function getStrongestAction(
  PhabricatorLiskDAO $object,
  array $xactions) {
  // Sort ascending by strength, then take the final (strongest) element.
  $by_strength = msort($xactions, 'getActionStrength');
  return last($by_strength);
}
/**
 * Hook: build the reply handler which routes inbound mail replies.
 * Subclasses which support mail must override this.
 *
 * @task mail
 */
protected function buildReplyHandler(PhabricatorLiskDAO $object) {
throw new Exception(pht('Capability not supported.'));
}
/**
 * Hook: subject prefix (like "[Maniphest]") for outbound mail.
 * Subclasses which support mail must override this.
 *
 * @task mail
 */
protected function getMailSubjectPrefix() {
throw new Exception(pht('Capability not supported.'));
}
/**
 * Collect the mail tags contributed by every transaction in the group.
 *
 * @task mail
 */
protected function getMailTags(
  PhabricatorLiskDAO $object,
  array $xactions) {
  $tag_lists = array();
  foreach ($xactions as $xaction) {
    $tag_lists[] = $xaction->getMailTags();
  }
  // Flatten the per-transaction lists into a single list of tags.
  return array_mergev($tag_lists);
}
/**
 * Hook: map of mail tag constants to human-readable names, used by
 * mail preference settings.
 *
 * @task mail
 */
public function getMailTagsMap() {
// TODO: We should move shared mail tags, like "comment", here.
return array();
}
/**
 * Derive the mail "action" label from the strongest transaction.
 *
 * @task mail
 */
protected function getMailAction(
  PhabricatorLiskDAO $object,
  array $xactions) {
  $strongest = $this->getStrongestAction($object, $xactions);
  return $strongest->getActionName();
}
/**
 * Hook: build the base mail object for this editor's application.
 * Subclasses which support mail must override this.
 *
 * @task mail
 */
protected function buildMailTemplate(PhabricatorLiskDAO $object) {
throw new Exception(pht('Capability not supported.'));
}
/**
 * Hook: list of PHIDs which should receive "To" mail.
 * Subclasses which support mail must override this.
 *
 * @task mail
 */
protected function getMailTo(PhabricatorLiskDAO $object) {
throw new Exception(pht('Capability not supported.'));
}
/**
 * List of PHIDs which should receive "CC" mail: object subscribers plus
 * project watchers (filtered by visibility). Throws if the object type
 * supports neither subscribers nor projects.
 *
 * @task mail
 */
protected function getMailCC(PhabricatorLiskDAO $object) {
$phids = array();
$has_support = false;
if ($object instanceof PhabricatorSubscribableInterface) {
$phid = $object->getPHID();
$phids[] = PhabricatorSubscribersQuery::loadSubscribersForPHID($phid);
$has_support = true;
}
if ($object instanceof PhabricatorProjectInterface) {
$project_phids = PhabricatorEdgeQuery::loadDestinationPHIDs(
$object->getPHID(),
PhabricatorProjectObjectHasProjectEdgeType::EDGECONST);
if ($project_phids) {
$watcher_type = PhabricatorObjectHasWatcherEdgeType::EDGECONST;
$query = id(new PhabricatorEdgeQuery())
->withSourcePHIDs($project_phids)
->withEdgeTypes(array($watcher_type));
$query->execute();
$watcher_phids = $query->getDestinationPHIDs();
if ($watcher_phids) {
// We need to do a visibility check for all the watchers, as
// watching a project is not a guarantee that you can see objects
// associated with it.
$users = id(new PhabricatorPeopleQuery())
->setViewer($this->requireActor())
->withPHIDs($watcher_phids)
->execute();
$watchers = array();
foreach ($users as $user) {
$can_see = PhabricatorPolicyFilter::hasCapability(
$user,
$object,
PhabricatorPolicyCapability::CAN_VIEW);
if ($can_see) {
$watchers[] = $user->getPHID();
}
}
$phids[] = $watchers;
}
}
$has_support = true;
}
if (!$has_support) {
throw new Exception(pht('Capability not supported.'));
}
// Flatten the per-source lists into a single list of PHIDs.
return array_mergev($phids);
}
/**
 * Build the body for transaction mail: transaction headers, comments,
 * and any custom field sections.
 *
 * @task mail
 */
protected function buildMailBody(
PhabricatorLiskDAO $object,
array $xactions) {
$body = new PhabricatorMetaMTAMailBody();
$body->setViewer($this->requireActor());
$this->addHeadersAndCommentsToMailBody($body, $xactions);
$this->addCustomFieldsToMailBody($body, $object, $xactions);
return $body;
}
/**
 * Append a link to the user's email preference settings to the body.
 *
 * @task mail
 */
protected function addEmailPreferenceSectionToMailBody(
  PhabricatorMetaMTAMailBody $body,
  PhabricatorLiskDAO $object,
  array $xactions) {
  $preferences_uri = PhabricatorEnv::getProductionURI(
    '/settings/panel/emailpreferences/');
  $body->addLinkSection(pht('EMAIL PREFERENCES'), $preferences_uri);
}
/**
 * Render transaction titles and comment bodies into the mail body.
 *
 * Titles form a single plain-text section; each comment becomes its own
 * remarkup section.
 *
 * @task mail
 */
protected function addHeadersAndCommentsToMailBody(
  PhabricatorMetaMTAMailBody $body,
  array $xactions) {
  $header_lines = array();
  $comment_blocks = array();
  foreach ($xactions as $xaction) {
    // Transactions hidden from mail contribute neither a title nor a body.
    if ($xaction->shouldHideForMail($xactions)) {
      continue;
    }
    $title = $xaction->getTitleForMail();
    if ($title !== null) {
      $header_lines[] = $title;
    }
    $comment = $xaction->getBodyForMail();
    if ($comment !== null) {
      $comment_blocks[] = $comment;
    }
  }
  $body->addRawSection(implode("\n", $header_lines));
  foreach ($comment_blocks as $comment) {
    $body->addRemarkupSection($comment);
  }
}
/**
 * Let each custom field with the TRANSACTIONMAIL role contribute to the
 * mail body. No-op for objects without custom fields.
 *
 * @task mail
 */
protected function addCustomFieldsToMailBody(
PhabricatorMetaMTAMailBody $body,
PhabricatorLiskDAO $object,
array $xactions) {
if ($object instanceof PhabricatorCustomFieldInterface) {
$field_list = PhabricatorCustomField::getObjectFields(
$object,
PhabricatorCustomField::ROLE_TRANSACTIONMAIL);
$field_list->setViewer($this->getActor());
// Field values must be loaded before they can render themselves.
$field_list->readFieldsFromStorage($object);
foreach ($field_list->getFields() as $field) {
$field->updateTransactionMailBody(
$body,
$this,
$xactions);
}
}
}
/* -( Publishing Feed Stories )-------------------------------------------- */
/**
 * Hook: return true if these transactions should publish a feed story.
 * Subclasses which support feed override this and return true.
 *
 * @task feed
 */
protected function shouldPublishFeedStory(
PhabricatorLiskDAO $object,
array $xactions) {
return false;
}
/**
 * Hook: class name of the feed story implementation to publish.
 *
 * @task feed
 */
protected function getFeedStoryType() {
return 'PhabricatorApplicationTransactionFeedStory';
}
/**
 * List the PHIDs a feed story about these transactions relates to.
 *
 * @task feed
 */
protected function getFeedRelatedPHIDs(
  PhabricatorLiskDAO $object,
  array $xactions) {
  // The object itself and the acting user are always related.
  $phids = array(
    $object->getPHID(),
    $this->getActingAsPHID(),
  );
  // Tagged projects are related too, so project feeds pick the story up.
  if ($object instanceof PhabricatorProjectInterface) {
    $project_phids = PhabricatorEdgeQuery::loadDestinationPHIDs(
      $object->getPHID(),
      PhabricatorProjectObjectHasProjectEdgeType::EDGECONST);
    $phids = array_merge($phids, array_values($project_phids));
  }
  return $phids;
}
/**
 * List the PHIDs which should be notified about the feed story: everyone
 * who would receive mail, without duplicates.
 *
 * @task feed
 */
protected function getFeedNotifyPHIDs(
  PhabricatorLiskDAO $object,
  array $xactions) {
  $phids = array_merge(
    $this->getMailTo($object),
    $this->getMailCC($object));
  return array_unique($phids);
}
/**
 * Build the data dictionary for the published feed story.
 *
 * @task feed
 */
protected function getFeedStoryData(
  PhabricatorLiskDAO $object,
  array $xactions) {
  // Order transactions from strongest to weakest for story rendering.
  $ordered = array_reverse(msort($xactions, 'getActionStrength'));
  return array(
    'objectPHID' => $object->getPHID(),
    'transactionPHIDs' => mpull($ordered, 'getPHID'),
  );
}
/**
 * Publish a feed story for the applied transactions, skipping any which
 * are hidden from feed. No-op if nothing is visible.
 *
 * @task feed
 */
protected function publishFeedStory(
PhabricatorLiskDAO $object,
array $xactions,
array $mailed_phids) {
$xactions = mfilter($xactions, 'shouldHideForFeed', true);
if (!$xactions) {
return;
}
// These were computed (and stored on the editor) before publishing began.
$related_phids = $this->feedRelatedPHIDs;
$subscribed_phids = $this->feedNotifyPHIDs;
$story_type = $this->getFeedStoryType();
$story_data = $this->getFeedStoryData($object, $xactions);
id(new PhabricatorFeedStoryPublisher())
->setStoryType($story_type)
->setStoryData($story_data)
->setStoryTime(time())
->setStoryAuthorPHID($this->getActingAsPHID())
->setRelatedPHIDs($related_phids)
->setPrimaryObjectPHID($object->getPHID())
->setSubscribedPHIDs($subscribed_phids)
->setMailRecipientPHIDs($mailed_phids)
->setMailTags($this->getMailTags($object, $xactions))
->publish();
}
/* -( Search Index )------------------------------------------------------- */
/**
 * Hook: return true if the object should be reindexed for search after
 * transactions apply.
 *
 * @task search
 */
protected function supportsSearch() {
return false;
}
/**
 * Hook: optional extra context passed to the search indexer.
 *
 * @task search
 */
protected function getSearchContextParameter(
PhabricatorLiskDAO $object,
array $xactions) {
return null;
}
/* -( Herald Integration )-------------------------------------------------- */
/**
 * Hook: return true to run Herald rules against these transactions.
 */
protected function shouldApplyHeraldRules(
PhabricatorLiskDAO $object,
array $xactions) {
return false;
}
/**
 * Hook: build the Herald adapter which exposes this object to rules.
 * Subclasses which enable Herald must override this.
 */
protected function buildHeraldAdapter(
PhabricatorLiskDAO $object,
array $xactions) {
throw new Exception(pht('No herald adapter specified.'));
}
// Store the adapter used for this edit so hooks can inspect it later.
private function setHeraldAdapter(HeraldAdapter $adapter) {
$this->heraldAdapter = $adapter;
return $this;
}
// Adapter from the most recent Herald evaluation, if rules were applied.
protected function getHeraldAdapter() {
return $this->heraldAdapter;
}
// Store the transcript from rule evaluation so hooks can inspect it later.
private function setHeraldTranscript(HeraldTranscript $transcript) {
$this->heraldTranscript = $transcript;
return $this;
}
// Transcript from the most recent Herald evaluation, if rules were applied.
protected function getHeraldTranscript() {
return $this->heraldTranscript;
}
/**
 * Run Herald rules against the object and return any transactions the
 * rules queued (plus any from the didApplyHeraldRules() hook).
 */
private function applyHeraldRules(
PhabricatorLiskDAO $object,
array $xactions) {
$adapter = $this->buildHeraldAdapter($object, $xactions);
$adapter->setContentSource($this->getContentSource());
$adapter->setIsNewObject($this->getIsNewObject());
if ($this->getApplicationEmail()) {
$adapter->setApplicationEmail($this->getApplicationEmail());
}
$xscript = HeraldEngine::loadAndApplyRules($adapter);
// Retain the adapter and transcript for later hooks (e.g. mail headers).
$this->setHeraldAdapter($adapter);
$this->setHeraldTranscript($xscript);
return array_merge(
$this->didApplyHeraldRules($object, $adapter, $xscript),
$adapter->getQueuedTransactions());
}
/**
 * Hook: return additional transactions to apply after Herald rules run.
 */
protected function didApplyHeraldRules(
PhabricatorLiskDAO $object,
HeraldAdapter $adapter,
HeraldTranscript $transcript) {
return array();
}
/* -( Custom Fields )------------------------------------------------------ */
/**
 * Resolve the custom field a transaction targets via its
 * "customfield:key" metadata, validating that the field exists and
 * supports ApplicationTransactions.
 *
 * @task customfield
 */
private function getCustomFieldForTransaction(
PhabricatorLiskDAO $object,
PhabricatorApplicationTransaction $xaction) {
$field_key = $xaction->getMetadataValue('customfield:key');
if (!$field_key) {
throw new Exception(
pht(
"Custom field transaction has no '%s'!",
'customfield:key'));
}
$field = PhabricatorCustomField::getObjectField(
$object,
PhabricatorCustomField::ROLE_APPLICATIONTRANSACTIONS,
$field_key);
if (!$field) {
throw new Exception(
pht(
"Custom field transaction has invalid '%s'; field '%s' ".
"is disabled or does not exist.",
'customfield:key',
$field_key));
}
if (!$field->shouldAppearInApplicationTransactions()) {
throw new Exception(
pht(
"Custom field transaction '%s' does not implement ".
"integration for %s.",
$field_key,
'ApplicationTransactions'));
}
$field->setViewer($this->getActor());
return $field;
}
/* -( Files )-------------------------------------------------------------- */
/**
 * Extract the PHIDs of any files which these transactions attach.
 *
 * Files are found in remarkup blocks and in application-specific
 * transaction data, then filtered by the actor's visibility.
 *
 * @task files
 */
private function extractFilePHIDs(
PhabricatorLiskDAO $object,
array $xactions) {
$blocks = array();
foreach ($xactions as $xaction) {
$blocks[] = $this->getRemarkupBlocksFromTransaction($xaction);
}
$blocks = array_mergev($blocks);
$phids = array();
if ($blocks) {
// Files embedded in remarkup, like "{F123}".
$phids[] = PhabricatorMarkupEngine::extractFilePHIDsFromEmbeddedFiles(
$this->getActor(),
$blocks);
}
foreach ($xactions as $xaction) {
// Files referenced by application-specific transaction types.
$phids[] = $this->extractFilePHIDsFromCustomTransaction(
$object,
$xaction);
}
$phids = array_unique(array_filter(array_mergev($phids)));
if (!$phids) {
return array();
}
// Only let a user attach files they can actually see, since this would
// otherwise let you access any file by attaching it to an object you have
// view permission on.
$files = id(new PhabricatorFileQuery())
->setViewer($this->getActor())
->withPHIDs($phids)
->execute();
return mpull($files, 'getPHID');
}
/**
 * Hook: return file PHIDs referenced by application-specific transaction
 * types (beyond embedded remarkup files).
 *
 * @task files
 */
protected function extractFilePHIDsFromCustomTransaction(
PhabricatorLiskDAO $object,
PhabricatorApplicationTransaction $xaction) {
return array();
}
/**
 * Write "object has file" edges for each file the transactions attach.
 *
 * @task files
 */
private function attachFiles(
  PhabricatorLiskDAO $object,
  array $file_phids) {
  // No referenced files; avoid a pointless edge write.
  if (!$file_phids) {
    return;
  }
  $source_phid = $object->getPHID();
  $edge_type = PhabricatorObjectHasFileEdgeType::EDGECONST;
  $editor = new PhabricatorEdgeEditor();
  foreach ($file_phids as $file_phid) {
    $editor->addEdge($source_phid, $edge_type, $file_phid);
  }
  $editor->save();
}
/**
 * For an edge transaction, apply the mirror-image transaction to each
 * object on the other end of the edge (for example, when a task is added
 * to a project, record the edit on the project too).
 */
private function applyInverseEdgeTransactions(
PhabricatorLiskDAO $object,
PhabricatorApplicationTransaction $xaction,
$inverse_type) {
$old = $xaction->getOldValue();
$new = $xaction->getNewValue();
// Keys present only in the new value were added; only in the old, removed.
$add = array_keys(array_diff_key($new, $old));
$rem = array_keys(array_diff_key($old, $new));
$add = array_fuse($add);
$rem = array_fuse($rem);
$all = $add + $rem;
$nodes = id(new PhabricatorObjectQuery())
->setViewer($this->requireActor())
->withPHIDs($all)
->execute();
foreach ($nodes as $node) {
if (!($node instanceof PhabricatorApplicationTransactionInterface)) {
continue;
}
if ($node instanceof PhabricatorUser) {
// TODO: At least for now, don't record inverse edge transactions
// for users (for example, "alincoln joined project X"): Feed fills
// this role instead.
continue;
}
$editor = $node->getApplicationTransactionEditor();
$template = $node->getApplicationTransactionTemplate();
$target = $node->getApplicationTransactionObject();
if (isset($add[$node->getPHID()])) {
$edge_edit_type = '+';
} else {
$edge_edit_type = '-';
}
$template
->setTransactionType($xaction->getTransactionType())
->setMetadataValue('edge:type', $inverse_type)
->setNewValue(
array(
$edge_edit_type => array($object->getPHID() => $object->getPHID()),
));
// Mark the sub-editor as an inverse-edge editor so it does not recurse
// back and try to write the forward edge again.
$editor
->setContinueOnNoEffect(true)
->setContinueOnMissingFields(true)
->setParentMessageID($this->getParentMessageID())
->setIsInverseEdgeEditor(true)
->setActor($this->requireActor())
->setActingAsPHID($this->getActingAsPHID())
->setContentSource($this->getContentSource());
$editor->applyTransactions($target, array($template));
}
}
/* -( Workers )------------------------------------------------------------ */
/**
 * Load any object state which is required to publish transactions.
 *
 * This hook is invoked in the main process before we compute data related
 * to publishing transactions (like email "To" and "CC" lists), and again in
 * the worker before publishing occurs.
 *
 * @return object Publishable object.
 * @task workers
 */
protected function willPublish(PhabricatorLiskDAO $object, array $xactions) {
// By default, the object as applied is already publishable.
return $object;
}
/**
 * Convert the editor state to a serializable dictionary which can be passed
 * to a worker.
 *
 * This data will be loaded with @{method:loadWorkerState} in the worker.
 *
 * @return dict<string, wild> Serializable editor state.
 * @task workers
 */
final private function getWorkerState() {
  // Copy each automatically-managed property into the state dictionary.
  $state = array();
  foreach ($this->getAutomaticStateProperties() as $property) {
    $state[$property] = $this->$property;
  }
  // Add the values which need explicit handling.
  $state['excludeMailRecipientPHIDs'] = $this->getExcludeMailRecipientPHIDs();
  $state['custom'] = $this->getCustomWorkerState();
  return $state;
}
/**
 * Hook; return custom properties which need to be passed to workers.
 *
 * @return dict<string, wild> Custom properties.
 * @task workers
 */
protected function getCustomWorkerState() {
return array();
}
/**
 * Load editor state using a dictionary emitted by @{method:getWorkerState}.
 *
 * This method is used to load state when running worker operations.
 *
 * @param dict<string, wild> Editor state, from @{method:getWorkerState}.
 * @return this
 * @task workers
 */
final public function loadWorkerState(array $state) {
  // Restore every automatically-managed property first.
  foreach ($this->getAutomaticStateProperties() as $property) {
    $this->$property = idx($state, $property);
  }
  // Then restore the explicitly-managed values.
  $this->setExcludeMailRecipientPHIDs(
    idx($state, 'excludeMailRecipientPHIDs', array()));
  $this->loadCustomWorkerState(idx($state, 'custom', array()));
  return $this;
}
/**
 * Hook; set custom properties on the editor from data emitted by
 * @{method:getCustomWorkerState}.
 *
 * @param dict<string, wild> Custom state,
 *   from @{method:getCustomWorkerState}.
 * @return this
 * @task workers
 */
protected function loadCustomWorkerState(array $state) {
return $this;
}
/**
 * Get a list of object properties which should be automatically sent to
 * workers in the state data.
 *
 * These properties will be automatically stored and loaded by the editor in
 * the worker.
 *
 * @return list<string> List of properties.
 * @task workers
 */
private function getAutomaticStateProperties() {
return array(
'parentMessageID',
'disableEmail',
'isNewObject',
'heraldEmailPHIDs',
'heraldForcedEmailPHIDs',
'heraldHeader',
'mailToPHIDs',
'mailCCPHIDs',
'feedNotifyPHIDs',
'feedRelatedPHIDs',
);
}
}
| UNCC-OpenProjects/Phabricator | src/applications/transactions/editor/PhabricatorApplicationTransactionEditor.php | PHP | apache-2.0 | 93,035 |
package me.code4fun.roboq;
import android.os.Handler;
/**
 * A HandlerCallback which converts each response into a Runnable and posts
 * it to the wrapped handler's thread.
 *
 * @since 0.1
 */
public abstract class PostRunnableCallback extends HandlerCallback {

    public PostRunnableCallback(Handler handler) {
        super(handler);
    }

    @Override
    public void onResponse(Request req, Response resp, Exception error) {
        // Subclasses may return null to indicate there is nothing to run.
        Runnable task = createRunnable(req, resp, error);
        if (task == null) {
            return;
        }
        processRunnable(task);
    }

    /** Build the Runnable to execute for this response; may return null. */
    protected abstract Runnable createRunnable(Request req, Response resp, Exception error);

    /** Dispatch the Runnable; by default, post it to the wrapped handler. */
    protected void processRunnable(Runnable r) {
        handler.post(r);
    }
}
| gaorx/Roboq | Roboq/src/me/code4fun/roboq/PostRunnableCallback.java | Java | apache-2.0 | 611 |
/*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.bo;
import java.util.*;
import java.io.*;
import vnmr.util.*;
import vnmr.ui.*;
import vnmr.ui.shuf.*;
/**
* There is one StatementHistory object per objType. The StatementHistory
* object maintains a buffer of statements. Each "statement" is simply
* represented by a hashtable.
*
* <p>StatementHistory has one additional responsibility, and that is,
* remembering the last statement of a particular statement type.
*
* @author Mark Cao
*/
public class StatementHistory implements Serializable {
// ==== static variables
/** maximum buffer length */
private static final int MAXLEN = 20;
// ==== instance variables
/** shuffler service */
private ShufflerService shufflerService;
/** History buffer. Place latest statements at end. */
private Vector buffer=null;
/** Buffer pointer. The buffer pointer points at the statement
* currently being displayed. If the buffer is empty, the pointer
* has value -1. */
private int bufPointer=-1;
/** The last buffer that was appended. */
private int lastAppendedBuf;
/** Most previous active bufPointer when append occured */
private int prevBufPointer;
/** statement listeners */
private Vector listeners;
/** previous statements, keyed by type */
private Hashtable prevStatements;
/** object type this StatementHistory is used for. */
private String objectType;
/**
 * constructor
 * @param shufflerService service used to obtain default statements
 * @param objType object type this history belongs to
 * @param buf previously saved history buffer, or null to start empty
 * @param bufPointer index into buf of the current statement (-1 if empty)
 */
public StatementHistory(ShufflerService shufflerService, String objType,
Vector buf, int bufPointer){
this.shufflerService = shufflerService;
objectType = objType;
this.buffer = buf;
this.bufPointer = bufPointer;
// No saved buffer was supplied; start with an empty history.
if(buffer == null)
buffer = new Vector(MAXLEN);
listeners = new Vector();
prevStatements = new Hashtable();
} // StatementHistory()
/**
 * Add a statement listener. Such listeners are notified of new
 * statements and of back/forward availability changes.
 * @param listener listener
 */
public void addStatementListener(StatementListener listener) {
listeners.addElement(listener);
} // addStatementListener
/**
 * Append a statement to buffer. Note that append automatically
 * resets the buffer pointer to the end. The statement is also
 * remembered as the most recent of its "Statement_type", and all
 * listeners are notified.
 * @param statement statement
 */
public void append(Hashtable statement) {
if(statement == null)
return;
// prior to appending, delete a statement if necessary
while (buffer.size() >= MAXLEN) {
buffer.removeElementAt(0);
}
// Store a clone so later edits to the caller's table do not mutate history.
buffer.addElement(statement.clone());
bufPointer = buffer.size() - 1;
String statementType = (String)statement.get("Statement_type");
prevStatements.put(statementType, statement);
for (Enumeration en = listeners.elements(); en.hasMoreElements(); ) {
StatementListener listener = (StatementListener)en.nextElement();
listener.newStatement(statement);
listener.backMovabilityChanged(canGoBack());
listener.forwardMovabilityChanged(canGoForward());
}
} // append()
/**
 * Take a copy of the current statement, modify it with the given
 * key/value pair, and append the new (modified) statement to the
 * history buffer.
 *
 * For "Attribute-N" and "AttrValue-N" keys, this also maintains
 * "-prev-1/2/3" bookkeeping entries so previously used attribute
 * names and values can be restored later.
 * @param key key
 * @param value value
 */
public void append(String key, Object value) {
String str1, str2, str3;
String val;
Hashtable statement = getCurrentStatement();
if (statement != null) {
Hashtable newStatement = (Hashtable)statement.clone();
newStatement.put(key, value);
// If Attribute- set AttrValue- to its prev value or all.
// If Attribute- or AttrValue-, save the previous 3 values.
if(key.startsWith("Attribute-")) {
// If Attribute- set AttrValue- to its prev value or all.
String digit = key.substring(key.length() -1);
String attrVal = new String("AttrValue-" + digit);
String inVal = value.toString();
String attrPrev = inVal.concat("-prev-1");
// Try to get the prev value for this Attribute
val = (String) statement.get(attrPrev);
if(val != null) {
// Yes, use it.
newStatement.put(attrVal, val);
}
else
// No prev value, default to all
newStatement.put(attrVal, "all");
// Only save if it has changed since the last time we
// saved a prev-1
// Create the three key names for the Attribute-
str1 = key.concat("-prev-1");
str2 = key.concat("-prev-2");
str3 = key.concat("-prev-3");
String prevAttrName = (String)statement.get(key);
String prevVal = (String) statement.get(str1);
if(prevVal == null || !prevAttrName.equals(prevVal)) {
// rotate -1 and -2 up to -2 and -3 if they exist.
val = (String)newStatement.get(str2);
if(val != null)
newStatement.put(str3, val);
val = (String)newStatement.get(str1);
if(val != null)
newStatement.put(str2, val);
// Save the previous one.
newStatement.put(str1, prevAttrName);
}
// Also save the AttrValue- value we had.
// Create the three key names using the actual attr name
str1 = prevAttrName.concat("-prev-1");
str2 = prevAttrName.concat("-prev-2");
str3 = prevAttrName.concat("-prev-3");
// Get the current value of AttrValue-
String curVal = (String) statement.get(attrVal);
// Get the value of the -prev-1 entry
prevVal = (String) statement.get(str1);
// Only save the values if it has changed since the
// last time we saved a prev-1
if(prevVal == null || !curVal.equals(prevVal)) {
// rotate -1 and -2 up to -2 and -3 if they exist.
val = (String)newStatement.get(str2);
if(val != null)
newStatement.put(str3, val);
val = (String)newStatement.get(str1);
if(val != null)
newStatement.put(str2, val);
// Save the previous one.
newStatement.put(str1, curVal);
}
}
else if(key.startsWith("AttrValue-")) {
// Get the Attribute- name itself that goes with
// this AttrValue-
String digit = key.substring(key.length() -1);
String attr = new String("Attribute-" + digit);
String attrName = (String) newStatement.get(attr);
// Create the three key names using the actual attr name
str1 = attrName.concat("-prev-1");
str2 = attrName.concat("-prev-2");
str3 = attrName.concat("-prev-3");
// rotate -1 and -2 up to -2 and -3 if they exist.
val = (String)newStatement.get(str2);
if(val != null)
newStatement.put(str3, val);
val = (String)newStatement.get(str1);
if(val != null)
newStatement.put(str2, val);
newStatement.put(str1, value);
}
append(newStatement);
}
} // append()
/**
 * Append the last statement of the given type. Note that memory
 * of these previous statements is not limited to what's in the
 * buffer.
 *
 * <p>If the given statementType has not been encountered before,
 * query a default value from the shuffler service.
 * @param statementType statement type
 */
public void appendLastOfType(String statementType) {
// Remember where we were so removeLastAppendedStatement() can undo this.
prevBufPointer = bufPointer;
Hashtable statement = (Hashtable)prevStatements.get(statementType);
if (statement == null)
statement = shufflerService.getDefaultStatement(statementType);
append(statement);
// If no new statement, don't allow anything to be removed later.
if(prevBufPointer == bufPointer)
lastAppendedBuf = -1;
else
lastAppendedBuf = bufPointer;
} // appendLastOfType()
/******************************************************************
 * Summary: Return the last statement of this type.
 *
 * Falls back to the shuffler service's default statement if no
 * statement of this type has been seen. Also records the current
 * buffer position in prevBufPointer (side effect).
 *****************************************************************/
public Hashtable getLastOfType(String statementType) {
prevBufPointer = bufPointer;
Hashtable statement = (Hashtable)prevStatements.get(statementType);
if (statement == null)
statement = shufflerService.getDefaultStatement(statementType);
return statement;
}
/************************************************** <pre>
 * Summary: Remove the last statement which was appended and restore
 * statement to where it was before the last append.
 *
 * Uses prevBufPointer / lastAppendedBuf recorded by
 * appendLastOfType(); a lastAppendedBuf of -1 means there is
 * nothing to remove.
 </pre> **************************************************/
public void removeLastAppendedStatement() {
// Go to the most previous position
if(prevBufPointer >= 0 && prevBufPointer < buffer.size())
goToStatementByIndex(prevBufPointer);
// Remove the last appended statement.
if(lastAppendedBuf >= 0 && lastAppendedBuf < buffer.size())
buffer.removeElementAt(lastAppendedBuf);
// Fix the forward and backward arrows.
for (Enumeration en = listeners.elements();
en.hasMoreElements(); ) {
StatementListener listener =
(StatementListener)en.nextElement();
listener.backMovabilityChanged(canGoBack());
listener.forwardMovabilityChanged(canGoForward());
}
}
/************************************************** <pre>
 * Summary: Switch to the given statement without effecting the history.
 *
 </pre> **************************************************/
public void goToStatementByIndex(int newBufPointer) {
    // Ignore out-of-range indices; otherwise move and refresh the display.
    boolean inRange = (newBufPointer >= 0) && (newBufPointer < buffer.size());
    if (!inRange) {
        return;
    }
    bufPointer = newBufPointer;
    updateWithoutNewHistory();
}
/** Number of statements currently held in the history buffer. */
public int getNumInHistory() {
return buffer.size();
}
/**
 * is it possible to go back?
 * @return true if an earlier statement exists in the buffer
 */
public boolean canGoBack() {
return bufPointer > 0;
} // canGoBack()
/**
 * Move the buffer pointer to the previous statement and notify all
 * listeners. Does nothing if already at the oldest statement.
 */
public void goBack() {
    if (bufPointer <= 0) {
        return;
    }
    bufPointer--;
    Hashtable statement = (Hashtable) buffer.elementAt(bufPointer);
    // Tell every listener about the new statement and arrow states.
    for (Enumeration en = listeners.elements(); en.hasMoreElements(); ) {
        StatementListener listener = (StatementListener) en.nextElement();
        listener.newStatement(statement);
        listener.backMovabilityChanged(canGoBack());
        listener.forwardMovabilityChanged(canGoForward());
    }
} // goBack()
/**
 * is it possible to go forward?
 * @return true if a later statement exists in the buffer
 */
public boolean canGoForward() {
return bufPointer + 1 < buffer.size();
} // canGoForward()
/**
 * Advance the buffer pointer to the next statement and notify all
 * listeners. Does nothing when already at the newest statement.
 */
public void goForward() {
    if (!canGoForward()) {
        return;
    }
    bufPointer++;
    Hashtable statement = (Hashtable) buffer.elementAt(bufPointer);
    // Tell every listener about the new statement and arrow states.
    for (Enumeration en = listeners.elements(); en.hasMoreElements(); ) {
        StatementListener listener = (StatementListener) en.nextElement();
        listener.newStatement(statement);
        listener.backMovabilityChanged(canGoBack());
        listener.forwardMovabilityChanged(canGoForward());
    }
} // goForward()
/**
 * Get the statement the buffer pointer currently refers to. When the
 * pointer is invalid (empty history), fall back to the shuffler
 * service's default statement.
 * @return current statement
 */
public Hashtable getCurrentStatement() {
    boolean valid = (bufPointer >= 0) && (bufPointer < buffer.size());
    if (!valid) {
        String type = shufflerService.getDefaultStatementType();
        return shufflerService.getDefaultStatement(type);
    }
    return (Hashtable) buffer.elementAt(bufPointer);
} // getCurrentStatement()
/**
 * Update the current shuffler panels, but no change has taken
 * place that caused the need for a history update. This is
 * primarily for use after adding or removing a file from the
 * DB so that we can get the panels updated.
 */
public void updateWithoutNewHistory() {
StatementListener listener;
Enumeration en;
// If the locator is not being used, get out of here
if(FillDBManager.locatorOff())
return;
try {
Hashtable statement = getCurrentStatement();
// If there is nothing in 'buffer', then append this one
if(buffer.size() == 0) {
append(statement);
}
// If there is already something in 'buffer', then we will have
// the most recent one. So, do not append to history.
else {
for (en = listeners.elements(); en.hasMoreElements();) {
listener =(StatementListener)en.nextElement();
listener.newStatement(statement);
listener.backMovabilityChanged(canGoBack());
listener.forwardMovabilityChanged(canGoForward());
}
}
}
catch(Exception e) {
Messages.postError("Problem updating locator statement");
Messages.writeStackTrace(e);
}
}
/**
 * Update the current shuffler panels, but no change has taken
 * place that caused the need for a history update. This is
 * primarily for use after adding or removing a file from the
 * DB so that we can get the panels updated. Disallow statements
 * ending in 'internal use'. They are for temp internal use and
 * should not be gone to as a default history.
 */
public void updateWithoutNewHistoryNotInternal() {
StatementListener listener;
Enumeration en;
Hashtable statement=null;
boolean foundOne=false;
try {
// Get the current statement in case the buffer is empty
statement = getCurrentStatement();
// Start by looking at the current Statement, if it has a
// menuString of 'by objtype', go to the next previous one
// and test it until we find one which is not by that name.
while (bufPointer >= 0 && buffer.size() > 0) {
statement = (Hashtable)buffer.elementAt(bufPointer);
String menuString = (String) statement.get("MenuString");
if(!menuString.endsWith("internal use")) {
// We found one that is not 'internal use', so break out
// of here and use this statement.
foundOne = true;
break;
}
// try the next previous one
bufPointer--;
}
// If there is already something in 'buffer', then we will have
// the most recent one. So, do not append to history.
if(foundOne) {
for (en = listeners.elements(); en.hasMoreElements();) {
listener =(StatementListener)en.nextElement();
listener.newStatement(statement);
listener.backMovabilityChanged(canGoBack());
listener.forwardMovabilityChanged(canGoForward());
}
}
else {
// If we did not find one, then default to the standard
// default statement.
String statementType =shufflerService.getDefaultStatementType();
statement = shufflerService.getDefaultStatement(statementType);
append(statement);
}
}
catch(Exception e) {
Messages.postError("Problem updating locator statement");
Messages.writeStackTrace(e);
}
}
/** Write out the current shuffler statement to a named file.
 * This one is for backwards compatibility before label was used.
 * @param name file name; also used as the menu label
 */
public void writeCurStatement(String name) {
// Simply pass name as the label as well as the name itself.
writeCurStatement(name, name);
}
/** Write out the current shuffler statement to a named file.
 * The statement is serialized under USER/LOCATOR/&lt;name&gt; (spaces in
 * the name become underscores) and the spotter menu is refreshed.
 * @param name file name to save under
 * @param label menu label stored in the statement, or null for none
 */
public void writeCurStatement(String name, String label) {
Hashtable curStatement;
//String dir, shufDir;
String filepath;
String filename;
ObjectOutput out;
//File file;
curStatement = getCurrentStatement();
//dir = System.getProperty("userdir");
//shufDir = new String(dir + "/shuffler");
//file = new File(shufDir);
// If this directory does not exist, make it.
//if(!file.exists()) {
// file.mkdir();
//}
if(label != null) {
// Set a value in the statement with the label string
curStatement.put("MenuLabel", label);
}
// Convert all spaces in the name to '_'.
filename = name.replace(' ', '_');
//filepath = new String (shufDir + "/" + filename);
filepath=FileUtil.savePath("USER/LOCATOR/"+filename);
try {
out = new ObjectOutputStream(new FileOutputStream(filepath));
// Write it out.
out.writeObject(curStatement);
out.close();
}
catch (Exception e) {
Messages.writeStackTrace(e);
}
// Update the spotter menu.
for (Enumeration en = listeners.elements(); en.hasMoreElements(); ) {
StatementListener listener = (StatementListener)en.nextElement();
listener.saveListChanged();
}
// The following is not really necessary, but when a programmable
// button is pressed and a new statement is written, there is no
// visible sign that it has completed. The following at least
// causes a blink of the locator table area to let the user know
// something has happened.
SessionShare sshare = ResultTable.getSshare();
StatementHistory history = sshare.statementHistory();
if(history != null)
history.updateWithoutNewHistory();
}
/** Remove the Saved Statement by this name from the disk.
 * @param name name of the saved statement; spaces become '_'
 */
public void removeSavedStatement(String name) {
    // Convert all spaces in the name to '_' (matches writeCurStatement()).
    String filename = name.replace(' ', '_');
    String filepath = FileUtil.savePath("USER/LOCATOR/" + filename);

    // Remove the file.  The original ignored the delete() result; warn
    // instead of failing silently so the user knows the entry remains.
    File file = new File(filepath);
    if (!file.delete()) {
        Messages.postWarning("Could not remove saved statement file " + filepath);
    }

    // Update the spotter menu.
    for (Enumeration en = listeners.elements(); en.hasMoreElements(); ) {
        StatementListener listener = (StatementListener) en.nextElement();
        listener.saveListChanged();
    }
}
/** Read a saved shuffler statement by this name and make it current.
 *
 * If the statement belongs to a different object type than the active
 * one, the locator switches to that type's history first and appends
 * the statement there instead.
 *
 * @param name file name of the saved statement; spaces become '_'
 */
public void readNamedStatement(String name) {
    // Convert all spaces in the name to '_' (mirrors writeCurStatement()).
    String filename = name.replace(' ', '_');
    String filepath = FileUtil.savePath("USER/LOCATOR/" + filename);
    if (filepath == null)
        return;

    Hashtable statement;
    ObjectInputStream in = null;
    try {
        in = new ObjectInputStream(new FileInputStream(filepath));
        // Read it in.
        statement = (Hashtable) in.readObject();
    }
    catch (Exception e) {
        Messages.writeStackTrace(e);
        Messages.postWarning("This button has not been programmed. "
                + "\n Press and hold 3 sec to program it with the current "
                + "Locator search.");
        return;
    }
    finally {
        // Close in 'finally' so a failed read no longer leaks the stream.
        if (in != null) {
            try {
                in.close();
            }
            catch (Exception closeEx) {
                // Nothing useful to do if close itself fails.
            }
        }
    }

    // Get the objType for the statement we read in and check whether it
    // differs from the StatementHistory we are in now.  A null objType
    // (malformed file) falls through to a plain append instead of an NPE.
    String objType = (String) statement.get("ObjectType");
    if (objType != null && !objType.equals(objectType)) {
        // The object type has changed: set the new object type as the
        // active one and append to THAT type's StatementHistory, not
        // the one we are in now.
        SessionShare sshare = ResultTable.getSshare();
        LocatorHistory lh = sshare.getLocatorHistory();
        // Set History Active Object type to this type.
        lh.setActiveObjType(objType);
        // Now get history for this type.
        StatementHistory history = sshare.statementHistory();
        // If history is null, this button may not have been programmed.
        if (history == null) {
            Messages.postWarning("This button has not been programmed. "
                    + "\n Press and hold 3 sec to program it with the current "
                    + "Locator search.");
            return;
        }
        // Append to history list and make it the current statement.
        history.append(statement);
    }
    else {
        // Append to history list and make it the current statement.
        append(statement);
    }
}
/** Get the list of named statements from the locator save directory.
 *
 * @return an ArrayList of ArrayLists where each inner list has length 2
 *         and contains the file name and the menu label
 */
public ArrayList getNamedStatementList() {
    String dir = FileUtil.savePath("USER/LOCATOR");
    File file = new File(dir);
    File[] list = file.listFiles();
    // If the directory does not exist, act on an empty list.
    if (list == null)
        list = new File[0];

    SessionShare sshare = ResultTable.getSshare();
    LocatorHistory lHistory = sshare.getLocatorHistory();
    ArrayList allStatementTypes = lHistory.getallStatementTypes();
    ArrayList menuList = new ArrayList();

    // Go thru the files and only keep the ones with spotter types
    // which are current.
    for (int i = 0; i < list.length; i++) {
        Hashtable statement;
        ObjectInputStream in = null;
        try {
            in = new ObjectInputStream(new FileInputStream(list[i]));
            // Read it in.
            statement = (Hashtable) in.readObject();
        }
        catch (ClassNotFoundException e) {
            continue;
        }
        catch (FileNotFoundException e) {
            continue;
        }
        catch (IOException e) {
            continue;
        }
        finally {
            // Always close the stream; the original leaked it whenever
            // one of the catch blocks above was hit.
            if (in != null) {
                try {
                    in.close();
                }
                catch (IOException closeEx) {
                    // Nothing useful to do if close itself fails.
                }
            }
        }

        // This value is of the form objtype/menu_string,
        // e.g. 'vnmr_data/by type'.
        String statementType = (String) statement.get("Statement_type");
        // Keep only statements whose type still exists.
        if (allStatementTypes.contains(statementType)) {
            // Use the value of MenuLabel for the menu here, unless it
            // has the key string of '**skip**'; in that case do not put
            // this item into the menu.
            String menuLabel = (String) statement.get("MenuLabel");
            String name = list[i].getName();
            // Convert all '_' back to spaces for display.
            name = name.replace('_', ' ');
            if (menuLabel == null) {
                // No label saved: the name doubles as the label.
                ArrayList nameNlabel = new ArrayList(2);
                nameNlabel.add(name);
                nameNlabel.add(name);
                menuList.add(nameNlabel);
            }
            else if (!menuLabel.equals("**skip**")) {
                ArrayList nameNlabel = new ArrayList(2);
                nameNlabel.add(name);
                nameNlabel.add(menuLabel);
                menuList.add(nameNlabel);
            }
            // else: '**skip**' -- deliberately omitted from the menu.
        }
    }
    return menuList;
}
/** @return the ShufflerService this history uses (e.g. for default statements). */
public ShufflerService getShufflerService() {
    return shufflerService;
}
/** @return the raw history buffer of statements (the live Vector, not a copy). */
public Vector getBuffer() {
    return buffer;
}
/** @return the index of the current statement within the history buffer. */
public int getBufPointer() {
    return bufPointer;
}
/************************************************** <pre>
 * Summary: Update the column width values for the current statement.
 *
 * The args are fractions of the total width, where the sum of
 * the 4 widths = 1.0.
 *
 </pre> **************************************************/
public void updateCurStatementWidth(double colWidth0, double colWidth1,
                                    double colWidth2, double colWidth3) {
    Hashtable curStatement = getCurrentStatement();
    // Double.valueOf() replaces the deprecated 'new Double()' constructor
    // and may reuse cached instances.
    curStatement.put("colWidth0", Double.valueOf(colWidth0));
    curStatement.put("colWidth1", Double.valueOf(colWidth1));
    curStatement.put("colWidth2", Double.valueOf(colWidth2));
    curStatement.put("colWidth3", Double.valueOf(colWidth3));
}
} // class StatementHistory
| OpenVnmrJ/OpenVnmrJ | src/vnmrj/src/vnmr/bo/StatementHistory.java | Java | apache-2.0 | 27,390 |
package ngo.music.soundcloudplayer.boundary.fragment.real;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import ngo.music.soundcloudplayer.R;
import ngo.music.soundcloudplayer.adapters.SCSongAdapter;
import ngo.music.soundcloudplayer.adapters.SCSearchSongAdapter;
import ngo.music.soundcloudplayer.boundary.MusicPlayerMainActivity;
import ngo.music.soundcloudplayer.boundary.fragment.abstracts.SoundCloudExploreFragment;
import ngo.music.soundcloudplayer.controller.SongController;
import ngo.music.soundcloudplayer.general.Constants;
import ngo.music.soundcloudplayer.service.MusicPlayerService;
/**
 * Explore fragment that lists SoundCloud songs matching the user's
 * current search query.
 */
public class SCSongSearchFragment extends SoundCloudExploreFragment {

    public SCSongSearchFragment() {
        super();
        // Capture the query last entered in the main activity so this
        // fragment shows results for the latest search.
        query = MusicPlayerMainActivity.query;
    }

    /**
     * @return the explore category handled by this fragment (SEARCH --
     *         presumably declared by a superclass or Constants; confirm origin)
     */
    @Override
    protected int getCategory() {
        return SEARCH;
    }
}
| FabioNgo/sound-cloud-player | Source Code/src/ngo/music/soundcloudplayer/boundary/fragment/real/SCSongSearchFragment.java | Java | apache-2.0 | 1,109 |
package com.sequenceiq.cloudbreak.cloud.event.validation;
import com.sequenceiq.cloudbreak.cloud.event.CloudPlatformResult;
/**
 * Result event for a file-system validation request, delegating all
 * state to {@link CloudPlatformResult}.
 */
public class FileSystemValidationResult extends CloudPlatformResult {

    /** Result carrying only the resource id (no error information). */
    public FileSystemValidationResult(Long resourceId) {
        super(resourceId);
    }

    /** Result carrying a status reason and the exception that caused it. */
    public FileSystemValidationResult(String statusReason, Exception errorDetails, Long resourceId) {
        super(statusReason, errorDetails, resourceId);
    }
}
| hortonworks/cloudbreak | cloud-reactor-api/src/main/java/com/sequenceiq/cloudbreak/cloud/event/validation/FileSystemValidationResult.java | Java | apache-2.0 | 453 |
package edu.searchahouse.endpoints.aop;
import java.util.stream.Collectors;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.hateoas.VndErrors;
import org.springframework.http.HttpStatus;
import org.springframework.validation.ObjectError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import edu.searchahouse.exceptions.EntityNotFoundException;
import edu.searchahouse.exceptions.EntityNotUpdatedException;
//@formatter:off
/**
*
* Advice class that check if a rest endpoint throws an exception and change the response code and message according.
*
*
* 1XX: Information
*
* 2XX: Success
* 200 - OK:
* -> Everything worked
* 201 - Created (mostly use for POST):
* -> The server has successfully created a new resource
* -> Newly created resource's location returned in the Location header
* 202 - Accepted:
* -> The server has accepted the request, but it is not yet complete
* -> A location to determine the request's current status can be returned in the location header
* 3XX: Redirection
*
* 4XX: Client Error
* 400 - Bad Request:
* -> Malformed syntax
* -> Should not be repeated without modification
* 401 - Unauthorized:
* -> Authentication is required
* -> Includes a WWW-Authentication header
* 403 - Forbidden:
* -> Server has understood but refused to honor the request
* -> Should not be repeated without modification
* 404 - Not Found:
* -> The server cannot find a resource matching a URI
* 406 - Not Acceptable:
* -> The server can only return response entities that do not match the client's Accept header
* 409 - Conflict:
* -> The resource is in a state that is in conflict with the request
* -> Client should attempt to rectify the conflict and then resubmit the request
* 422 - Unprocessable Entity:
* -> The resource already exist.
*
* 5XX: Server Error (if the app is responding a 500 code it means we have a bug or didn't thought all possible scenarios.)
*
*
* @author Gustavo Orsi
*
*/
// @formatter:on
@ControllerAdvice
public class ExceptionControllerAdvice {

    /**
     * Maps {@link EntityNotFoundException} thrown by any endpoint to a
     * 404 (Not Found) response.
     *
     * Renamed from the copy-pasted 'courseNotFoundExceptionHandler': it
     * handles any entity, and Spring dispatches handlers by annotation,
     * not by method name.
     *
     * @param ex the exception raised by an endpoint
     * @return a VndErrors body describing the missing entity
     */
    @ResponseBody
    @ExceptionHandler(EntityNotFoundException.class)
    @ResponseStatus(HttpStatus.NOT_FOUND)
    VndErrors entityNotFoundExceptionHandler(EntityNotFoundException ex) {
        return new VndErrors(HttpStatus.NOT_FOUND.getReasonPhrase(), ex.getMessage());
    }

    /**
     * Maps {@link IllegalArgumentException} to a 400 (Bad Request) response.
     *
     * @param ex the exception raised by an endpoint
     * @return a VndErrors body with the offending argument's message
     */
    @ResponseBody
    @ExceptionHandler(IllegalArgumentException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    VndErrors illegalArgumentExceptionHandler(IllegalArgumentException ex) {
        return new VndErrors(HttpStatus.BAD_REQUEST.getReasonPhrase(), ex.getMessage());
    }

    /**
     * Catch-all: any otherwise-unhandled exception becomes a
     * 500 (Internal Server Error).  Per the class header, reaching this
     * handler indicates a bug or an unanticipated scenario.
     *
     * @param ex the unhandled exception
     * @return a VndErrors body with the exception's localized message
     */
    @ResponseBody
    @ExceptionHandler(Exception.class)
    @ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
    VndErrors internalServerErrorHandler(Exception ex) {
        return new VndErrors(HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase(), ex.getLocalizedMessage());
    }

    /**
     * Maps bean-validation failures on request bodies to a
     * 400 (Bad Request), joining all binding error messages into one
     * description.
     *
     * Renamed from the misleading 'internalServerErrorHandler' overload,
     * which actually returned a 400; handler dispatch is by annotation,
     * so the rename is safe.
     *
     * @param ex the validation failure
     * @return a VndErrors body listing every field error message
     */
    @ResponseBody
    @ExceptionHandler(MethodArgumentNotValidException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    VndErrors methodArgumentNotValidHandler(MethodArgumentNotValidException ex) {
        String errorMessage = ex.getBindingResult().getAllErrors()
                .stream()
                .map(ObjectError::getDefaultMessage)
                .collect(Collectors.toList()).toString();
        return new VndErrors(HttpStatus.BAD_REQUEST.getReasonPhrase(), errorMessage);
    }

    /**
     * Maps {@link EntityNotUpdatedException} to a 400 (Bad Request) response.
     *
     * @param ex the exception raised by an endpoint
     * @return a VndErrors body with the exception's localized message
     */
    @ResponseBody
    @ExceptionHandler(EntityNotUpdatedException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    VndErrors entityNotUpdatedExceptionHandler(EntityNotUpdatedException ex) {
        return new VndErrors(HttpStatus.BAD_REQUEST.getReasonPhrase(), ex.getLocalizedMessage());
    }

    /**
     * Maps duplicate-key violations to a 400 (Bad Request) response,
     * surfacing the most specific cause so the client can tell which
     * value clashed.
     *
     * @param ex the duplicate-key violation
     * @return a VndErrors body with the root-cause message
     */
    @ResponseBody
    @ExceptionHandler(DuplicateKeyException.class)
    @ResponseStatus(HttpStatus.BAD_REQUEST)
    VndErrors duplicateFieldHandler(DuplicateKeyException ex) {
        return new VndErrors(HttpStatus.BAD_REQUEST.getReasonPhrase(), ex.getMostSpecificCause().getLocalizedMessage());
    }
}
| gustavoorsi/searchahouse.com | searchahouse/src/main/java/edu/searchahouse/endpoints/aop/ExceptionControllerAdvice.java | Java | apache-2.0 | 4,756 |
package com.aishang.app.ui.MyBuyAndSale;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import butterknife.Bind;
import butterknife.ButterKnife;
import com.aishang.app.R;
import com.aishang.app.data.model.JRentalListResult;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
/**
* Created by song on 2016/2/17.
*/
/**
 * RecyclerView adapter that renders the user's rental listings
 * ({@link JRentalListResult.RentalItem}) in the "My Buy and Sale" screen.
 */
public class RentAdapter extends RecyclerView.Adapter<RentAdapter.ViewHolder> {

    // Backing list of rental items displayed by this adapter.
    List<JRentalListResult.RentalItem> items;

    @Inject public RentAdapter() {
        items = new ArrayList<JRentalListResult.RentalItem>();
    }

    @Override public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_buy, parent, false);
        return new ViewHolder(view);
    }

    @Override public void onBindViewHolder(final ViewHolder holder, int position) {
        final JRentalListResult.RentalItem item = items.get(position);
        holder.name.setText(item.getName());
        holder.address.setText("地址:" + item.getAddress());
        // NOTE(review): the room count value is never appended to this
        // label -- looks unfinished; confirm the intended text.
        holder.roomNum.setText("房间数:");
        // NOTE(review): both branches set the identical text, so the
        // status value is effectively ignored -- confirm intended labels.
        if (item.getStatus() == 0) {
            holder.status.setText("状态");
        } else {
            holder.status.setText("状态");
        }
        // Show only the date portion (text before the first space) of the
        // rental start/end timestamps.
        holder.rentDate.setText(
            "出租时间:" + item.getResStartDate().split(" ")[0] + "-" + item.getResEndDate().split(" ")[0]);
        holder.price.setText(item.getPriceText());
        holder.itemView.setOnClickListener(new View.OnClickListener() {
            @Override public void onClick(View v) {
                // Row clicks are currently a no-op; detail navigation was
                // left commented out:
                //intentToDetail(holder.getContext(), hotel.getHotelID(), hotel.getName());
            }
        });
    }

    @Override public int getItemCount() {
        return items.size();
    }

    public List<JRentalListResult.RentalItem> getItems() {
        return items;
    }

    public void setItems(List<JRentalListResult.RentalItem> items) {
        this.items = items;
    }

    // Placeholder for navigating to a rental detail screen; never filled in.
    private void intentToDetail(Context ctx, int hotelID, String hotelName) {
    }

    /**
     * Holds the row views for one rental item, injected by ButterKnife.
     *
     * @author ButterKnifeZelezny, plugin for Android Studio by Avast Developers
     *         (http://github.com/avast)
     */
    static class ViewHolder extends RecyclerView.ViewHolder {
        @Bind(R.id.name) TextView name;
        @Bind(R.id.status) TextView status;
        @Bind(R.id.room_num) TextView roomNum;
        @Bind(R.id.rent_date) TextView rentDate;
        @Bind(R.id.address) TextView address;
        @Bind(R.id.price) TextView price;

        public Context getContext() {
            return this.itemView.getContext();
        }

        ViewHolder(View view) {
            super(view);
            ButterKnife.bind(this, view);
        }
    }
} | find-happiness/android-aishang | app/src/main/java/com/aishang/app/ui/MyBuyAndSale/RentAdapter.java | Java | apache-2.0 | 2,867 |
$(document).ready(function () {
    google.charts.load('current', { packages: ['corechart', 'bar'] });
    google.charts.setOnLoadCallback(drawBuildSummary);

    // Renders the elapsed-time histogram and wires category drill-down.
    function drawBuildSummary() {
        var elem = $('#elapsed_time_chart');
        var data = [['Elapsed Time', 'Count']];
        var categories = [];

        // The data attribute carries per-bucket counts separated by ';'.
        // Categorize elapsed time based on their range.
        var values = elem.attr('data-values').split(';');
        var eTime = ["0", "0", "0", "0", "0", "0"];
        // The original callback declared duplicate '_' parameters, which is
        // a SyntaxError in strict mode; use the index forEach provides.
        values.forEach(function (str, i) {
            eTime[i] = str;
        });

        // Buckets are decades: 0~10s, 10~100s, 100~1000s, ...
        var digits = 1;
        var lowerRange = '0 ~ ';
        for (var i in eTime) {
            var upperRange = Math.pow(10, digits);
            var strRange = lowerRange + upperRange + 's';
            // Explicit radix avoids legacy octal parsing of values like "08".
            data.push([strRange, parseInt(eTime[i], 10)]);
            categories.push(strRange);
            lowerRange = upperRange + ' ~ ';
            digits = digits + 1;
        }

        var dataTable = google.visualization.arrayToDataTable(data);
        var options = {
            title: 'Elapsed Time',
            curveType: 'function',
            bar: { groupWidth: '75%' },
            isStacked: true
        };
        var chart = new google.visualization.BarChart(elem.get(0));
        chart.draw(dataTable, options);

        // Clicking a bar submits the hidden category form for that bucket.
        google.visualization.events.addListener(chart, 'select', function () {
            var selectedItem = chart.getSelection()[0];
            if (selectedItem) {
                var category = categories[selectedItem.row];
                $('#category_form_kind').attr('value', category);
                var form = $('#category_form').submit();
            }
        });
    }
});
| jaredpar/jenkins | Dashboard/Scripts/elapsed-time.js | JavaScript | apache-2.0 | 1,729 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.hints;
import java.io.File;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BooleanSupplier;
import java.util.function.Predicate;
import java.util.function.Supplier;
import com.google.common.util.concurrent.RateLimiter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.FSReadError;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.service.StorageService;
/**
* A multi-threaded (by default) executor for dispatching hints.
*
* Most of dispatch is triggered by {@link HintsDispatchTrigger} running every ~10 seconds.
*/
final class HintsDispatchExecutor
{
    private static final Logger logger = LoggerFactory.getLogger(HintsDispatchExecutor.class);

    // Directory containing the hints files named by HintsDescriptor.fileName().
    private final File hintsDirectory;
    // Pool that runs DispatchHintsTask / TransferHintsTask instances.
    private final ExecutorService executor;
    // Shared pause flag, polled between files so dispatch can be suspended.
    private final AtomicBoolean isPaused;
    // Endpoint liveness check used to abort delivery to a downed node.
    private final Predicate<InetAddressAndPort> isAlive;
    // At most one in-flight dispatch future per target host id (see dispatch()).
    private final Map<UUID, Future> scheduledDispatches;

    HintsDispatchExecutor(File hintsDirectory, int maxThreads, AtomicBoolean isPaused, Predicate<InetAddressAndPort> isAlive)
    {
        this.hintsDirectory = hintsDirectory;
        this.isPaused = isPaused;
        this.isAlive = isAlive;

        scheduledDispatches = new ConcurrentHashMap<>();
        executor = new JMXEnabledThreadPoolExecutor(maxThreads, 1, TimeUnit.MINUTES,
                                                    new LinkedBlockingQueue<>(),
                                                    new NamedThreadFactory("HintsDispatcher", Thread.MIN_PRIORITY),
                                                    "internal");
    }

    /*
     * It's safe to terminate dispatch in process and to deschedule dispatch.
     */
    void shutdownBlocking()
    {
        scheduledDispatches.clear();
        executor.shutdownNow();
        try
        {
            // Give in-flight tasks up to a minute to wind down.
            executor.awaitTermination(1, TimeUnit.MINUTES);
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
    }

    /** @return true if a dispatch future is currently tracked for this store's host id. */
    boolean isScheduled(HintsStore store)
    {
        return scheduledDispatches.containsKey(store.hostId);
    }

    /** Schedules dispatch of the store's hints to its own host id. */
    Future dispatch(HintsStore store)
    {
        return dispatch(store, store.hostId);
    }

    /** Schedules dispatch of the store's hints to the given host id (at most one task per host). */
    Future dispatch(HintsStore store, UUID hostId)
    {
        /*
         * It is safe to perform dispatch for the same host id concurrently in two or more threads,
         * however there is nothing to win from it - so we don't.
         *
         * Additionally, having just one dispatch task per host id ensures that we'll never violate our per-destination
         * rate limit, without having to share a ratelimiter between threads.
         *
         * It also simplifies reasoning about dispatch sessions.
         */
        return scheduledDispatches.computeIfAbsent(hostId, uuid -> executor.submit(new DispatchHintsTask(store, hostId)));
    }

    /** Schedules transfer of ALL stores' hints to a single target host (used on decommission). */
    Future transfer(HintsCatalog catalog, Supplier<UUID> hostIdSupplier)
    {
        return executor.submit(new TransferHintsTask(catalog, hostIdSupplier));
    }

    /** Blocks until any scheduled dispatch for this store's host has finished. */
    void completeDispatchBlockingly(HintsStore store)
    {
        Future future = scheduledDispatches.get(store.hostId);
        try
        {
            if (future != null)
                future.get();
        }
        catch (ExecutionException | InterruptedException e)
        {
            throw new RuntimeException(e);
        }
    }

    /** Deschedules and cancels (with interruption) any dispatch for the given host id. */
    void interruptDispatch(UUID hostId)
    {
        Future future = scheduledDispatches.remove(hostId);

        if (null != future)
            future.cancel(true);
    }

    private final class TransferHintsTask implements Runnable
    {
        private final HintsCatalog catalog;

        /*
         * Supplies target hosts to stream to. Generally returns the one the DynamicSnitch thinks is closest.
         * We use a supplier here to be able to get a new host if the current one dies during streaming.
         */
        private final Supplier<UUID> hostIdSupplier;

        private TransferHintsTask(HintsCatalog catalog, Supplier<UUID> hostIdSupplier)
        {
            this.catalog = catalog;
            this.hostIdSupplier = hostIdSupplier;
        }

        @Override
        public void run()
        {
            UUID hostId = hostIdSupplier.get();
            InetAddressAndPort address = StorageService.instance.getEndpointForHostId(hostId);
            logger.info("Transferring all hints to {}: {}", address, hostId);
            if (transfer(hostId))
                return;

            logger.warn("Failed to transfer all hints to {}: {}; will retry in {} seconds", address, hostId, 10);

            try
            {
                // Back off briefly, then ask the supplier for a (possibly new) target.
                TimeUnit.SECONDS.sleep(10);
            }
            catch (InterruptedException e)
            {
                throw new RuntimeException(e);
            }

            hostId = hostIdSupplier.get();
            logger.info("Transferring all hints to {}: {}", address, hostId);
            if (!transfer(hostId))
            {
                logger.error("Failed to transfer all hints to {}: {}", address, hostId);
                throw new RuntimeException("Failed to transfer all hints to " + hostId);
            }
        }

        // Dispatches every store's hints to hostId; true only if no files remain afterwards.
        private boolean transfer(UUID hostId)
        {
            catalog.stores()
                   .map(store -> new DispatchHintsTask(store, hostId))
                   .forEach(Runnable::run);

            return !catalog.hasFiles();
        }
    }

    private final class DispatchHintsTask implements Runnable
    {
        private final HintsStore store;
        private final UUID hostId;
        private final RateLimiter rateLimiter;

        DispatchHintsTask(HintsStore store, UUID hostId)
        {
            this.store = store;
            this.hostId = hostId;

            // rate limit is in bytes per second. Uses Double.MAX_VALUE if disabled (set to 0 in cassandra.yaml).
            // max rate is scaled by the number of nodes in the cluster (CASSANDRA-5272).
            // the goal is to bound maximum hints traffic going towards a particular node from the rest of the cluster,
            // not total outgoing hints traffic from this node - this is why the rate limiter is not shared between
            // all the dispatch tasks (as there will be at most one dispatch task for a particular host id at a time).
            int nodesCount = Math.max(1, StorageService.instance.getTokenMetadata().getSizeOfAllEndpoints() - 1);
            int throttleInKB = DatabaseDescriptor.getHintedHandoffThrottleInKB() / nodesCount;
            this.rateLimiter = RateLimiter.create(throttleInKB == 0 ? Double.MAX_VALUE : throttleInKB * 1024);
        }

        public void run()
        {
            try
            {
                dispatch();
            }
            finally
            {
                // Always deschedule so a future dispatch() call can run for this host.
                scheduledDispatches.remove(hostId);
            }
        }

        // Drains the store's queue of hints files, stopping on pause or delivery failure.
        private void dispatch()
        {
            while (true)
            {
                if (isPaused.get())
                    break;

                HintsDescriptor descriptor = store.poll();
                if (descriptor == null)
                    break;

                try
                {
                    if (!dispatch(descriptor))
                        break;
                }
                catch (FSReadError e)
                {
                    logger.error(String.format("Failed to dispatch hints file %s: file is corrupted", descriptor.fileName()), e);
                    store.cleanUp(descriptor);
                    store.markCorrupted(descriptor);
                    throw e;
                }
            }
        }

        /*
         * Will return true if dispatch was successful, false if we hit a failure (destination node went down, for example).
         */
        private boolean dispatch(HintsDescriptor descriptor)
        {
            logger.trace("Dispatching hints file {}", descriptor.fileName());

            InetAddressAndPort address = StorageService.instance.getEndpointForHostId(hostId);
            if (address != null)
                return deliver(descriptor, address);

            // address == null means the target no longer exist; find new home for each hint entry.
            convert(descriptor);
            return true;
        }

        // Streams one hints file to the target, resuming from any saved offset;
        // on partial delivery the position is recorded and the file re-queued.
        private boolean deliver(HintsDescriptor descriptor, InetAddressAndPort address)
        {
            File file = new File(hintsDirectory, descriptor.fileName());
            InputPosition offset = store.getDispatchOffset(descriptor);

            BooleanSupplier shouldAbort = () -> !isAlive.test(address) || isPaused.get();
            try (HintsDispatcher dispatcher = HintsDispatcher.create(file, rateLimiter, address, descriptor.hostId, shouldAbort))
            {
                if (offset != null)
                    dispatcher.seek(offset);

                if (dispatcher.dispatch())
                {
                    store.delete(descriptor);
                    store.cleanUp(descriptor);
                    logger.info("Finished hinted handoff of file {} to endpoint {}: {}", descriptor.fileName(), address, hostId);
                    return true;
                }
                else
                {
                    store.markDispatchOffset(descriptor, dispatcher.dispatchPosition());
                    store.offerFirst(descriptor);
                    logger.info("Finished hinted handoff of file {} to endpoint {}: {}, partially", descriptor.fileName(), address, hostId);
                    return false;
                }
            }
        }

        // for each hint in the hints file for a node that isn't part of the ring anymore, write RF hints for each replica
        private void convert(HintsDescriptor descriptor)
        {
            File file = new File(hintsDirectory, descriptor.fileName());

            try (HintsReader reader = HintsReader.open(file, rateLimiter))
            {
                reader.forEach(page -> page.hintsIterator().forEachRemaining(HintsService.instance::writeForAllReplicas));
                store.delete(descriptor);
                store.cleanUp(descriptor);
                logger.info("Finished converting hints file {}", descriptor.fileName());
            }
        }
    }

    /** @return true while dispatch is externally paused. */
    public boolean isPaused()
    {
        return isPaused.get();
    }

    /** @return true if any per-host dispatch task is currently scheduled. */
    public boolean hasScheduledDispatches()
    {
        return !scheduledDispatches.isEmpty();
    }
}
| jasonstack/cassandra | src/java/org/apache/cassandra/hints/HintsDispatchExecutor.java | Java | apache-2.0 | 11,594 |
package com.vladmihalcea.book.hpjp.spring.transaction.jta.dao;
import java.io.Serializable;
/**
 * Minimal generic data-access contract shared by concrete DAOs.
 *
 * @param <T>  entity type managed by the DAO
 * @param <ID> type of the entity's primary key; must be serializable
 *
 * @author Vlad Mihalcea
 */
public interface GenericDAO<T, ID extends Serializable> {

    /**
     * Looks up an entity by its primary key.
     *
     * @param id the primary key
     * @return the matching entity (behavior when absent is implementation-defined)
     */
    T findById(ID id);

    /**
     * Persists the given entity.
     *
     * @param entity the entity to store
     * @return the persisted (managed) instance
     */
    T persist(T entity);
}
| vladmihalcea/high-performance-java-persistence | core/src/test/java/com/vladmihalcea/book/hpjp/spring/transaction/jta/dao/GenericDAO.java | Java | apache-2.0 | 237 |
// Connection status label (global; presumably read by UI code elsewhere -- TODO confirm).
var s = "Connected";
| wojons/scalr | app/www/ui2/js/connection.js | JavaScript | apache-2.0 | 21 |
require 'rails_helper'
feature 'Pages' do
scenario 'show article page' do
user = create(:user)
article = create(:article, author: user, status: :published)
visit root_path
fill_in '名前', with: user.name
fill_in 'Password', with: user.password
click_button 'Log in'
visit articles_path
expect(page).to have_content 'MyString'
end
scenario '下書きは一覧に表示されない' do
user = create(:user)
article = create(:article, author: user, status: :draft)
visit root_path
fill_in '名前', with: user.name
fill_in 'Password', with: user.password
click_button 'Log in'
visit articles_path
expect(page).not_to have_content 'MyString'
end
end | tanaka-yoshi10/knowledge | spec/features/pages_spec.rb | Ruby | apache-2.0 | 725 |
import React from 'react';
import Reflux from 'reflux';
import ChatActions from '../events/chat-actions';
import ChatStore from '../events/chat-store';
import classNames from 'classnames';
import {deepEqual} from '../events/chat-store-utils';
import {Dropdown, MenuItem} from 'react-bootstrap';
// Header pane for the selected group or integration: shows the group name,
// a follow star, a "New topic" action and a follow/unfollow dropdown.
var GroupHeaderPane = React.createClass({
    mixins: [Reflux.connect(ChatStore, 'store')],

    shouldComponentUpdate: function(nextProps, nextState) {
        // Re-render only when one of these store paths actually changed.
        return !deepEqual(this.state.store, nextState.store,
            ["selectedGroup.id",
                "selectedGroup.followed",
                "selectedIntegration.id",
                "selectedIntegrationGroupTopic.id",
                "selectedTopic.id",
                "selectedIntegrationTopic.id",
                "topics.id",
                "integrationTopics.id"]);
    },

    // "New topic" click: clear the current topic selection.
    onClick: function () {
        if (this.state.store.selectedGroup) {
            ChatActions.selectTopic();
        } else if (this.state.store.selectedIntegrationGroup) {
            ChatActions.selectIntegrationTopic(this.state.store.selectedIntegration, this.state.store.selectedIntegrationGroup);
        }
    },

    // Toggle follow state for the selected (non-integration) group.
    onFollowStateChange: function (newFollowState) {
        if (this.state.store.selectedGroup) {
            ChatActions.groupFollowStatusChange(this.state.store.selectedGroup, newFollowState);
        }
    },

    render: function () {
        var self = this;
        // "selected" when no topic is chosen; "enabled" when any group is.
        var newTopicClass = classNames({
            ["selected"]: !(self.state.store.selectedTopic || self.state.store.selectedIntegrationTopic),
            ["enabled"]: ((self.state.store.selectedGroup || self.state.store.selectedIntegrationGroup))
        });
        // Plain groups render as "# name"; integration groups/integrations
        // render by their bare name; empty when nothing is selected.
        var groupHeader = !self.state.store.selectedIntegration &&
            !self.state.store.selectedGroup ? "" :
            (!self.state.store.selectedIntegration && self.state.store.selectedGroup ? ("# " + self.state.store.selectedGroup.name) :
                    (self.state.store.selectedGroup ? self.state.store.selectedGroup.name : self.state.store.selectedIntegration.name)
            );
        var followedClass = classNames({
            ["followed"]: self.state.store.selectedGroup && self.state.store.selectedGroup.followed
        });
        var followTitle = self.state.store.selectedGroup && self.state.store.selectedGroup.followed ? "Unfollow" : "Follow";
        // Star and dropdown are only shown for plain groups, not integrations.
        var star = self.state.store.selectedGroup ? (
            <span className="glyphicon glyphicon-star-empty star" onClick={self.onFollowStateChange.bind(this, !self.state.store.selectedGroup.followed)}></span>
        ) : null;
        var dropDown = self.state.store.selectedGroup ? (
            <Dropdown id="group-drop-down" className="pull-right">
                <a href="#" bsRole="toggle">
                    <span className="glyphicon glyphicon-chevron-down"></span>
                </a>
                <Dropdown.Menu bsRole="menu">
                    <MenuItem eventKey="1" onSelect={self.onFollowStateChange.bind(this, !self.state.store.selectedGroup.followed)}>{followTitle}</MenuItem>
                </Dropdown.Menu>
            </Dropdown>
        ) : null;
        return self.state.store.selectedGroup || self.state.store.selectedIntegration ? (<div id="group-header-pane">
            <a id="new-topic" className={newTopicClass}
               onClick={self.onClick}>
                New topic
            </a>

            <div id="group-header" className={followedClass}>{groupHeader}
                {star}
                {dropDown}
            </div>
            <div className="clearfix"></div>
        </div>) : ( <div className="space"></div>);
    }
});

export default GroupHeaderPane; | JetChat/JetChat | public/javascripts/components/group-header-pane.js | JavaScript | apache-2.0 | 3,767 |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for converting the Code2Seq dataset to a PLUR dataset.
"""
import os
import tarfile
import apache_beam as beam
from plur.stage_1.plur_dataset import Configuration
from plur.stage_1.plur_dataset import PlurDataset
from plur.utils import constants
from plur.utils import util
from plur.utils.graph_to_output_example import GraphToOutputExample
from plur.utils.graph_to_output_example import GraphToOutputExampleNotValidError
import tqdm
class Code2SeqDataset(PlurDataset):
  # pylint: disable=line-too-long
  """Converting data from code2seq dataset to a PLUR dataset.
  The dataset is used in: Alon, Uri, et al. 'code2seq: Generating sequences from
  structured representations of code.' arXiv preprint arXiv:1808.01400 (2018).
  The task is to predict the function name given the function body.
  The provided dataset by code2seq are the tokenized function name, and the AST
  paths. Therefore we have to create our own graph representation of code2seq.
  We try to mimic the code2seq model by constructing a graph similar to figure
  3 in the code2seq paper. An example of such graph is shown in
  https://drive.google.com/file/d/1-cH0FzYIMikgTkUpzVkEZDGjoiqBB9C1/view?usp=sharing.
  In short, we build the AST path subtree and connects all AST paths with a
  code2seq root node to make it a graph.
  """
  # Download metadata for the three preprocessed archives released by the
  # code2seq authors, keyed by archive filename. The sha1sum is used by the
  # base class to verify the downloaded file.
  _URLS_SMALL = {
      'java-small-preprocessed.tar.gz': {
          'url': 'https://s3.amazonaws.com/code2seq/datasets/java-small-preprocessed.tar.gz',
          'sha1sum': '857c2495785f606ab99676c7bbae601ea2160f66',
      }
  }
  _URLS_MED = {
      'java-med-preprocessed.tar.gz': {
          'url': 'https://s3.amazonaws.com/code2seq/datasets/java-med-preprocessed.tar.gz',
          'sha1sum': '219e558ddf46678ef322ff75bf1982faa1b6204d',
      }
  }
  _URLS_LARGE = {
      'java-large-preprocessed.tar.gz': {
          'url': 'https://s3.amazonaws.com/code2seq/datasets/java-large-preprocessed.tar.gz',
          'sha1sum': 'ebc229ba1838a3c8f3a69ab507eb26fa5460152a',
      }
  }
  # pylint: enable=line-too-long
  _GIT_URL = {}
  _DATASET_NAME = 'code2seq_dataset'
  _DATASET_DESCRIPTION = """\
  This dataset is used to train the code2seq model. The task is to predict the
  function name, given the ast paths sampled the function AST. An AST path is
  a path between two leaf nodes in the AST.
  """
  def __init__(self,
               stage_1_dir,
               configuration: Configuration = Configuration(),
               transformation_funcs=(),
               filter_funcs=(),
               user_defined_split_range=(),
               num_shards=1000,
               seed=0,
               dataset_size='small',
               deduplicate=False):
    """Initializes the dataset wrapper.

    Args:
      stage_1_dir: Directory where the stage-1 (downloaded/converted) data
        is stored.
      configuration: A Configuration instance forwarded to PlurDataset.
      transformation_funcs: Functions applied to each GraphToOutputExample.
      filter_funcs: Predicates; an example is dropped if any returns False.
      user_defined_split_range: Optional custom train/valid/test split.
      num_shards: Number of output shards.
      seed: Random seed used for splitting.
      dataset_size: One of 'small', 'med' or 'large'; selects which archive
        to download.
      deduplicate: Whether to deduplicate examples.

    Raises:
      ValueError: If dataset_size is not 'small', 'med' or 'large'.
    """
    # dataset_size can only be 'small', 'med' or 'large'.
    valid_dataset_size = {'small', 'med', 'large'}
    if dataset_size not in valid_dataset_size:
      raise ValueError('{} not in {}'.format(dataset_size,
                                             str(valid_dataset_size)))
    if dataset_size == 'small':
      urls = self._URLS_SMALL
    elif dataset_size == 'med':
      urls = self._URLS_MED
    else:
      urls = self._URLS_LARGE
    self.dataset_size = dataset_size
    super().__init__(self._DATASET_NAME, urls, self._GIT_URL,
                     self._DATASET_DESCRIPTION, stage_1_dir,
                     transformation_funcs=transformation_funcs,
                     filter_funcs=filter_funcs,
                     user_defined_split_range=user_defined_split_range,
                     num_shards=num_shards, seed=seed,
                     configuration=configuration, deduplicate=deduplicate)
  def download_dataset(self):
    """Download the dataset using requests and extract the tarfile."""
    super().download_dataset_using_requests()
    # Extract the tarfile depending on the dataset size.
    if self.dataset_size == 'small':
      self.code2seq_extracted_dir = os.path.join(
          self.raw_data_dir, 'java-small')
      tarfile_name = 'java-small-preprocessed.tar.gz'
    elif self.dataset_size == 'med':
      self.code2seq_extracted_dir = os.path.join(
          self.raw_data_dir, 'java-med')
      tarfile_name = 'java-med-preprocessed.tar.gz'
    else:
      self.code2seq_extracted_dir = os.path.join(
          self.raw_data_dir, 'java-large')
      tarfile_name = 'java-large-preprocessed.tar.gz'
    # check_need_to_extract() returns the archives that still need extraction
    # (empty if the extracted directory already exists).
    tarfiles_to_extract = []
    tarfiles_to_extract = util.check_need_to_extract(
        tarfiles_to_extract, self.code2seq_extracted_dir,
        tarfile_name)
    for filename in tarfiles_to_extract:
      dest = os.path.join(self.raw_data_dir, filename)
      with tarfile.open(dest, 'r:gz') as tf:
        # Member-by-member extraction so tqdm can show per-file progress.
        # NOTE(review): tf.extract() does not sanitize member paths
        # (tarfile path-traversal, CVE-2007-4559). The archives come from a
        # fixed, checksummed URL, but consider an extraction filter if the
        # source is ever less trusted — TODO confirm threat model.
        for member in tqdm.tqdm(
            tf.getmembers(),
            unit='file',
            desc='Extracting {}'.format(filename)):
          tf.extract(member, self.raw_data_dir)
  def get_all_raw_data_paths(self):
    """Get paths to all raw data."""
    # Get the filenames depending on the dataset size.
    if self.dataset_size == 'small':
      train_file = os.path.join(
          self.code2seq_extracted_dir, 'java-small.train.c2s')
      validation_file = os.path.join(
          self.code2seq_extracted_dir, 'java-small.val.c2s')
      test_file = os.path.join(
          self.code2seq_extracted_dir, 'java-small.test.c2s')
    elif self.dataset_size == 'med':
      train_file = os.path.join(
          self.code2seq_extracted_dir, 'java-med.train.c2s')
      validation_file = os.path.join(
          self.code2seq_extracted_dir, 'java-med.val.c2s')
      test_file = os.path.join(
          self.code2seq_extracted_dir, 'java-med.test.c2s')
    else:
      train_file = os.path.join(
          self.code2seq_extracted_dir, 'java-large.train.c2s')
      validation_file = os.path.join(
          self.code2seq_extracted_dir, 'java-large.val.c2s')
      test_file = os.path.join(
          self.code2seq_extracted_dir, 'java-large.test.c2s')
    return [train_file, validation_file, test_file]
  def raw_data_paths_to_raw_data_do_fn(self):
    """Returns a beam.DoFn subclass that reads the raw data."""
    return C2SExtractor(super().get_random_split,
                        bool(self.user_defined_split_range))
  def _construct_token_subtree(self, graph_to_output_example, token,
                               cur_node_id, token_root_name):
    # pylint: disable=line-too-long
    """Construct the token subtree in a AST path.
    We create a node for each subtoken in the token, all subtokens are connected
    to the next subtoken via the 'NEXT_SUBTOKEN' edge. All subtokens are
    connected to the token root node via the 'SUBTOKEN' edge. See the draw.io
    figure mentioned in the class doc for the visualization.
    Args:
      graph_to_output_example: A GraphToOutputExample instance.
      token: Starting or ending token in the AST path.
      cur_node_id: Next available node id.
      token_root_name: Node type and label for the token root node.
    Returns:
      A tuple of graph_to_output_example, cur_node_id, token_node_id.
      graph_to_output_example is updated with the token subtree, cur_node_id is
      the next available node id after all the token subtree nodes are added,
      and token_node_id is the node id of the root token node.
    """
    # Subtokens are '|'-separated in the raw data, e.g. 'get|file|name'.
    subtokens = token.split('|')
    subtoken_node_ids = []
    prev_subtoken_id = -1
    # Create a node each subtoken.
    for subtoken in subtokens:
      graph_to_output_example.add_node(cur_node_id, 'SUBTOKEN', subtoken)
      subtoken_node_ids.append(cur_node_id)
      # Connects to the previous subtoken node
      if prev_subtoken_id != -1:
        graph_to_output_example.add_edge(prev_subtoken_id, cur_node_id,
                                         'NEXT_SUBTOKEN')
      prev_subtoken_id = cur_node_id
      cur_node_id += 1
    # Add a root node for the token subtree.
    graph_to_output_example.add_node(cur_node_id, token_root_name,
                                     token_root_name)
    token_node_id = cur_node_id
    cur_node_id += 1
    # Connect all subtoken nodes to the token subtree root node.
    for node_id in subtoken_node_ids:
      graph_to_output_example.add_edge(token_node_id, node_id, 'SUBTOKEN')
    return graph_to_output_example, cur_node_id, token_node_id
  def _construct_ast_nodes_subtree(self, graph_to_output_example, ast_nodes,
                                   cur_node_id):
    """Construct the AST nodes subtree in a AST path.
    We create a node for each AST node in the AST path. Each AST node are
    connected to the next AST node via the 'NEXT_AST_NODE' edge. See the draw.io
    figure mentioned in the class doc for the visualization.
    Args:
      graph_to_output_example: A GraphToOutputExample instance.
      ast_nodes: AST nodes in the AST path.
      cur_node_id: Current available node id.
    Returns:
      A tuple of graph_to_output_example, cur_node_id, ast_node_ids.
      graph_to_output_example is updated with the ast nodes subtree,
      cur_node_id is the next available node id after all the ast nodes are
      added, and ast_node_ids the node ids of all AST nodes.
    """
    # AST node names are '|'-separated in the raw data.
    ast_nodes = ast_nodes.split('|')
    ast_node_ids = []
    prev_ast_node_id = -1
    # Create a node each AST node.
    for ast_node in ast_nodes:
      graph_to_output_example.add_node(cur_node_id, 'AST_NODE', ast_node)
      ast_node_ids.append(cur_node_id)
      # Connects to the previous AST node.
      if prev_ast_node_id != -1:
        graph_to_output_example.add_edge(prev_ast_node_id, cur_node_id,
                                         'NEXT_AST_NODE')
      prev_ast_node_id = cur_node_id
      cur_node_id += 1
    return graph_to_output_example, cur_node_id, ast_node_ids
  def raw_data_to_graph_to_output_example(self, raw_data):
    # pylint: disable=line-too-long
    """Convert raw data to the unified GraphToOutputExample data structure.
    The Code2Seq raw data contains the target function name, and the sampled
    AST paths. Each AST path starts and ends with a token, and a series of
    AST nodes that connects the two tokens. We use _construct_token_subtree
    to build the token subtree and _construct_ast_nodes_subtree to build the
    AST nodes subtree. Then, all AST paths' nodes are connected to a AST root
    node.
    All AST root nodes are connected to a single code2seq root node.
    https://drive.google.com/file/d/1-cH0FzYIMikgTkUpzVkEZDGjoiqBB9C1/view?usp=sharing
    shows an example of such a graph and the original AST path.
    Args:
      raw_data: A dictionary with 'split', 'target_label' and 'ast_paths' as keys.
        The value of the 'split' field is the split (train/valid/test) that the
        data belongs to. The value of the 'target_label' field is the function
        name. The value of the 'ast_paths' field is a list of AST paths.
    Raises:
      GraphToOutputExampleNotValidError if the GraphToOutputExample is not
      valid.
    Returns:
      A dictionary with keys 'split' and 'GraphToOutputExample'. Values are the
      split(train/validation/test) the data belongs to, and the
      GraphToOutputExample instance.
    """
    # pylint: enable=line-too-long
    split = raw_data['split']
    target_label = raw_data['target_label']
    ast_paths = raw_data['ast_paths']
    graph_to_output_example = GraphToOutputExample()
    # Node ids are assigned sequentially; cur_node_id is always the next
    # unused id and is threaded through the subtree-construction helpers.
    cur_node_id = 0
    ast_path_root_node_ids = []
    # This is the root node of all AST path nodes.
    graph_to_output_example.add_node(cur_node_id, 'C2C_ROOT', 'C2C_ROOT')
    c2c_root_node_id = cur_node_id
    cur_node_id += 1
    for ast_path in ast_paths:
      # Each ast_path is [start_token, ast_nodes, end_token].
      # The start_token subtree
      start_token = ast_path[0]
      graph_to_output_example, cur_node_id, start_token_node_id = (
          self._construct_token_subtree(
              graph_to_output_example, start_token, cur_node_id, 'START_TOKEN'))
      # The ast_nodes subtree
      ast_nodes = ast_path[1]
      graph_to_output_example, cur_node_id, ast_node_ids = (
          self._construct_ast_nodes_subtree(
              graph_to_output_example, ast_nodes, cur_node_id))
      # The end_token subtree
      end_token = ast_path[2]
      graph_to_output_example, cur_node_id, end_token_node_id = (
          self._construct_token_subtree(
              graph_to_output_example, end_token, cur_node_id, 'END_TOKEN'))
      # Connects the start_token root node with the first node in the
      # ast_nodes subtree.
      graph_to_output_example.add_edge(
          start_token_node_id, ast_node_ids[0], 'START_AST_PATH')
      # Connects the end_token root node with the last node in the
      # ast_nodes subtree.
      graph_to_output_example.add_edge(
          end_token_node_id, ast_node_ids[-1], 'END_AST_PATH')
      # Add a root AST path node representing the AST path.
      graph_to_output_example.add_node(
          cur_node_id, 'ROOT_AST_PATH', 'ROOT_AST_PATH')
      ast_path_root_node_id = cur_node_id
      ast_path_root_node_ids.append(ast_path_root_node_id)
      cur_node_id += 1
      # Connects the root AST path node with the start_token and end_token
      # subtree.
      graph_to_output_example.add_edge(
          ast_path_root_node_id, start_token_node_id, 'START_TOKEN')
      graph_to_output_example.add_edge(
          ast_path_root_node_id, end_token_node_id, 'END_TOKEN')
      # Connects the root AST path node with all nodes in the ast_nodes subtree.
      for node_id in ast_node_ids:
        graph_to_output_example.add_edge(ast_path_root_node_id, node_id,
                                         'AST_NODE')
    # Connects the code2seq root node with all AST path root node.
    for ast_path_root_node_id in ast_path_root_node_ids:
      graph_to_output_example.add_edge(c2c_root_node_id, ast_path_root_node_id,
                                       'AST_PATH')
    # The output is the function name, emitted one subtoken at a time.
    for subtoken in target_label.split('|'):
      graph_to_output_example.add_token_output(subtoken)
    for transformation_fn in self.transformation_funcs:
      graph_to_output_example = transformation_fn(graph_to_output_example)
    if not graph_to_output_example.check_if_valid():
      raise GraphToOutputExampleNotValidError(
          'Invalid GraphToOutputExample found {}'.format(
              graph_to_output_example))
    # A filtered-out example is returned with GraphToOutputExample == None;
    # downstream stages drop those entries.
    for filter_fn in self.filter_funcs:
      if not filter_fn(graph_to_output_example):
        graph_to_output_example = None
        break
    return {'split': split, 'GraphToOutputExample': graph_to_output_example}
class C2SExtractor(beam.DoFn):
  """Beam DoFn that reads and parses raw code2seq data files.

  Each input element is a path to a `.c2s` file; each output element is a
  dict with 'split', 'target_label' and 'ast_paths' keys, consumed by
  Code2SeqDataset.raw_data_to_graph_to_output_example().
  """
  def __init__(self, random_split_fn, use_random_split):
    """Initializes the extractor.

    Args:
      random_split_fn: Zero-arg callable returning a random split name.
      use_random_split: If True, ignore the file-derived split and use
        random_split_fn instead (user-defined split ranges).
    """
    self.random_split_fn = random_split_fn
    self.use_random_split = use_random_split
  def _read_data(self, file_path):
    """Read and parse the code2seq raw data file.
    Each line in the code2seq raw data file has the following format:
    '<token> <token>,<node1>,<node2>,<token> <token>,<node3>,<token>'
    The first token is the function name. The rest are the AST paths, separated
    with a whitespace.
    Args:
      file_path: Path to a code2seq data file.
    Yields:
      A tuple of the function name, and a list of AST paths.
    """
    # Explicit UTF-8: the default text encoding is platform-dependent
    # (e.g. cp1252 on Windows) and could mis-decode the token data.
    with open(file_path, encoding='utf-8') as f:
      for line in f:
        fields = line.rstrip().split(' ')
        # fields[0] is the target function name; the remaining fields are
        # comma-separated AST paths. Empty fields (from trailing spaces)
        # are skipped. The subtokens are still separated by '|', we handle
        # them together in self.raw_data_to_graph_to_output_example().
        target_label = fields[0]
        ast_paths = [field.split(',') for field in fields[1:] if field]
        yield target_label, ast_paths
  def _get_split(self, file_path):
    """Get the data split based on the filename suffix."""
    if file_path.endswith('train.c2s'):
      return constants.TRAIN_SPLIT_NAME
    elif file_path.endswith('val.c2s'):
      return constants.VALIDATION_SPLIT_NAME
    else:
      return constants.TEST_SPLIT_NAME
  def process(self, file_path):
    """Yields one raw-data dict per function in the file."""
    split = self._get_split(file_path)
    for target_label, ast_paths in self._read_data(file_path):
      yield {
          'split': self.random_split_fn() if self.use_random_split else split,
          'target_label': target_label,
          'ast_paths': ast_paths
      }
| google-research/plur | plur/stage_1/code2seq_dataset.py | Python | apache-2.0 | 16,900 |
package com.google.api.ads.dfp.jaxws.v201511;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for ProductPackageItemError.Reason.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="ProductPackageItemError.Reason">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="ARCHIVED_PRODUCT_NOT_ALLOWED"/>
* <enumeration value="INACTIVE_MANDATORY_PRODUCT_NOT_ALLOWED"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "ProductPackageItemError.Reason")
@XmlEnum
public enum ProductPackageItemErrorReason {

    /** Adding an archived product to a product package is not allowed. */
    ARCHIVED_PRODUCT_NOT_ALLOWED,

    /** An inactive mandatory product is not allowed in an active product package. */
    INACTIVE_MANDATORY_PRODUCT_NOT_ALLOWED,

    /** Returned when the actual value is not exposed by the requested API version. */
    UNKNOWN;

    /**
     * Returns the XML string form of this constant (identical to its name).
     *
     * @return the value used in the SOAP/XML payload
     */
    public String value() {
        return this.name();
    }

    /**
     * Looks up the constant corresponding to the given XML string value.
     *
     * @param v the XML string value
     * @return the matching enum constant
     * @throws IllegalArgumentException if no constant matches {@code v}
     */
    public static ProductPackageItemErrorReason fromValue(String v) {
        return ProductPackageItemErrorReason.valueOf(v);
    }
}
| gawkermedia/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201511/ProductPackageItemErrorReason.java | Java | apache-2.0 | 1,475 |
<?php
declare(strict_types=1);
// @link http://schemas.gdbots.io/json-schema/gdbots/ncr/command/unpublish-node/1-0-0.json#
namespace Gdbots\Schemas\Ncr\Command;
use Gdbots\Pbj\AbstractMessage;
use Gdbots\Pbj\Enum\Format;
use Gdbots\Pbj\FieldBuilder as Fb;
use Gdbots\Pbj\Schema;
use Gdbots\Pbj\Type as T;
use Gdbots\Schemas\Pbjx\Mixin\Command\CommandV1Mixin as GdbotsPbjxCommandV1Mixin;
/**
 * Command to unpublish a node in the NCR (node content repository).
 *
 * Generated from the pbj schema "gdbots:ncr:command:unpublish-node:1-0-0";
 * the schema constants and field definitions below are contract and must not
 * be edited by hand.
 */
final class UnpublishNodeV1 extends AbstractMessage
{
    const SCHEMA_ID = 'pbj:gdbots:ncr:command:unpublish-node:1-0-0';
    const SCHEMA_CURIE = 'gdbots:ncr:command:unpublish-node';
    const SCHEMA_CURIE_MAJOR = 'gdbots:ncr:command:unpublish-node:v1';

    const MIXINS = [
      'gdbots:pbjx:mixin:command:v1',
      'gdbots:pbjx:mixin:command',
      'gdbots:common:mixin:taggable:v1',
      'gdbots:common:mixin:taggable',
    ];

    use GdbotsPbjxCommandV1Mixin;

    /**
     * Defines the pbj schema for this command: the standard pbjx command
     * envelope fields (identity, context, tracing) plus the node_ref of the
     * node to unpublish.
     */
    protected static function defineSchema(): Schema
    {
        return new Schema(self::SCHEMA_ID, __CLASS__,
            [
                Fb::create('command_id', T\TimeUuidType::create())
                    ->required()
                    ->build(),
                Fb::create('occurred_at', T\MicrotimeType::create())
                    ->build(),
                /*
                 * Used to perform optimistic concurrency control.
                 * @link https://en.wikipedia.org/wiki/HTTP_ETag
                 */
                Fb::create('expected_etag', T\StringType::create())
                    ->maxLength(100)
                    ->pattern('^[\w\.:-]+$')
                    ->build(),
                /*
                 * Multi-tenant apps can use this field to track the tenant id.
                 */
                Fb::create('ctx_tenant_id', T\StringType::create())
                    ->pattern('^[\w\/\.:-]+$')
                    ->build(),
                /*
                 * The "ctx_retries" field is used to keep track of how many attempts were
                 * made to process this command. In some cases, the service or transport
                 * that handles the command may be down or an optimistic check has failed
                 * and is being retried.
                 */
                Fb::create('ctx_retries', T\TinyIntType::create())
                    ->build(),
                /*
                 * The "ctx_causator" is the actual causator object that "ctx_causator_ref"
                 * refers to. In some cases it's useful for command handlers to copy the
                 * causator into the command. For example, when a node is being updated we
                 * may want to know what the node will be after the update. We can derive
                 * this via the causator instead of requesting the node and engaging a race
                 * condition.
                 */
                Fb::create('ctx_causator', T\MessageType::create())
                    ->build(),
                Fb::create('ctx_causator_ref', T\MessageRefType::create())
                    ->build(),
                Fb::create('ctx_correlator_ref', T\MessageRefType::create())
                    ->build(),
                Fb::create('ctx_user_ref', T\MessageRefType::create())
                    ->build(),
                /*
                 * The "ctx_app" refers to the application used to send the command. This is
                 * different from ctx_ua (user_agent) because the agent used (Safari, Firefox)
                 * is not necessarily the app used (cms, iOS app, website)
                 */
                Fb::create('ctx_app', T\MessageType::create())
                    ->anyOfCuries([
                        'gdbots:contexts::app',
                    ])
                    ->build(),
                /*
                 * The "ctx_cloud" is set by the server receiving the command and is generally
                 * only used internally for tracking and performance monitoring.
                 */
                Fb::create('ctx_cloud', T\MessageType::create())
                    ->anyOfCuries([
                        'gdbots:contexts::cloud',
                    ])
                    ->build(),
                Fb::create('ctx_ip', T\StringType::create())
                    ->format(Format::IPV4)
                    ->overridable(true)
                    ->build(),
                Fb::create('ctx_ipv6', T\StringType::create())
                    ->format(Format::IPV6)
                    ->overridable(true)
                    ->build(),
                Fb::create('ctx_ua', T\TextType::create())
                    ->overridable(true)
                    ->build(),
                /*
                 * An optional message/reason for the command being sent.
                 * Consider this like a git commit message.
                 */
                Fb::create('ctx_msg', T\TextType::create())
                    ->build(),
                /*
                 * Tags is a map that categorizes data or tracks references in
                 * external systems. The tags names should be consistent and descriptive,
                 * e.g. fb_user_id:123, salesforce_customer_id:456.
                 */
                Fb::create('tags', T\StringType::create())
                    ->asAMap()
                    ->pattern('^[\w\/\.:-]+$')
                    ->build(),
                /* Reference to the node being unpublished. */
                Fb::create('node_ref', T\NodeRefType::create())
                    ->required()
                    ->build(),
            ],
            self::MIXINS
        );
    }
}
| gdbots/schemas | build/php/src/Gdbots/Schemas/Ncr/Command/UnpublishNodeV1.php | PHP | apache-2.0 | 5,523 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.fo;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import org.apache.xmlgraphics.util.QName;
import org.apache.fop.apps.FOPException;
import org.apache.fop.fo.extensions.ExtensionAttachment;
import org.apache.fop.fo.flow.Marker;
import org.apache.fop.fo.properties.PropertyMaker;
/**
* Base class for representation of formatting objects and their processing.
* All standard formatting object classes extend this class.
*/
public abstract class FObj extends FONode implements Constants {
/** the list of property makers */
private static final PropertyMaker[] PROPERTY_LIST_TABLE
= FOPropertyMapping.getGenericMappings();
/** pointer to the descendant subtree */
protected FONode firstChild;
/** pointer to the end of the descendant subtree */
protected FONode lastChild;
/** The list of extension attachments, null if none */
private List<ExtensionAttachment> extensionAttachments = null;
/** The map of foreign attributes, null if none */
private Map<QName, String> foreignAttributes = null;
/** Used to indicate if this FO is either an Out Of Line FO (see rec)
* or a descendant of one. Used during FO validation.
*/
private boolean isOutOfLineFODescendant = false;
/** Markers added to this element. */
private Map markers = null;
private int bidiLevel = -1;
// The value of properties relevant for all fo objects
private String id = null;
// End of property values
/**
* Create a new formatting object.
*
* @param parent the parent node
*/
public FObj(FONode parent) {
super(parent);
// determine if isOutOfLineFODescendant should be set
if (parent != null && parent instanceof FObj) {
if (((FObj) parent).getIsOutOfLineFODescendant()) {
isOutOfLineFODescendant = true;
} else {
int foID = getNameId();
if (foID == FO_FLOAT || foID == FO_FOOTNOTE
|| foID == FO_FOOTNOTE_BODY) {
isOutOfLineFODescendant = true;
}
}
}
}
/** {@inheritDoc} */
public FONode clone(FONode parent, boolean removeChildren)
throws FOPException {
FObj fobj = (FObj) super.clone(parent, removeChildren);
if (removeChildren) {
fobj.firstChild = null;
}
return fobj;
}
    /**
     * Returns the PropertyMaker for a given property ID.
     * @param propId the property ID (one of the PR_* constants)
     * @return the requested Property Maker
     */
    public static PropertyMaker getPropertyMakerFor(int propId) {
        // No bounds check: an out-of-range propId surfaces as an
        // ArrayIndexOutOfBoundsException from the lookup table.
        return PROPERTY_LIST_TABLE[propId];
    }
    /** {@inheritDoc} */
    public void processNode(String elementName, Locator locator,
                            Attributes attlist, PropertyList pList)
                    throws FOPException {
        setLocator(locator);
        pList.addAttributesToList(attlist);
        // Inside an fo:marker subtree, property binding is deferred until the
        // marker is retrieved; only the fo:marker element itself is bound.
        if (!inMarker() || "marker".equals(elementName)) {
            bind(pList);
        }
    }
    /**
     * Create a default property list for this element.
     * {@inheritDoc}
     */
    protected PropertyList createPropertyList(PropertyList parent,
                    FOEventHandler foEventHandler) throws FOPException {
        // foEventHandler is unused here; the list maker from the builder
        // context decides the concrete PropertyList implementation.
        return getBuilderContext().getPropertyListMaker().make(this, parent);
    }
    /**
     * Bind property values from the property list to the FO node.
     * Must be overridden in all FObj subclasses that have properties
     * applying to it. Overrides are expected to call {@code super.bind(pList)}
     * so the common "id" property is bound.
     * @param pList the PropertyList where the properties can be found.
     * @throws FOPException if there is a problem binding the values
     */
    public void bind(PropertyList pList) throws FOPException {
        id = pList.get(PR_ID).getString();
    }
    /**
     * {@inheritDoc}
     * Registers this object's id (if any) in the document-wide id set so
     * duplicates can be reported.
     * @throws FOPException FOP Exception
     */
    protected void startOfNode() throws FOPException {
        if (id != null) {
            checkId(id);
        }
    }
/**
* Setup the id for this formatting object.
* Most formatting objects can have an id that can be referenced.
* This methods checks that the id isn't already used by another FO
*
* @param id the id to check
* @throws ValidationException if the ID is already defined elsewhere
* (strict validation only)
*/
private void checkId(String id) throws ValidationException {
if (!inMarker() && !id.equals("")) {
Set idrefs = getBuilderContext().getIDReferences();
if (!idrefs.contains(id)) {
idrefs.add(id);
} else {
getFOValidationEventProducer().idNotUnique(this, getName(), id, true, locator);
}
}
}
    /**
     * Returns Out Of Line FO Descendant indicator.
     * Set once in the constructor based on the parent's flag and this
     * object's name id; never changes afterwards.
     * @return true if Out of Line FO or Out Of Line descendant, false otherwise
     */
    boolean getIsOutOfLineFODescendant() {
        return isOutOfLineFODescendant;
    }
    /**
     * {@inheritDoc}
     * Markers and extension attachments are diverted to their own
     * collections; all other children are appended to the doubly-linked
     * sibling list tracked by firstChild/lastChild.
     */
    protected void addChildNode(FONode child) throws FOPException {
        if (child.getNameId() == FO_MARKER) {
            // fo:marker children are stored separately, not in the child list.
            addMarker((Marker) child);
        } else {
            ExtensionAttachment attachment = child.getExtensionAttachment();
            if (attachment != null) {
                /* This removes the element from the normal children,
                 * so no layout manager is being created for them
                 * as they are only additional information.
                 */
                addExtensionAttachment(attachment);
            } else {
                if (firstChild == null) {
                    // First child: both list pointers reference it.
                    firstChild = child;
                    lastChild = child;
                } else {
                    if (lastChild == null) {
                        // Tail pointer was reset; walk the sibling chain to
                        // find the current last child before appending.
                        FONode prevChild = firstChild;
                        while (prevChild.siblings != null
                                && prevChild.siblings[1] != null) {
                            prevChild = prevChild.siblings[1];
                        }
                        FONode.attachSiblings(prevChild, child);
                    } else {
                        // Fast path: append after the tracked tail.
                        FONode.attachSiblings(lastChild, child);
                        lastChild = child;
                    }
                }
            }
        }
    }
    /**
     * Used by RetrieveMarker during Marker-subtree cloning
     * @param child the (cloned) child node
     * @param parent the (cloned) parent node
     * @throws FOPException when the child could not be added to the parent
     */
    protected static void addChildTo(FONode child, FONode parent)
                            throws FOPException {
        // Static indirection so RetrieveMarker can invoke the protected
        // addChildNode() on an arbitrary parent.
        parent.addChildNode(child);
    }
/** {@inheritDoc} */
public void removeChild(FONode child) {
FONode nextChild = null;
if (child.siblings != null) {
nextChild = child.siblings[1];
}
if (child == firstChild) {
firstChild = nextChild;
if (firstChild != null) {
firstChild.siblings[0] = null;
}
} else {
FONode prevChild = child.siblings[0];
prevChild.siblings[1] = nextChild;
if (nextChild != null) {
nextChild.siblings[0] = prevChild;
}
}
if (child == lastChild) {
if (child.siblings != null) {
lastChild = siblings[0];
} else {
lastChild = null;
}
}
}
/**
* Find the nearest parent, grandparent, etc. FONode that is also an FObj
* @return FObj the nearest ancestor FONode that is an FObj
*/
public FObj findNearestAncestorFObj() {
FONode par = parent;
while (par != null && !(par instanceof FObj)) {
par = par.parent;
}
return (FObj) par;
}
    /**
     * Check if this formatting object generates reference areas.
     * Default implementation always returns false; subclasses that generate
     * reference areas override this.
     * @return true if generates reference areas
     * TODO see if needed
     */
    public boolean generatesReferenceAreas() {
        return false;
    }
/** {@inheritDoc} */
public FONodeIterator getChildNodes() {
if (hasChildren()) {
return new FObjIterator(this);
}
return null;
}
    /**
     * Indicates whether this formatting object has children.
     * @return true if there are children
     */
    public boolean hasChildren() {
        return this.firstChild != null;
    }
/**
* Return an iterator over the object's childNodes starting
* at the passed-in node (= first call to iterator.next() will
* return childNode)
* @param childNode First node in the iterator
* @return A ListIterator or null if childNode isn't a child of
* this FObj.
*/
public FONodeIterator getChildNodes(FONode childNode) {
FONodeIterator it = getChildNodes();
if (it != null) {
if (firstChild == childNode) {
return it;
} else {
while (it.hasNext()
&& it.nextNode().siblings[1] != childNode) {
//nop
}
if (it.hasNext()) {
return it;
} else {
return null;
}
}
}
return null;
}
    /**
     * Notifies a FObj that one of it's children is removed.
     * This method is subclassed by Block to clear the
     * firstInlineChild variable in case it doesn't generate
     * any areas (see addMarker()).
     * @param node the node that was removed
     */
    void notifyChildRemoval(FONode node) {
        //nop - intentionally empty; hook for subclasses
    }
    /**
     * Add the marker to this formatting object.
     * If this object can contain markers it checks that the marker
     * has a unique class-name for this object and that it is
     * the first child.
     * @param marker Marker to add.
     */
    protected void addMarker(Marker marker) {
        String mcname = marker.getMarkerClassName();
        if (firstChild != null) {
            // check for empty childNodes: markers must precede any child that
            // will generate an area; area-producing children make the marker
            // invalid (reported, not thrown).
            for (Iterator iter = getChildNodes(); iter.hasNext();) {
                FONode node = (FONode) iter.next();
                if (node instanceof FObj
                        || (node instanceof FOText
                                && ((FOText) node).willCreateArea())) {
                    getFOValidationEventProducer().markerNotInitialChild(this, getName(),
                            mcname, locator);
                    return;
                } else if (node instanceof FOText) {
                    // Whitespace-only text before the marker is dropped.
                    iter.remove();
                    notifyChildRemoval(node);
                }
            }
        }
        if (markers == null) {
            markers = new java.util.HashMap();
        }
        if (!markers.containsKey(mcname)) {
            markers.put(mcname, marker);
        } else {
            // Duplicate marker class-name on the same parent is reported.
            getFOValidationEventProducer().markerNotUniqueForSameParent(this, getName(),
                    mcname, locator);
        }
    }
    /**
     * Indicates whether any fo:marker children were registered on this object.
     * @return true if there are any Markers attached to this object
     */
    public boolean hasMarkers() {
        return markers != null && !markers.isEmpty();
    }
    /**
     * Returns the internal marker map (may be null if no markers were added).
     * Keys are marker class-names, values are Marker instances.
     * @return the collection of Markers attached to this object
     */
    public Map getMarkers() {
        return markers;
    }
/** {@inheritDoc} */
protected String getContextInfoAlt() {
StringBuffer sb = new StringBuffer();
if (getLocalName() != null) {
sb.append(getName());
sb.append(", ");
}
if (hasId()) {
sb.append("id=").append(getId());
return sb.toString();
}
String s = gatherContextInfo();
if (s != null) {
sb.append("\"");
if (s.length() < 32) {
sb.append(s);
} else {
sb.append(s.substring(0, 32));
sb.append("...");
}
sb.append("\"");
return sb.toString();
} else {
return null;
}
}
/** {@inheritDoc} */
protected String gatherContextInfo() {
if (getLocator() != null) {
return super.gatherContextInfo();
} else {
ListIterator iter = getChildNodes();
if (iter == null) {
return null;
}
StringBuffer sb = new StringBuffer();
while (iter.hasNext()) {
FONode node = (FONode) iter.next();
String s = node.gatherContextInfo();
if (s != null) {
if (sb.length() > 0) {
sb.append(", ");
}
sb.append(s);
}
}
return (sb.length() > 0 ? sb.toString() : null);
}
}
    /**
     * Convenience method for validity checking. Checks if the
     * incoming node is a member of the "%block;" parameter entity
     * as defined in Sect. 6.2 of the XSL 1.0 & 1.1 Recommendations
     *
     * @param nsURI namespace URI of incoming node
     * @param lName local name (i.e., no prefix) of incoming node
     * @return true if a member, false if not
     */
    protected boolean isBlockItem(String nsURI, String lName) {
        // Neutral items (wrapper, retrieve-marker, ...) count as block items.
        return (FO_URI.equals(nsURI)
                && ("block".equals(lName)
                        || "table".equals(lName)
                        || "table-and-caption".equals(lName)
                        || "block-container".equals(lName)
                        || "list-block".equals(lName)
                        || "float".equals(lName)
                        || isNeutralItem(nsURI, lName)));
    }
    /**
     * Convenience method for validity checking. Checks if the
     * incoming node is a member of the "%inline;" parameter entity
     * as defined in Sect. 6.2 of the XSL 1.0 & 1.1 Recommendations
     *
     * @param nsURI namespace URI of incoming node
     * @param lName local name (i.e., no prefix) of incoming node
     * @return true if a member, false if not
     */
    protected boolean isInlineItem(String nsURI, String lName) {
        return (FO_URI.equals(nsURI)
                && ("bidi-override".equals(lName)
                        || "character".equals(lName)
                        || "external-graphic".equals(lName)
                        || "instream-foreign-object".equals(lName)
                        || "inline".equals(lName)
                        || "inline-container".equals(lName)
                        || "leader".equals(lName)
                        || "page-number".equals(lName)
                        || "page-number-citation".equals(lName)
                        || "page-number-citation-last".equals(lName)
                        || "basic-link".equals(lName)
                        // multi-toggle only valid within fo:multi-case
                        || ("multi-toggle".equals(lName)
                                && (getNameId() == FO_MULTI_CASE
                                        || findAncestor(FO_MULTI_CASE) > 0))
                        // footnotes are not allowed inside out-of-line content
                        || ("footnote".equals(lName)
                                && !isOutOfLineFODescendant)
                        || isNeutralItem(nsURI, lName)));
    }
    /**
     * Convenience method for validity checking. Checks if the
     * incoming node is a member of the "%block;" parameter entity
     * or "%inline;" parameter entity
     * @param nsURI namespace URI of incoming node
     * @param lName local name (i.e., no prefix) of incoming node
     * @return true if a member, false if not
     */
    protected boolean isBlockOrInlineItem(String nsURI, String lName) {
        return (isBlockItem(nsURI, lName) || isInlineItem(nsURI, lName));
    }
    /**
     * Convenience method for validity checking. Checks if the
     * incoming node is a member of the neutral item list
     * as defined in Sect. 6.2 of the XSL 1.0 & 1.1 Recommendations
     * @param nsURI namespace URI of incoming node
     * @param lName local name (i.e., no prefix) of incoming node
     * @return true if a member, false if not
     */
    protected boolean isNeutralItem(String nsURI, String lName) {
        return (FO_URI.equals(nsURI)
                && ("multi-switch".equals(lName)
                        || "multi-properties".equals(lName)
                        || "wrapper".equals(lName)
                        // fo:float is neutral only outside out-of-line content
                        || (!isOutOfLineFODescendant && "float".equals(lName))
                        || "retrieve-marker".equals(lName)
                        || "retrieve-table-marker".equals(lName)));
    }
/**
* Convenience method for validity checking. Checks if the
* current node has an ancestor of a given name.
* @param ancestorID ID of node name to check for (e.g., FO_ROOT)
* @return number of levels above FO where ancestor exists,
* -1 if not found
*/
protected int findAncestor(int ancestorID) {
int found = 1;
FONode temp = getParent();
while (temp != null) {
if (temp.getNameId() == ancestorID) {
return found;
}
found += 1;
temp = temp.getParent();
}
return -1;
}
/**
* Clears the list of child nodes.
*/
public void clearChildNodes() {
this.firstChild = null;
}
    /** @return the "id" property; may be null if no id was set. */
    public String getId() {
        return id;
    }
/** @return whether this object has an id set */
public boolean hasId() {
return (id != null && id.length() > 0);
}
    /** {@inheritDoc} */
    public String getNamespaceURI() {
        // All fo: elements live in the XSL-FO namespace.
        return FOElementMapping.URI;
    }
    /** {@inheritDoc} */
    public String getNormalNamespacePrefix() {
        // Conventional prefix used for the XSL-FO namespace.
        return "fo";
    }
/** {@inheritDoc} */
public boolean isBidiRangeBlockItem() {
String ns = getNamespaceURI();
String ln = getLocalName();
return !isNeutralItem(ns, ln) && isBlockItem(ns, ln);
}
    /**
     * Recursively set resolved bidirectional level of FO (and its ancestors) if
     * and only if it is non-negative and if either the current value is reset (-1)
     * or the new value is less than the current value.
     * @param bidiLevel a non-negative bidi embedding level
     */
    public void setBidiLevel(int bidiLevel) {
        assert bidiLevel >= 0;
        // Runtime guard in case assertions are disabled.
        if ( bidiLevel >= 0 ) {
            // Only lower the level, or set it when still unset (-1).
            if ( ( this.bidiLevel < 0 ) || ( bidiLevel < this.bidiLevel ) ) {
                this.bidiLevel = bidiLevel;
                if ( parent != null ) {
                    // NOTE(review): cast assumes an FObj's parent is always an
                    // FObj — confirm; a non-FObj parent would throw CCE here.
                    FObj foParent = (FObj) parent;
                    int parentBidiLevel = foParent.getBidiLevel();
                    // Propagate upward only while it would lower the ancestor's level.
                    if ( ( parentBidiLevel < 0 ) || ( bidiLevel < parentBidiLevel ) ) {
                        foParent.setBidiLevel ( bidiLevel );
                    }
                }
            }
        }
    }
    /**
     * Obtain resolved bidirectional level of FO.
     * @return either a non-negative bidi embedding level or -1
     * in case no bidi levels have been assigned
     */
    public int getBidiLevel() {
        return bidiLevel;
    }
/**
* Obtain resolved bidirectional level of FO or nearest FO
* ancestor that has a resolved level.
* @return either a non-negative bidi embedding level or -1
* in case no bidi levels have been assigned to this FO or
* any ancestor
*/
public int getBidiLevelRecursive() {
for ( FONode fn = this; fn != null; fn = fn.getParent() ) {
if ( fn instanceof FObj ) {
int level = ( (FObj) fn).getBidiLevel();
if ( level >= 0 ) {
return level;
}
}
}
return -1;
}
/**
* Add a new extension attachment to this FObj.
* (see org.apache.fop.fo.FONode for details)
*
* @param attachment the attachment to add.
*/
void addExtensionAttachment(ExtensionAttachment attachment) {
if (attachment == null) {
throw new NullPointerException(
"Parameter attachment must not be null");
}
if (extensionAttachments == null) {
extensionAttachments = new java.util.ArrayList<ExtensionAttachment>();
}
if (log.isDebugEnabled()) {
log.debug("ExtensionAttachment of category "
+ attachment.getCategory() + " added to "
+ getName() + ": " + attachment);
}
extensionAttachments.add(attachment);
}
/** @return the extension attachments of this FObj. */
public List/*<ExtensionAttachment>*/ getExtensionAttachments() {
if (extensionAttachments == null) {
return Collections.EMPTY_LIST;
} else {
return extensionAttachments;
}
}
    /** @return true if this FObj has extension attachments */
    public boolean hasExtensionAttachments() {
        // List is created lazily, so null means "none added yet".
        return extensionAttachments != null;
    }
/**
* Adds a foreign attribute to this FObj.
* @param attributeName the attribute name as a QName instance
* @param value the attribute value
*/
public void addForeignAttribute(QName attributeName, String value) {
/* TODO: Handle this over FOP's property mechanism so we can use
* inheritance.
*/
if (attributeName == null) {
throw new NullPointerException("Parameter attributeName must not be null");
}
if (foreignAttributes == null) {
foreignAttributes = new java.util.HashMap<QName, String>();
}
foreignAttributes.put(attributeName, value);
}
/** @return the map of foreign attributes */
public Map getForeignAttributes() {
if (foreignAttributes == null) {
return Collections.EMPTY_MAP;
} else {
return foreignAttributes;
}
}
/** {@inheritDoc} */
public String toString() {
return (super.toString() + "[@id=" + this.id + "]");
}
    /** Basic {@link FONode.FONodeIterator} implementation */
    public static class FObjIterator implements FONodeIterator {

        // Bitmask gating set()/remove(): per the ListIterator contract those
        // operations are only legal immediately after next()/previous().
        private static final int F_NONE_ALLOWED = 0;
        private static final int F_SET_ALLOWED = 1;
        private static final int F_REMOVE_ALLOWED = 2;

        // Node most recently returned (or about to be returned first).
        private FONode currentNode;
        // FObj whose child list is being iterated.
        private final FObj parentNode;
        // 0 means "not yet advanced"; the first next() returns firstChild itself.
        private int currentIndex;
        private int flags = F_NONE_ALLOWED;

        FObjIterator(FObj parent) {
            this.parentNode = parent;
            this.currentNode = parent.firstChild;
            this.currentIndex = 0;
            this.flags = F_NONE_ALLOWED;
        }

        /** {@inheritDoc} */
        public FObj parentNode() {
            return parentNode;
        }

        /** {@inheritDoc} */
        public Object next() {
            if (currentNode != null) {
                if (currentIndex != 0) {
                    // Already advanced at least once: step to the next sibling.
                    if (currentNode.siblings != null
                            && currentNode.siblings[1] != null) {
                        currentNode = currentNode.siblings[1];
                    } else {
                        throw new NoSuchElementException();
                    }
                }
                currentIndex++;
                flags |= (F_SET_ALLOWED | F_REMOVE_ALLOWED);
                return currentNode;
            } else {
                throw new NoSuchElementException();
            }
        }

        /** {@inheritDoc} */
        public Object previous() {
            // NOTE(review): assumes next() succeeded at least once; currentNode
            // may be null for a childless parent, which would NPE here.
            if (currentNode.siblings != null
                    && currentNode.siblings[0] != null) {
                currentIndex--;
                currentNode = currentNode.siblings[0];
                flags |= (F_SET_ALLOWED | F_REMOVE_ALLOWED);
                return currentNode;
            } else {
                throw new NoSuchElementException();
            }
        }

        /** {@inheritDoc} */
        public void set(Object o) {
            if ((flags & F_SET_ALLOWED) == F_SET_ALLOWED) {
                FONode newNode = (FONode) o;
                // Splice newNode into the sibling chain in place of currentNode,
                // updating the parent's first/last pointers when needed.
                if (currentNode == parentNode.firstChild) {
                    parentNode.firstChild = newNode;
                } else {
                    FONode.attachSiblings(currentNode.siblings[0], newNode);
                }
                if (currentNode.siblings != null
                        && currentNode.siblings[1] != null) {
                    FONode.attachSiblings(newNode, currentNode.siblings[1]);
                }
                if (currentNode == parentNode.lastChild) {
                    parentNode.lastChild = newNode;
                }
            } else {
                throw new IllegalStateException();
            }
        }

        /** {@inheritDoc} */
        public void add(Object o) {
            FONode newNode = (FONode) o;
            // currentIndex == -1 presumably marks "before the first node";
            // TODO confirm against FONodeIterator's contract.
            if (currentIndex == -1) {
                if (currentNode != null) {
                    FONode.attachSiblings(newNode, currentNode);
                }
                parentNode.firstChild = newNode;
                currentIndex = 0;
                currentNode = newNode;
                if (parentNode.lastChild == null) {
                    parentNode.lastChild = newNode;
                }
            } else {
                // Insert after currentNode, relinking the following sibling first.
                if (currentNode.siblings != null
                        && currentNode.siblings[1] != null) {
                    FONode.attachSiblings((FONode) o, currentNode.siblings[1]);
                }
                FONode.attachSiblings(currentNode, (FONode) o);
                if (currentNode == parentNode.lastChild) {
                    parentNode.lastChild = newNode;
                }
            }
            // flags &= 0 — revokes set()/remove() permission after an add().
            flags &= F_NONE_ALLOWED;
        }

        /** {@inheritDoc} */
        public boolean hasNext() {
            // Before the first advance (index 0) any non-null currentNode counts.
            return (currentNode != null)
                && ((currentIndex == 0)
                    || (currentNode.siblings != null
                        && currentNode.siblings[1] != null));
        }

        /** {@inheritDoc} */
        public boolean hasPrevious() {
            return (currentIndex != 0)
                || (currentNode.siblings != null
                    && currentNode.siblings[0] != null);
        }

        /** {@inheritDoc} */
        public int nextIndex() {
            return currentIndex + 1;
        }

        /** {@inheritDoc} */
        public int previousIndex() {
            return currentIndex - 1;
        }

        /** {@inheritDoc} */
        public void remove() {
            if ((flags & F_REMOVE_ALLOWED) == F_REMOVE_ALLOWED) {
                parentNode.removeChild(currentNode);
                if (currentIndex == 0) {
                    //first node removed
                    currentNode = parentNode.firstChild;
                } else if (currentNode.siblings != null
                        && currentNode.siblings[0] != null) {
                    // Step back to the predecessor of the removed node.
                    currentNode = currentNode.siblings[0];
                    currentIndex--;
                } else {
                    currentNode = null;
                }
                // Revoke set()/remove() until the next advance.
                flags &= F_NONE_ALLOWED;
            } else {
                throw new IllegalStateException();
            }
        }

        /** {@inheritDoc} */
        public FONode lastNode() {
            // Advance to the end of the sibling chain.
            while (currentNode != null
                    && currentNode.siblings != null
                    && currentNode.siblings[1] != null) {
                currentNode = currentNode.siblings[1];
                currentIndex++;
            }
            return currentNode;
        }

        /** {@inheritDoc} */
        public FONode firstNode() {
            // Reset the iterator to the first child.
            currentNode = parentNode.firstChild;
            currentIndex = 0;
            return currentNode;
        }

        /** {@inheritDoc} */
        public FONode nextNode() {
            return (FONode) next();
        }

        /** {@inheritDoc} */
        public FONode previousNode() {
            return (FONode) previous();
        }
    }
}
| Distrotech/fop | src/java/org/apache/fop/fo/FObj.java | Java | apache-2.0 | 29,191 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workbench.ht.client.editors.tasklogs;
import java.util.List;
import javax.enterprise.context.Dependent;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.client.ui.Composite;
import org.jboss.errai.common.client.dom.HTMLElement;
import org.jboss.errai.common.client.dom.UnorderedList;
import org.jboss.errai.ui.shared.api.annotations.DataField;
import org.jboss.errai.ui.shared.api.annotations.Templated;
import org.uberfire.workbench.events.NotificationEvent;
import static org.jboss.errai.common.client.dom.DOMUtil.removeAllChildren;
import static org.jboss.errai.common.client.dom.Window.getDocument;
@Dependent
@Templated(value = "TaskLogsViewImpl.html")
public class TaskLogsViewImpl extends Composite implements TaskLogsPresenter.TaskLogsView {

    @Inject
    @DataField
    public UnorderedList logTextArea;

    @Inject
    private Event<NotificationEvent> notification;

    /** Fires a workbench notification carrying the given text. */
    @Override
    public void displayNotification(String text) {
        notification.fire(new NotificationEvent(text));
    }

    /**
     * Replaces the log list contents with one list item per log entry.
     * Each entry is HTML-escaped before being inserted.
     */
    @Override
    public void setLogTextAreaText(final List<String> logs) {
        removeAllChildren(logTextArea);
        for (final String log : logs) {
            final HTMLElement item = getDocument().createElement("li");
            item.setInnerHTML(SafeHtmlUtils.htmlEscape(log));
            logTextArea.appendChild(item);
        }
    }
}
| etirelli/jbpm-console-ng | jbpm-wb-human-tasks/jbpm-wb-human-tasks-client/src/main/java/org/jbpm/workbench/ht/client/editors/tasklogs/TaskLogsViewImpl.java | Java | apache-2.0 | 2,080 |
package com.dft.cordova.plugin.onyx;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaInterface;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Base64;
import android.util.Log;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import com.dft.onyx.FingerprintTemplate;
public class OnyxPlugin extends CordovaPlugin implements OnyxMatch.MatchResultCallback {
    public static final String TAG = "OnyxPlugin";
    public static final String IMAGE_URI_PREFIX = "data:image/jpeg;base64,";

    public static String mPackageName;
    public static CallbackContext mCallbackContext;
    public static PluginResult mPluginResult;
    private Activity mActivity;
    private Context mContext;
    public static JSONObject mArgs;
    private static String mExecuteAction;
    public static PluginAction mPluginAction;

    /** Supported plugin actions, keyed by the JS-side action string. */
    public enum PluginAction {
        CAPTURE("capture"),
        MATCH("match");

        private final String key;

        PluginAction(String key) {
            this.key = key;
        }

        public String getKey() {
            return this.key;
        }
    }

    /** JSON configuration keys understood by the plugin. */
    public enum OnyxConfig {
        ONYX_LICENSE("onyxLicense"),
        RETURN_RAW_IMAGE("returnRawImage"),
        RETURN_PROCESSED_IMAGE("returnProcessedImage"),
        RETURN_ENHANCED_IMAGE("returnEnhancedImage"),
        RETURN_WSQ("returnWSQ"),
        RETURN_FINGERPRINT_TEMPLATE("returnFingerprintTemplate"),
        SHOULD_CONVERT_TO_ISO_TEMPLATE("shouldConvertToISOTemplate"),
        COMPUTE_NFIQ_METRICS("computeNfiqMetrics"),
        CROP_SIZE("cropSize"),
        CROP_SIZE_WIDTH("width"),
        CROP_SIZE_HEIGHT("height"),
        CROP_FACTOR("cropFactor"),
        SHOW_LOADING_SPINNER("showLoadingSpinner"),
        USE_MANUAL_CAPTURE("useManualCapture"),
        USE_ONYX_LIVE("useOnyxLive"),
        USE_FLASH("useFlash"),
        RETICLE_ORIENTATION("reticleOrientation"),
        RETICLE_ORIENTATION_LEFT("LEFT"),
        RETICLE_ORIENTATION_RIGHT("RIGHT"),
        RETICLE_ORIENTATION_THUMB_PORTRAIT("THUMB_PORTRAIT"),
        BACKGROUND_COLOR_HEX_STRING("backgroundColorHexString"),
        SHOW_BACK_BUTTON("showBackButton"),
        SHOW_MANUAL_CAPTURE_TEXT("showManualCaptureText"),
        MANUAL_CAPTURE_TEXT("manualCaptureText"),
        BACK_BUTTON_TEXT("backButtonText"),
        REFERENCE("reference"),
        PROBE("probe"),
        PYRAMID_SCALES("pyramidScales");

        private final String key;

        OnyxConfig(String key) {
            this.key = key;
        }

        public String getKey() {
            return this.key;
        }
    }

    /**
     * Constructor
     */
    public OnyxPlugin() {
    }

    /**
     * Sets the context of the Command. This can then be used to do things like
     * get file paths associated with the Activity.
     *
     * @param cordova The context of the main Activity.
     * @param webView The CordovaWebView Cordova is running in.
     */
    public void initialize(CordovaInterface cordova, CordovaWebView webView) {
        super.initialize(cordova, webView);
        Log.v(TAG, "Init Onyx");
        mPackageName = cordova.getActivity().getApplicationContext().getPackageName();
        mPluginResult = new PluginResult(PluginResult.Status.NO_RESULT);
        mActivity = cordova.getActivity();
        mContext = cordova.getActivity().getApplicationContext();
    }

    /**
     * Executes the request and returns PluginResult.
     *
     * @param action          The action to execute.
     * @param args            JSONArry of arguments for the plugin.
     * @param callbackContext The callback id used when calling back into JavaScript.
     * @return A PluginResult object with a status and message.
     */
    public boolean execute(final String action,
                           JSONArray args,
                           CallbackContext callbackContext) throws JSONException {
        mCallbackContext = callbackContext;
        Log.v(TAG, "OnyxPlugin action: " + action);
        mExecuteAction = action;
        mArgs = args.getJSONObject(0);
        if (!mArgs.has("onyxLicense") || !mArgs.has("action")) {
            mPluginResult = new PluginResult(PluginResult.Status.ERROR);
            mCallbackContext.error("Missing required parameters");
            mCallbackContext.sendPluginResult(mPluginResult);
            return true;
        }
        if (action.equalsIgnoreCase(PluginAction.MATCH.getKey())) {
            mPluginAction = PluginAction.MATCH;
        } else if (action.equalsIgnoreCase(PluginAction.CAPTURE.getKey())) {
            mPluginAction = PluginAction.CAPTURE;
        } else {
            // Fix: mPluginAction is static and would otherwise retain the value
            // from a previous invocation, silently re-running the wrong action.
            mPluginAction = null;
        }
        if (null != mPluginAction) {
            switch (mPluginAction) {
                case MATCH:
                    doMatch();
                    break;
                case CAPTURE:
                    launchOnyx();
                    break;
            }
        } else {
            onError("Invalid plugin action.");
        }
        return true;
    }

    public static void onFinished(int resultCode, JSONObject result) {
        if (resultCode == Activity.RESULT_OK) {
            mPluginResult = new PluginResult(PluginResult.Status.OK);
            try {
                result.put("action", mExecuteAction);
                // Fix: only signal success when the action was recorded; the
                // original fired error() AND success() on a JSONException.
                mCallbackContext.success(result);
            } catch (JSONException e) {
                String errorMessage = "Failed to set JSON key value pair: " + e.getMessage();
                mCallbackContext.error(errorMessage);
                mPluginResult = new PluginResult(PluginResult.Status.ERROR);
            }
        } else if (resultCode == Activity.RESULT_CANCELED) {
            mPluginResult = new PluginResult(PluginResult.Status.ERROR);
            mCallbackContext.error("Cancelled");
        }
        mCallbackContext.sendPluginResult(mPluginResult);
    }

    /** Keeps the Cordova callback open while an async capture is in flight. */
    private void keepCordovaCallback() {
        mPluginResult = new PluginResult(PluginResult.Status.NO_RESULT);
        mPluginResult.setKeepCallback(true);
        mCallbackContext.sendPluginResult(mPluginResult);
    }

    public static void onError(String errorMessage) {
        Log.e(TAG, errorMessage);
        mCallbackContext.error(errorMessage);
        mPluginResult = new PluginResult(PluginResult.Status.ERROR);
        mCallbackContext.sendPluginResult(mPluginResult);
    }

    /** Starts the Onyx capture activity on the UI thread. */
    private void launchOnyx() {
        mActivity.runOnUiThread(new Runnable() {
            public void run() {
                Intent onyxIntent = new Intent(mContext, OnyxActivity.class);
                onyxIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                mContext.startActivity(onyxIntent);
            }
        });
        keepCordovaCallback();
    }

    /**
     * Decodes the base64 reference template and probe image from the JS args
     * and kicks off an asynchronous fingerprint match.
     */
    private void doMatch() throws JSONException {
        // Get values for JSON keys
        String encodedReference = mArgs.getString(OnyxConfig.REFERENCE.getKey());
        String encodedProbe = mArgs.getString(OnyxConfig.PROBE.getKey());
        JSONArray scalesJSONArray = null;
        if (mArgs.has(OnyxConfig.PYRAMID_SCALES.getKey())) {
            scalesJSONArray = mArgs.getJSONArray(OnyxConfig.PYRAMID_SCALES.getKey());
        }

        // Decode reference fingerprint template data
        byte[] referenceBytes = Base64.decode(encodedReference, 0);

        // Strip the data-URI prefix to get the base64 probe image payload.
        String encodedProbeDataString = encodedProbe.substring(IMAGE_URI_PREFIX.length());

        // Decode probe probe image data
        byte[] probeBytes = Base64.decode(encodedProbeDataString, 0);

        // Create a bitmap from the probe bytes
        Bitmap probeBitmap = BitmapFactory.decodeByteArray(probeBytes, 0, probeBytes.length);

        // Create a grayscale mat from the bitmap for matching.
        Mat matProbe = new Mat();
        Utils.bitmapToMat(probeBitmap, matProbe);
        Imgproc.cvtColor(matProbe, matProbe, Imgproc.COLOR_RGB2GRAY);

        // Create reference fingerprint template from bytes
        FingerprintTemplate ftRef = new FingerprintTemplate(referenceBytes, 0);

        // Convert pyramid scales from JSON array to double array
        double[] argsScales = null;
        if (null != scalesJSONArray && scalesJSONArray.length() > 0) {
            argsScales = new double[scalesJSONArray.length()];
            for (int i = 0; i < argsScales.length; i++) {
                argsScales[i] = Double.parseDouble(scalesJSONArray.optString(i));
            }
        }
        final double[] pyramidScales = argsScales;

        OnyxMatch matchTask = new OnyxMatch(mContext, OnyxPlugin.this);
        matchTask.execute(ftRef, matProbe, pyramidScales);
    }

    @Override
    public void onMatchFinished(boolean match, float score) {
        JSONObject result = new JSONObject();
        String errorMessage = null;
        try {
            result.put("isVerified", match);
            result.put("matchScore", score);
        } catch (JSONException e) {
            errorMessage = "Failed to set JSON key value pair: " + e.toString();
        }
        if (null != errorMessage) {
            Log.e(TAG, errorMessage);
            onError(errorMessage);
        } else {
            onFinished(Activity.RESULT_OK, result);
        }
    }
}
| DFTinc/cordova-plugin-onyx | src/android/OnyxPlugin.java | Java | apache-2.0 | 9,623 |
<?php
class Language implements \ArrayAccess
{
    /** @var Language|null Singleton instance. */
    private static $_instance = null;

    /** @var \Registry Application registry (provides config access). */
    private $_registry;

    /** @var array Loaded language strings. */
    private $_data = [];

    /**
     * Returns the singleton instance, creating it on first use.
     *
     * Fixes: the accessor must be static (the constructor is private, so no
     * instance exists to call it on), the instance must be created when it
     * does NOT exist yet, and `new self` is the correct instantiation syntax
     * (`new __CLASS__` tries to instantiate a class literally named __CLASS__).
     */
    public static function getInstance (\Registry $registry)
    {
        if (self::$_instance === null) {
            self::$_instance = new self($registry);
        }
        return self::$_instance;
    }

    private function __construct (\Registry $registry)
    {
        $this->_registry = $registry;
        $this->load();
    }

    /**
     * Loads the language strings: the English base overlaid with the
     * session-selected (or configured default) language.
     */
    protected function load ()
    {
        $lang = 'en_us';
        if (isset($_SESSION['lang']))
            $lang = $_SESSION['lang'];
        else
            $lang = ORM::forTable('setting')->where('key', 'default_language')->findOne()->value;

        // The language files are PHP scripts expected to return arrays, so they
        // must be include'd — file_get_contents() would hand raw source text to
        // array_merge_recursive(). TODO confirm the files use `return [...]`.
        $this->_data = array_merge_recursive(
            include $this->_registry['config']['language_dir'] . 'en_us.php',
            include $this->_registry['config']['language_dir'] . $lang . '.php'
        );
    }

    // Prevent cloning/unserializing to preserve the singleton guarantee.
    private function __clone () {}
    private function __wakeup () {}

    public function offsetExists ($key)
    {
        return isset($this->_data[$key]);
    }

    public function offsetSet ($key, $value)
    {
        $this->_data[$key] = $value;
    }

    public function offsetGet ($key)
    {
        if (!isset($this->_data[$key])) {
            throw new \InvalidArgumentException(sprintf('Identifier "%s" is not defined.', $key));
        }
        // Language entries are plain values; return them directly instead of
        // invoking them as callables.
        return $this->_data[$key];
    }

    public function offsetUnset ($key)
    {
        if (isset($this->_data[$key])) {
            unset($this->_data[$key]);
        }
    }
}
| Prezto/Shopr | system/lib/Language.php | PHP | apache-2.0 | 1,461 |
package jat.gps_ins.relative;
/* JAT: Java Astrodynamics Toolkit
*
* Copyright (c) 2003 The JAT Project. All rights reserved.
*
* This file is part of JAT. JAT is free software; you can
* redistribute it and/or modify it under the terms of the
* NASA Open Source Agreement, version 1.3 or later.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* NASA Open Source Agreement for more details.
*
* You should have received a copy of the NASA Open Source Agreement
* along with this program; if not, write to the NASA Goddard
* Space Flight Center at opensource@gsfc.nasa.gov.
*
*
* File Created on May 9, 2003
*/
import jat.alg.estimators.*;
import jat.alg.integrators.*;
import jat.gps.*;
//import jat.gps_ins.*;
import jat.ins.*;
/**
 * The RGPS_SIMU.java Class runs the RGPS/IMU EKF.
 * @author <a href="mailto:dgaylor@users.sourceforge.net">Dave Gaylor
 * @version 1.0
 */
public class RGPS_SIMU {

    /**
     * main - runs the EKF.
     * Loads GPS and INS measurement files plus a RINEX navigation file,
     * wires up the process/measurement models, runs the extended Kalman
     * filter, and prints the elapsed wall-clock time in minutes.
     * @param args none.
     */
    public static void main(String[] args) {

        // Hard-coded Windows input directory and data files.
        String dir = "C:\\Jat\\jat\\input\\";
        String gpsmeasfile = "gps\\rgpsmeas_rbar_geom1_mp.jat";
//        String gpsmeasfile = "gpsmeasblk.txt";
        String insmeasfile = "simu_rbar.jat";
        String rinexfile = "gps\\rinex.n";
//        RGPS_MeasurementList gps = new RGPS_MeasurementList();
//        gps.readFromFile(dir+gpsmeasfile);

        // Deserialize previously recorded GPS and INS measurements.
        RGPS_MeasurementList gps = RGPS_MeasurementList.recover(dir+gpsmeasfile);
        INS_MeasurementList ins = INS_MeasurementList.recover(dir+insmeasfile);
//        int big = ins.size();
//        System.out.println("ins size = "+big);
        // seed of -1 presumably selects a default/clock seed — TODO confirm.
        long seed = -1;
        GPS_Constellation constellation = new GPS_Constellation(dir+rinexfile);
        // Output printers for filter states and residuals.
        LinePrinter lp1 = new LinePrinter("C:\\Jat\\jat\\output\\ekf_simu_rbar_geom1_mp_new_at_1.txt");
        LinePrinter lp2 = new LinePrinter("C:\\Jat\\jat\\output\\ekf_simu_rbar_geom1_mp_new_at_2.txt");
        LinePrinter lp3 = new LinePrinter("C:\\Jat\\jat\\output\\ekf_simu_rbar_geom1_mp_at_resid.txt");
        ProcessModel process = new RGPS_SIMU_ProcessModel(ins, constellation, lp1, lp2, lp3, seed);
        MeasurementModel meas = new RGPS_INS_MeasurementModel(gps, constellation);
        ExtendedKalmanFilter ekf = new ExtendedKalmanFilter(meas, gps, process);

        // Time the filter run and report elapsed minutes.
        long ts = System.currentTimeMillis();

        ekf.process();

        long tf = System.currentTimeMillis();
        double dtf = (tf - ts)/(60.0 * 1000.0);
        System.out.println("EKF Time Elapsed: "+dtf);

    }
}
| MaximTar/satellite | src/main/resources/libs/JAT/jat/gps_ins/relative/RGPS_SIMU.java | Java | apache-2.0 | 2,539 |
/**
* @license Copyright 2017 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
/* global DOM, ViewerUIFeatures, ReportRenderer, DragAndDrop, GithubApi, PSIApi, logger, idbKeyval */
/** @typedef {import('./psi-api').PSIParams} PSIParams */
/**
 * Guaranteed context.querySelector. Always returns an element or throws if
 * nothing matches query.
 * @param {string} query
 * @param {ParentNode} context
 * @return {HTMLElement}
 */
function find(query, context) {
  const result = /** @type {?HTMLElement} */ (context.querySelector(query));
  // querySelector returns null when nothing matches; make that an error.
  if (!result) {
    throw new Error(`query ${query} not found`);
  }
  return result;
}
/**
* Class that manages viewing Lighthouse reports.
*/
class LighthouseReportViewer {
  constructor() {
    // Bind handlers once so the same references can be registered as listeners.
    this._onPaste = this._onPaste.bind(this);
    this._onSaveJson = this._onSaveJson.bind(this);
    this._onFileLoad = this._onFileLoad.bind(this);
    this._onUrlInputChange = this._onUrlInputChange.bind(this);

    this._dragAndDropper = new DragAndDrop(this._onFileLoad);
    this._github = new GithubApi();
    this._psi = new PSIApi();

    /**
     * Used for tracking whether to offer to upload as a gist.
     * @type {boolean}
     */
    this._reportIsFromGist = false;

    // Provenance flags for the currently displayed report; they control URL
    // rewriting and whether gist-saving is offered (see _replaceReportHtml).
    this._reportIsFromPSI = false;
    this._reportIsFromJSON = false;

    this._addEventListeners();
    this._loadFromDeepLink();
    this._listenForMessages();
  }
static get APP_URL() {
return `${location.origin}${location.pathname}`;
}
  /**
   * Initialize event listeners.
   * @private
   */
  _addEventListeners() {
    // Support pasting report JSON or a gist URL anywhere on the page.
    document.addEventListener('paste', this._onPaste);

    const gistUrlInput = find('.js-gist-url', document);
    gistUrlInput.addEventListener('change', this._onUrlInputChange);

    // Hidden file input to trigger manual file selector.
    const fileInput = find('#hidden-file-input', document);
    fileInput.addEventListener('change', e => {
      if (!e.target) {
        return;
      }

      const inputTarget = /** @type {HTMLInputElement} */ (e.target);
      if (inputTarget.files) {
        this._onFileLoad(inputTarget.files[0]);
      }
      // Clear the value so re-selecting the same file fires 'change' again.
      inputTarget.value = '';
    });

    // A click on the visual placeholder will trigger the hidden file input.
    const placeholderTarget = find('.viewer-placeholder-inner', document);
    placeholderTarget.addEventListener('click', e => {
      const target = /** @type {?Element} */ (e.target);

      // Ignore clicks on links and the input itself.
      if (target && target.localName !== 'input' && target.localName !== 'a') {
        fileInput.click();
      }
    });
  }
  /**
   * Attempts to pull gist id from URL and render report from it.
   * Also supports ?psiurl= (live PSI fetch) and ?jsonurl= (raw JSON fetch).
   * @return {Promise<void>}
   * @private
   */
  _loadFromDeepLink() {
    const params = new URLSearchParams(location.search);
    const gistId = params.get('gist');
    const psiurl = params.get('psiurl');
    const jsonurl = params.get('jsonurl');
    // Nothing to deep-link; leave the placeholder UI up.
    if (!gistId && !psiurl && !jsonurl) return Promise.resolve();

    // Blur the UI while the report is being fetched.
    this._toggleLoadingBlur(true);
    let loadPromise = Promise.resolve();
    if (psiurl) {
      // Run PSI against the given URL, forwarding optional tuning params.
      loadPromise = this._fetchFromPSI({
        url: psiurl,
        category: params.has('category') ? params.getAll('category') : undefined,
        strategy: params.get('strategy') || undefined,
        locale: params.get('locale') || undefined,
        utm_source: params.get('utm_source') || undefined,
      });
    } else if (gistId) {
      // Fetch the saved report JSON out of the GitHub gist.
      loadPromise = this._github.getGistFileContentAsJson(gistId).then(reportJson => {
        this._reportIsFromGist = true;
        this._replaceReportHtml(reportJson);
      }).catch(err => logger.error(err.message));
    } else if (jsonurl) {
      // jsonurl is only honored for anonymous users (avoids using an
      // authenticated session to fetch arbitrary URLs).
      const firebaseAuth = this._github.getFirebaseAuth();
      loadPromise = firebaseAuth.getAccessTokenIfLoggedIn()
        .then(token => {
          return token
            ? Promise.reject(new Error('Can only use jsonurl when not logged in'))
            : null;
        })
        .then(() => fetch(jsonurl))
        .then(resp => resp.json())
        .then(json => {
          this._reportIsFromJSON = true;
          this._replaceReportHtml(json);
        })
        .catch(err => logger.error(err.message));
    }

    // Always remove the blur, whether loading succeeded or failed.
    return loadPromise.finally(() => this._toggleLoadingBlur(false));
  }
/**
* Basic Lighthouse report JSON validation.
* @param {LH.Result} reportJson
* @private
*/
_validateReportJson(reportJson) {
if (!reportJson.lighthouseVersion) {
throw new Error('JSON file was not generated by Lighthouse');
}
// Leave off patch version in the comparison.
const semverRe = new RegExp(/^(\d+)?\.(\d+)?\.(\d+)$/);
const reportVersion = reportJson.lighthouseVersion.replace(semverRe, '$1.$2');
const lhVersion = window.LH_CURRENT_VERSION.replace(semverRe, '$1.$2');
if (reportVersion < lhVersion) {
// TODO: figure out how to handler older reports. All permalinks to older
// reports will start to throw this warning when the viewer rev's its
// minor LH version.
// See https://github.com/GoogleChrome/lighthouse/issues/1108
logger.warn('Results may not display properly.\n' +
'Report was created with an earlier version of ' +
`Lighthouse (${reportJson.lighthouseVersion}). The latest ` +
`version is ${window.LH_CURRENT_VERSION}.`);
}
}
  /**
   * Renders the given Lighthouse result into the page, replacing the
   * placeholder UI. Accepts either an LH.Result or a full RunnerResult.
   * @param {LH.Result} json
   * @private
   */
  // TODO: Really, `json` should really have type `unknown` and
  // we can have _validateReportJson verify that it's an LH.Result
  _replaceReportHtml(json) {
    // Allow users to view the runnerResult
    if ('lhr' in json) {
      json = /** @type {LH.RunnerResult} */ (json).lhr;
    }

    // Install as global for easier debugging
    // @ts-ignore
    window.__LIGHTHOUSE_JSON__ = json;

    // eslint-disable-next-line no-console
    console.log('window.__LIGHTHOUSE_JSON__', json);

    this._validateReportJson(json);

    // Redirect to old viewer if a v2 report. v3, v4, v5 handled by v5 viewer.
    if (json.lighthouseVersion.startsWith('2')) {
      this._loadInLegacyViewerVersion(json);
      return;
    }

    const dom = new DOM(document);
    const renderer = new ReportRenderer(dom);

    const container = find('main', document);
    try {
      renderer.renderReport(json, container);

      // Only give gist-saving callback if current report isn't from a gist.
      let saveCallback = null;
      if (!this._reportIsFromGist) {
        saveCallback = this._onSaveJson;
      }

      // Only clear query string if current report isn't from a gist or PSI.
      if (!this._reportIsFromGist && !this._reportIsFromPSI && !this._reportIsFromJSON) {
        history.pushState({}, '', LighthouseReportViewer.APP_URL);
      }

      const features = new ViewerUIFeatures(dom, saveCallback);
      features.initFeatures(json);
    } catch (e) {
      // Rendering failed part-way: surface the error and clear the partial DOM.
      logger.error(`Error rendering report: ${e.message}`);
      dom.resetTemplates(); // TODO(bckenny): hack
      container.textContent = '';
      throw e;
    } finally {
      // Reset provenance flags so the next load starts clean.
      this._reportIsFromGist = this._reportIsFromPSI = this._reportIsFromJSON = false;
    }

    // Remove the placeholder UI once the user has loaded a report.
    const placeholder = document.querySelector('.viewer-placeholder');
    if (placeholder) {
      placeholder.remove();
    }

    if (window.ga) {
      window.ga('send', 'event', 'report', 'view');
    }
  }
/**
* Updates the page's HTML with contents of the JSON file passed in.
* @param {File} file
* @return {Promise<void>}
* @throws file was not valid JSON generated by Lighthouse or an unknown file
* type was used.
* @private
*/
_onFileLoad(file) {
return this._readFile(file).then(str => {
let json;
try {
json = JSON.parse(str);
} catch (e) {
throw new Error('Could not parse JSON file.');
}
this._replaceReportHtml(json);
}).catch(err => logger.error(err.message));
}
  /**
   * Stores v2.x report in IDB, then navigates to legacy viewer in current tab.
   * @param {LH.Result} reportJson
   * @private
   */
  _loadInLegacyViewerVersion(reportJson) {
    const warnMsg = `Version mismatch between viewer and JSON. Opening compatible viewer...`;
    logger.log(warnMsg, false);

    // Place report in IDB, then navigate current tab to the legacy viewer.
    // The legacy page reads the '2xreport' key on load.
    const viewerPath = new URL('../viewer2x/', location.href);
    idbKeyval.set('2xreport', reportJson).then(_ => {
      window.location.href = viewerPath.href;
    });
  }
/**
* Reads a file and returns its content as a string.
* @param {File} file
* @return {Promise<string>}
* @private
*/
_readFile(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = function(e) {
const readerTarget = /** @type {?FileReader} */ (e.target);
const result = /** @type {?string} */ (readerTarget && readerTarget.result);
if (!result) {
reject('Could not read file');
return;
}
resolve(result);
};
reader.onerror = reject;
reader.readAsText(file);
});
}
  /**
   * Saves the current report by creating a gist on GitHub.
   * @param {LH.Result} reportJson
   * @return {Promise<string|void>} id of the created gist.
   * @private
   */
  _onSaveJson(reportJson) {
    if (window.ga) {
      window.ga('send', 'event', 'report', 'share');
    }

    // TODO: find and reuse existing json gist if one exists.
    return this._github.createGist(reportJson).then(id => {
      if (window.ga) {
        window.ga('send', 'event', 'report', 'created');
      }

      // Record the gist id in the URL so the report view is shareable.
      history.pushState({}, '', `${LighthouseReportViewer.APP_URL}?gist=${id}`);

      return id;
    }).catch(err => logger.log(err.message));
  }
  /**
   * Enables pasting a JSON report or gist URL on the page.
   * Tries both interpretations; whichever parse succeeds wins.
   * @param {ClipboardEvent} e
   * @private
   */
  _onPaste(e) {
    if (!e.clipboardData) return;
    e.preventDefault();

    // Try paste as gist URL.
    try {
      const url = new URL(e.clipboardData.getData('text'));
      this._loadFromGistURL(url.href);

      if (window.ga) {
        window.ga('send', 'event', 'report', 'paste-link');
      }
    } catch (err) {
      // noop
    }

    // Try paste as json content.
    try {
      const json = JSON.parse(e.clipboardData.getData('text'));
      this._replaceReportHtml(json);

      if (window.ga) {
        window.ga('send', 'event', 'report', 'paste');
      }
    } catch (err) {
      // noop — pasted text was not valid JSON.
    }
  }
/**
* Handles changes to the gist url input.
* @param {Event} e
* @private
*/
_onUrlInputChange(e) {
e.stopPropagation();
if (!e.target) {
return;
}
const inputElement = /** @type {HTMLInputElement} */ (e.target);
try {
this._loadFromGistURL(inputElement.value);
} catch (err) {
logger.error('Invalid URL');
}
}
/**
* Loads report json from gist URL, if valid. Updates page URL with gist ID
* and loads from github.
* @param {string} urlStr Gist URL.
* @private
*/
_loadFromGistURL(urlStr) {
try {
const url = new URL(urlStr);
if (url.origin !== 'https://gist.github.com') {
logger.error('URL was not a gist');
return;
}
const match = url.pathname.match(/[a-f0-9]{5,}/);
if (match) {
history.pushState({}, '', `${LighthouseReportViewer.APP_URL}?gist=${match[0]}`);
this._loadFromDeepLink();
}
} catch (err) {
logger.error('Invalid URL');
}
}
/**
* Initializes a `message` listener to respond to postMessage events.
* @private
*/
_listenForMessages() {
window.addEventListener('message', e => {
if (e.source === self.opener && e.data.lhresults) {
this._replaceReportHtml(e.data.lhresults);
if (self.opener && !self.opener.closed) {
self.opener.postMessage({rendered: true}, '*');
}
if (window.ga) {
window.ga('send', 'event', 'report', 'open in viewer');
}
}
});
// If the page was opened as a popup, tell the opening window we're ready.
if (self.opener && !self.opener.closed) {
self.opener.postMessage({opened: true}, '*');
}
}
/**
* @param {PSIParams} params
*/
_fetchFromPSI(params) {
logger.log('Waiting for Lighthouse results ...');
return this._psi.fetchPSI(params).then(response => {
logger.hide();
if (!response.lighthouseResult) {
if (response.error) {
// eslint-disable-next-line no-console
console.error(response.error);
logger.error(response.error.message);
} else {
logger.error('PSI did not return a Lighthouse Result');
}
return;
}
this._reportIsFromPSI = true;
this._replaceReportHtml(response.lighthouseResult);
});
}
/**
* @param {boolean} force
*/
_toggleLoadingBlur(force) {
const placeholder = document.querySelector('.viewer-placeholder-inner');
if (placeholder) placeholder.classList.toggle('lh-loading', force);
}
}
// node export for testing.
// In the browser `module` is undefined, so this is a no-op there; under
// Node (unit tests) it exposes the class via CommonJS.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = LighthouseReportViewer;
}
| umaar/lighthouse | lighthouse-viewer/app/src/lighthouse-report-viewer.js | JavaScript | apache-2.0 | 13,779 |
using System;
using System.Collections.Generic;
using System.Diagnostics.Contracts;
using Frontenac.Blueprints.Contracts;
namespace Frontenac.Blueprints
{
    /// <summary>
    ///     An index maintains a mapping between some key/value pair and an element,
    ///     allowing elements to be looked up, counted and removed by that pair.
    /// </summary>
    [ContractClass(typeof (IndexContract))]
    public interface IIndex
    {
        /// <summary>
        ///     Get the name of the index.
        /// </summary>
        /// <value>the name of the index</value>
        string Name { get; }

        /// <summary>
        ///     Get the class that this index is indexing.
        /// </summary>
        /// <value>the class this index is indexing</value>
        Type Type { get; }

        /// <summary>
        ///     Index an element by a key and a value.
        /// </summary>
        /// <param name="key">the key to index the element by</param>
        /// <param name="value">the value to index the element by</param>
        /// <param name="element">the element to index</param>
        void Put(string key, object value, IElement element);

        /// <summary>
        ///     Get all elements that are indexed by the provided key/value.
        /// </summary>
        /// <param name="key">the key of the indexed elements</param>
        /// <param name="value">the value of the indexed elements</param>
        /// <returns>an IEnumerable of elements that have a particular key/value in the index</returns>
        IEnumerable<IElement> Get(string key, object value);

        /// <summary>
        ///     Get all the elements that are indexed by the provided key and specified query object.
        ///     This is useful for graph implementations that support complex query capabilities.
        ///     If querying is not supported, simply throw a NotSupportedException.
        /// </summary>
        /// <param name="key">the key of the indexed elements</param>
        /// <param name="query">the query object for the indexed elements' keys</param>
        /// <returns>an IEnumerable of elements that have a particular key/value in the index that match the query object</returns>
        IEnumerable<IElement> Query(string key, object query);

        /// <summary>
        ///     Get a count of elements with a particular key/value pair.
        ///     The semantics are the same as the Get method.
        /// </summary>
        /// <param name="key">denoting the sub-index to search</param>
        /// <param name="value">the value to search for</param>
        /// <returns>the number of elements that have the given key/value pair in the index</returns>
        long Count(string key, object value);

        /// <summary>
        ///     Remove an element indexed by a particular key/value.
        /// </summary>
        /// <param name="key">the key of the indexed element</param>
        /// <param name="value">the value of the indexed element</param>
        /// <param name="element">the element to remove given the key/value pair</param>
        void Remove(string key, object value, IElement element);
    }
} | ezg/PanoramicDataWin8 | PanoramicDataWin8/utils/Blueprints2/IIndex.cs | C# | apache-2.0 | 2,943 |
package com.example.ledgr.dataobjects;
import java.util.ArrayList;
import java.util.UUID;
import junit.framework.Assert;
import org.joda.time.LocalDate;
import android.app.Activity;
import android.content.ContentValues;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.Cursor;
import com.example.ledgr.MainActivity;
import com.example.ledgr.Utils;
import com.example.ledgr.alarmandnotification.Alarm;
import com.example.ledgr.contentprovider.ItemContentProvider;
import com.example.ledgr.contentprovider.RentalContentProvider;
import com.example.ledgr.temboo.TembooFacebook;
/**
 * Static data-access helpers for items and rentals, backed by
 * {@link ItemContentProvider} and {@link RentalContentProvider}.
 *
 * <p>Fixes over the previous revision: every method that fully consumes a
 * {@link Cursor} now closes it (the old code leaked cursors), stray debug
 * printlns were removed, and a dead, wrongly-parameterized query in
 * {@link #syncedGetRental} was deleted.</p>
 */
public class ItemsData {

    // Sync-state flags stored in the C_DIRTY column of both tables.
    public static final int SYNCED = 0;
    public static final int INSERTED = 1;
    public static final int UPDATED = 2;
    public static final int DELETED = 3;
    public static final int GET = 4;

    // Values for C_RENTED_OUT_OR_RENTING: local user lent the item out vs. is renting it.
    public static final int RENTED_OUT = 0;
    public static final int CURRENTLY_RENTING = 1;

    // Values for C_PENDING_RENTAL.
    public static final int PENDING = 0;
    public static final int COMPLETE = 1;

    protected int alarm_id = 0;

    /**
     * Returns a cursor over all items. Caller is responsible for closing it.
     */
    public static Cursor getItems_data(Activity activity) {
        return activity.getContentResolver()
                .query(ItemContentProvider.CONTENT_URI, null, null, null, null);
    }

    /**
     * Inserts a new item locally (marked INSERTED for later sync) and posts it
     * via Temboo/Facebook.
     */
    public static void addItem(Item item, Activity activity) {
        ContentValues values = ItemContentProvider.itemToValues(item);
        values.put(ItemContentProvider.C_DIRTY, INSERTED);
        activity.getContentResolver().insert(ItemContentProvider.CONTENT_URI, values);
        activity.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
        // Also publish the new item through Temboo.
        TembooFacebook tfb = new TembooFacebook();
        tfb.postItem(item);
    }

    /** Marks an item as synced with the backend. */
    public static void syncedItem(String item_id, Context context) {
        ContentValues values = new ContentValues();
        values.put(ItemContentProvider.C_DIRTY, SYNCED);
        context.getContentResolver().update(ItemContentProvider.CONTENT_URI,
                values, ItemContentProvider.C_ID + " = ?", new String[] {item_id});
    }

    /** Physically removes an item row once its deletion has been synced. */
    public static void syncedDeleteItem(String item_id, Context context) {
        context.getContentResolver().delete(ItemContentProvider.CONTENT_URI,
                ItemContentProvider.C_ID + " = ?", new String[] {item_id});
    }

    /**
     * Returns a cursor over all items that still need syncing (C_DIRTY != SYNCED).
     * Caller is responsible for closing it.
     */
    public static Cursor itemsToSync(Context context) {
        return context.getContentResolver()
                .query(ItemContentProvider.CONTENT_URI,
                        null,
                        ItemContentProvider.C_DIRTY + " != ?",
                        new String[] {Integer.toString(SYNCED)},
                        null);
    }

    /**
     * Returns a cursor over all rentals that still need syncing. Caller closes it.
     */
    public static Cursor rentalsToSync(Context context) {
        return context.getContentResolver()
                .query(RentalContentProvider.CONTENT_URI,
                        null,
                        RentalContentProvider.C_DIRTY + " != ?",
                        new String[] {Integer.toString(SYNCED)},
                        null);
    }

    /** Items the given user is currently renting (excluding deleted/incoming rows). */
    public static ArrayList<Item> retrieveItemsRentedByUserId(String userId, Activity activity) {
        Cursor cursor = activity.getContentResolver()
                .query(RentalContentProvider.CONTENT_URI, null,
                        RentalContentProvider.C_RENTER_ID + " = ? AND " + RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_DIRTY + " != ?",
                        new String[] {userId, Integer.toString(DELETED), Integer.toString(GET)}, null);
        ArrayList<Item> items = new ArrayList<Item>();
        try {
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                items.add(RentalContentProvider.createItemFromCursor(cursor));
                cursor.moveToNext();
            }
        } finally {
            cursor.close(); // previously leaked
        }
        return items;
    }

    /** Rentals where the given user is the owner (excluding deleted/incoming rows). */
    public static ArrayList<Rental> retrieveItemsLentByUserId(String userId, Activity activity) {
        Cursor cursor = activity.getContentResolver()
                .query(RentalContentProvider.CONTENT_URI, null,
                        RentalContentProvider.C_OWNER_ID + " = ? AND " + RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_DIRTY + " != ?",
                        new String[] {userId, Integer.toString(DELETED), Integer.toString(GET)}, null);
        ArrayList<Rental> rentals = new ArrayList<Rental>();
        try {
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                rentals.add(RentalContentProvider.createRentalFromCursor(cursor));
                cursor.moveToNext();
            }
        } finally {
            cursor.close(); // previously leaked
        }
        return rentals;
    }

    /** Distinct owners of items the local user is currently renting. */
    public static ArrayList<User> retrieveUserIdsCurrentlyRenting(Activity activity) {
        Cursor cursor = activity.getContentResolver().query(RentalContentProvider.CONTENT_URI,
                new String[] {"DISTINCT " + RentalContentProvider.C_OWNER_ID, RentalContentProvider.C_OWNER_NAME},
                RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_RENTED_OUT_OR_RENTING + " = ?",
                new String[] {Integer.toString(DELETED), Integer.toString(GET), Integer.toString(CURRENTLY_RENTING)}, null);
        ArrayList<User> users = new ArrayList<User>();
        try {
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                String userId = cursor.getString(cursor.getColumnIndexOrThrow(RentalContentProvider.C_OWNER_ID));
                String name = cursor.getString(cursor.getColumnIndexOrThrow(RentalContentProvider.C_OWNER_NAME));
                users.add(new User(userId, name));
                cursor.moveToNext();
            }
        } finally {
            cursor.close(); // previously leaked
        }
        return users;
    }

    /** Distinct renters of items the local user has rented out. */
    public static ArrayList<User> retrieveUserIdsRenting(Activity activity) {
        Cursor cursor = activity.getContentResolver().query(RentalContentProvider.CONTENT_URI,
                new String[] {"DISTINCT " + RentalContentProvider.C_RENTER_ID, RentalContentProvider.C_RENTER_NAME},
                RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_RENTED_OUT_OR_RENTING + " = ?",
                new String[] {Integer.toString(DELETED), Integer.toString(GET), Integer.toString(RENTED_OUT)}, null);
        ArrayList<User> users = new ArrayList<User>();
        try {
            cursor.moveToFirst();
            while (!cursor.isAfterLast()) {
                String userId = cursor.getString(cursor.getColumnIndexOrThrow(RentalContentProvider.C_RENTER_ID));
                String name = cursor.getString(cursor.getColumnIndexOrThrow(RentalContentProvider.C_RENTER_NAME));
                users.add(new User(userId, name));
                cursor.moveToNext();
            }
        } finally {
            cursor.close(); // previously leaked
        }
        return users;
    }

    /** Loads a single (non-deleted) item by id. */
    public static Item retrieveItemById(String item_id, Activity activity) {
        Cursor cursor = activity.getContentResolver()
                .query(ItemContentProvider.CONTENT_URI,
                        null,
                        ItemContentProvider.C_ID + " = ? and " + ItemContentProvider.C_DIRTY + " != ?",
                        new String[] {item_id, Integer.toString(DELETED)},
                        null);
        try {
            cursor.moveToFirst();
            return ItemContentProvider.createItemFromCursor(cursor);
        } finally {
            cursor.close(); // previously leaked
        }
    }

    /** Soft-deletes an item (marks DELETED; actual removal happens after sync). */
    public static boolean deleteItemById(String item_id, Activity activity) {
        ContentValues values = new ContentValues();
        values.put(ItemContentProvider.C_DIRTY, DELETED);
        activity.getContentResolver().update(ItemContentProvider.CONTENT_URI,
                values, ItemContentProvider.C_ID + " = ?", new String[] {item_id});
        activity.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
        return true;
    }

    /** Writes the item's fields and marks it UPDATED for sync. */
    public static void updateItem(Item item, Activity activity) {
        ContentValues values = ItemContentProvider.itemToValues(item);
        values.put(ItemContentProvider.C_DIRTY, UPDATED);
        activity.getContentResolver().update(ItemContentProvider.CONTENT_URI,
                values, ItemContentProvider.C_ID + " = ?", new String[] {item.getItem_id()});
        activity.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
    }

    /**
     * Marks an item rented, creates the corresponding rental row (RENTED_OUT,
     * INSERTED for sync) and schedules a due-date alarm.
     */
    public static boolean rentItemById(String item_id, User renter, LocalDate dueDate, Activity activity) {
        ContentValues values = new ContentValues();
        values.put(ItemContentProvider.C_RENTED, 1);
        values.put(ItemContentProvider.C_DIRTY, UPDATED);
        activity.getContentResolver().update(ItemContentProvider.CONTENT_URI, values,
                ItemContentProvider.C_ID + " = ?", new String[] {item_id});

        Cursor cursor = activity.getContentResolver().query(ItemContentProvider.CONTENT_URI, null, ItemContentProvider.C_ID + " = ?", new String[] {item_id}, null);
        Item item;
        try {
            cursor.moveToFirst();
            item = ItemContentProvider.createItemFromCursor(cursor);
        } finally {
            cursor.close(); // previously leaked
        }

        SharedPreferences prefs = activity.getSharedPreferences(MainActivity.class.getSimpleName(), Context.MODE_PRIVATE);
        String userid = prefs.getString(MainActivity.MAIN_USERID, "");
        String username = prefs.getString(MainActivity.MAIN_USERNAME, "");
        //TODO need to send rental
        Rental rental = new Rental(item, dueDate, renter,
                new User(userid, username));

        Alarm alarm = new Alarm();
        alarm.SetAlarm(activity.getApplicationContext(), 0, item.getTitle(), renter.getFirstName(), item_id, dueDate);
        rental.setRental_alarm_id(0);

        ContentValues contentValues = RentalContentProvider.rentalToValues(rental);
        contentValues.put(RentalContentProvider.C_ID, UUID.randomUUID().toString());
        contentValues.put(RentalContentProvider.C_DIRTY, INSERTED);
        contentValues.put(RentalContentProvider.C_RENTED_OUT_OR_RENTING, RENTED_OUT);
        activity.getContentResolver().insert(RentalContentProvider.CONTENT_URI, contentValues);
        activity.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
        return false;
    }

    /** Un-rents the item and soft-deletes its rental row. */
    public static boolean cancelRentalById(String item_id, Activity activity) {
        ContentValues values = new ContentValues();
        values.put(ItemContentProvider.C_RENTED, 0);
        values.put(ItemContentProvider.C_DIRTY, UPDATED);
        activity.getContentResolver().update(ItemContentProvider.CONTENT_URI, values, ItemContentProvider.C_ID + " = ?", new String[] {item_id});

        ContentValues rentalValues = new ContentValues();
        rentalValues.put(RentalContentProvider.C_DIRTY, DELETED);
        activity.getContentResolver()
                .update(RentalContentProvider.CONTENT_URI, rentalValues,
                        RentalContentProvider.C_ITEMID + " = ?", new String[] {item_id});
        activity.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
        return false;
    }

    /** Physically removes a rental row once its deletion has been synced. */
    public static void syncedDeleteRental(String rental_id, Context context) {
        context.getContentResolver().delete(RentalContentProvider.CONTENT_URI,
                RentalContentProvider.C_ID + " = ?", new String[] {rental_id});
    }

    /** Marks a rental as synced with the backend. */
    public static void syncedRental(String rental_id, Context context) {
        ContentValues values = new ContentValues();
        // NOTE(review): C_DIRTY is taken from ItemContentProvider here although this
        // updates the rental table — presumably both providers share the column name.
        values.put(ItemContentProvider.C_DIRTY, SYNCED);
        context.getContentResolver().update(RentalContentProvider.CONTENT_URI,
                values, RentalContentProvider.C_ID + " = ?", new String[] {rental_id});
    }

    /**
     * Stores the fully-fetched rental (marks it SYNCED / CURRENTLY_RENTING / PENDING)
     * and notifies the user about it.
     */
    public static void syncedGetRental(Rental rental, Context context) {
        ContentValues values = RentalContentProvider.rentalToValues(rental);
        values.put(RentalContentProvider.C_DIRTY, SYNCED);
        values.put(RentalContentProvider.C_RENTED_OUT_OR_RENTING, CURRENTLY_RENTING);
        values.put(RentalContentProvider.C_PENDING_RENTAL, PENDING);
        context.getContentResolver().update(RentalContentProvider.CONTENT_URI,
                values, RentalContentProvider.C_ID + " = ?", new String[] {rental.getRental_id()});
        // (A dead query that selected on C_DIRTY with the rental id and leaked its
        // cursor was removed here.)
        Utils.notifyRental(context, rental);
    }

    /**
     * Inserts a placeholder rental row (mock due date) flagged GET so the sync
     * layer knows to fetch the real data.
     */
    public static void fetchRental(String rental_id, Context context) {
        ContentValues values = new ContentValues();
        // Placeholder values until the real rental is fetched.
        values.put(RentalContentProvider.C_DUEDAY, 15);
        values.put(RentalContentProvider.C_DUEMONTH, 6);
        values.put(RentalContentProvider.C_DUEYEAR, 2000);
        values.put(RentalContentProvider.C_ID, rental_id);
        values.put(RentalContentProvider.C_DIRTY, GET);
        values.put(RentalContentProvider.C_RENTED_OUT_OR_RENTING, CURRENTLY_RENTING);
        values.put(RentalContentProvider.C_PENDING_RENTAL, PENDING);
        context.getContentResolver().insert(RentalContentProvider.CONTENT_URI, values);
        context.getContentResolver().notifyChange(ItemContentProvider.CONTENT_URI, null, true);
    }

    /** Hard-deletes a rental row by id. */
    public static void deleteRental(String rental_id, Context context) {
        context.getContentResolver().delete(RentalContentProvider.CONTENT_URI, RentalContentProvider.C_ID + " = ?", new String[] {rental_id});
    }

    /** Loads the (non-deleted, non-incoming) rental for the given item. */
    public static Rental retrieveRentalById(String item_id, Activity activity) {
        Cursor cursor = activity.getContentResolver().query(RentalContentProvider.CONTENT_URI, null,
                RentalContentProvider.C_ITEMID + " = ? AND " + RentalContentProvider.C_DIRTY + " != ? AND " + RentalContentProvider.C_DIRTY + " != ?",
                new String[] {item_id, Integer.toString(DELETED), Integer.toString(GET)}, null);
        try {
            cursor.moveToFirst();
            return RentalContentProvider.cursorToRental(cursor);
        } finally {
            cursor.close(); // previously leaked
        }
    }

    /** Marks the rental's Venmo charge as complete. */
    public static void venmoChargeComplete(String item_id, Context context) {
        ContentValues values = new ContentValues();
        values.put(RentalContentProvider.C_PENDING_RENTAL, COMPLETE);
        int count = context.getContentResolver().update(RentalContentProvider.CONTENT_URI,
                values, RentalContentProvider.C_ITEMID + " = ?", new String[] {item_id});
        // FIXME: junit's Assert does not belong in production code; kept for
        // behavioral compatibility — replace with proper error handling.
        Assert.assertEquals(1, count);
    }
}
| randyychan/Ledgr | src/com/example/ledgr/dataobjects/ItemsData.java | Java | apache-2.0 | 13,338 |
<?php
require_once('fido3.php');
require_once('rex.php');
/**
 * Minimal demo class: a dog that can bark, fetch a ball and return to its owner.
 */
class Dog
{
    /** No state to set up. */
    public function __construct()
    {
    }

    /** @return string the dog's bark */
    public function bark()
    {
        return "Woof! Woof!";
    }

    /** @return string status message for the fetch action */
    public function dogFetch()
    {
        return "fetching ball...ball fetched";
    }

    /** @return string status message for the come action */
    public function dogCome()
    {
        return "returning to owner";
    }
}
} | ZeusbaseWeb/zeusbasePHP | book-examples/JSPHP1-master/Ch 03/my_project/dog.php | PHP | apache-2.0 | 323 |
package org.springframework.social.strava.api.impl.json;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Jackson mixin that maps the JSON {@code id} property onto the segment's
 * constructor argument; inherits common field mappings from
 * {@link StravaObjectMixin}. Never instantiated directly.
 */
abstract class StravaSegmentMixin extends StravaObjectMixin {
    StravaSegmentMixin(
        @JsonProperty("id") long id) {}
}
| pivotal/spring-social-strava | spring-social-strava/src/main/java/org/springframework/social/strava/api/impl/json/StravaSegmentMixin.java | Java | apache-2.0 | 247 |
// Copyright 2010 Victor Iacoban
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under
// the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
package org.zmlx.hg4idea.util;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.ShutDownTracker;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager;
import com.intellij.openapi.vcs.history.FileHistoryPanelImpl;
import com.intellij.openapi.vcs.history.VcsFileRevisionEx;
import com.intellij.openapi.vcs.vfs.AbstractVcsVirtualFile;
import com.intellij.openapi.vcs.vfs.VcsVirtualFile;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.GuiUtils;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.zmlx.hg4idea.*;
import org.zmlx.hg4idea.command.HgCatCommand;
import org.zmlx.hg4idea.command.HgRemoveCommand;
import org.zmlx.hg4idea.command.HgStatusCommand;
import org.zmlx.hg4idea.command.HgWorkingCopyRevisionsCommand;
import org.zmlx.hg4idea.execution.HgCommandResult;
import org.zmlx.hg4idea.execution.ShellCommand;
import org.zmlx.hg4idea.execution.ShellCommandException;
import org.zmlx.hg4idea.log.HgHistoryUtil;
import org.zmlx.hg4idea.provider.HgChangeProvider;
import org.zmlx.hg4idea.repo.HgRepository;
import org.zmlx.hg4idea.repo.HgRepositoryManager;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* HgUtil is a collection of static utility methods for Mercurial.
*/
public abstract class HgUtil {
public static final Pattern URL_WITH_PASSWORD = Pattern.compile("(?:.+)://(?:.+)(:.+)@(?:.+)"); //http(s)://username:password@url
public static final int MANY_FILES = 100;
private static final Logger LOG = Logger.getInstance(HgUtil.class);
public static final String DOT_HG = ".hg";
public static final String TIP_REFERENCE = "tip";
public static final String HEAD_REFERENCE = "HEAD";
/**
 * Copies the classpath resource {@code python/<basename><extension>} into a fresh
 * temp file (scheduled for deletion on JVM exit) and returns it.
 *
 * <p>Fixes over the previous revision: a missing resource now raises
 * {@link FileNotFoundException} instead of an NPE, and a failure while opening the
 * output stream no longer triggers a secondary NPE in the cleanup path
 * (the old code called {@code out.close()} even when {@code out} was null).</p>
 *
 * @param basename  resource name without extension
 * @param extension resource extension, including the leading dot
 * @return the temporary copy of the resource
 * @throws IOException if the resource is missing or copying fails
 */
public static File copyResourceToTempFile(String basename, String extension) throws IOException {
    final String resourcePath = "python/" + basename + extension;
    final File tempFile = FileUtil.createTempFile(basename, extension);
    try (InputStream in = HgUtil.class.getClassLoader().getResourceAsStream(resourcePath)) {
        if (in == null) {
            throw new FileNotFoundException("Resource not found on classpath: " + resourcePath);
        }
        try (OutputStream out = new FileOutputStream(tempFile, false)) {
            final byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
            }
        }
    }
    tempFile.deleteOnExit();
    return tempFile;
}
/**
 * Recursively refreshes the given directory from disk and marks its whole
 * subtree dirty for the VCS change-detection machinery.
 */
public static void markDirectoryDirty(final Project project, final VirtualFile file)
    throws InvocationTargetException, InterruptedException {
    // Refresh the VFS subtree from disk first, then schedule the recursive VCS rescan.
    VfsUtil.markDirtyAndRefresh(true, true, false, file);
    VcsDirtyScopeManager.getInstance(project).dirDirtyRecursively(file);
}
/**
 * Marks a single file dirty for the VCS machinery (under a read action) and then
 * refreshes it from disk via a write action on the EDT.
 */
public static void markFileDirty( final Project project, final VirtualFile file ) throws InvocationTargetException, InterruptedException {
    ApplicationManager.getApplication().runReadAction(new Runnable() {
        public void run() {
            VcsDirtyScopeManager.getInstance(project).fileDirty(file);
        }
    });
    // refresh(true, false): asynchronous, non-recursive refresh of just this file.
    runWriteActionAndWait(new Runnable() {
        public void run() {
            file.refresh(true, false);
        }
    });
}
/**
* Runs the given task as a write action in the event dispatching thread and waits for its completion.
*/
public static void runWriteActionAndWait(@NotNull final Runnable runnable) throws InvocationTargetException, InterruptedException {
    // Blocks the calling thread until the write action has finished on the EDT.
    GuiUtils.runOrInvokeAndWait(new Runnable() {
        public void run() {
            ApplicationManager.getApplication().runWriteAction(runnable);
        }
    });
}
/**
* Schedules the given task to be run as a write action in the event dispatching thread.
*/
public static void runWriteActionLater(@NotNull final Runnable runnable) {
    // Fire-and-forget: queues the write action on the EDT and returns immediately.
    ApplicationManager.getApplication().invokeLater(new Runnable() {
        public void run() {
            ApplicationManager.getApplication().runWriteAction(runnable);
        }
    });
}
/**
* Returns a temporary python file that will be deleted on exit.
*
* All compiled versions of the python file will also be deleted.
*
* @param base The basename of the file to copy
* @return The temporary copy the specified python file, with all the necessary hooks installed
* to make sure it is completely removed at shutdown
*/
@Nullable
public static File getTemporaryPythonFile(String base) {
    try {
        final File file = copyResourceToTempFile(base, ".py");
        final String fileName = file.getName();
        // At JVM shutdown, delete the temp copy and any sibling files sharing its
        // name prefix (e.g. compiled '<name>.pyc' artifacts produced at runtime).
        ShutDownTracker.getInstance().registerShutdownTask(new Runnable() {
            public void run() {
                File[] files = file.getParentFile().listFiles(new FilenameFilter() {
                    public boolean accept(File dir, String name) {
                        return name.startsWith(fileName);
                    }
                });
                if (files != null) {
                    for (File file1 : files) {
                        file1.delete();
                    }
                }
            }
        });
        return file;
    } catch (IOException e) {
        // Documented contract: return null when the resource cannot be copied.
        return null;
    }
}
/**
* Calls 'hg remove' to remove given files from the VCS.
* @param project
* @param files files to be removed from the VCS.
*/
/**
 * Runs 'hg remove' for each of the given files that lives under a VCS root;
 * files without a root are skipped.
 */
public static void removeFilesFromVcs(Project project, List<FilePath> files) {
    final HgRemoveCommand removeCommand = new HgRemoveCommand(project);
    for (FilePath path : files) {
        final VirtualFile root = VcsUtil.getVcsRootFor(project, path);
        if (root != null) {
            removeCommand.executeInCurrentThread(new HgFile(root, path));
        }
    }
}
/**
* Finds the nearest parent directory which is an hg root.
* @param dir Directory which parent will be checked.
* @return Directory which is the nearest hg root being a parent of this directory,
* or <code>null</code> if this directory is not under hg.
* @see com.intellij.openapi.vcs.AbstractVcs#isVersionedDirectory(com.intellij.openapi.vfs.VirtualFile)
*/
@Nullable
public static VirtualFile getNearestHgRoot(VirtualFile dir) {
    // Walk up the directory hierarchy until a directory containing '.hg' is found.
    for (VirtualFile candidate = dir; candidate != null; candidate = candidate.getParent()) {
        if (isHgRoot(candidate)) {
            return candidate;
        }
    }
    return null;
}
/**
* Checks if the given directory is an hg root.
*/
public static boolean isHgRoot(@Nullable VirtualFile dir) {
    // A Mercurial root is any directory that directly contains a '.hg' child.
    if (dir == null) {
        return false;
    }
    return dir.findChild(DOT_HG) != null;
}
/**
* Gets the Mercurial root for the given file path or null if non exists:
* the root should not only be in directory mappings, but also the .hg repository folder should exist.
* @see #getHgRootOrThrow(com.intellij.openapi.project.Project, com.intellij.openapi.vcs.FilePath)
*/
@Nullable
public static VirtualFile getHgRootOrNull(Project project, FilePath filePath) {
    // Without a project we can only start from the file itself; with one, start
    // from whatever root the VCS directory mappings assign to the path.
    final VirtualFile start = (project == null)
                              ? VcsUtil.getVirtualFile(filePath.getPath())
                              : VcsUtil.getVcsRootFor(project, filePath);
    return getNearestHgRoot(start);
}
/**
* Get hg roots for paths
*
* @param filePaths the context paths
* @return a set of hg roots
*/
@NotNull
public static Set<VirtualFile> hgRoots(@NotNull Project project, @NotNull Collection<FilePath> filePaths) {
    // Collect the distinct hg root (if any) of every given path.
    final HashSet<VirtualFile> result = new HashSet<>();
    for (FilePath filePath : filePaths) {
        ContainerUtil.addIfNotNull(result, getHgRootOrNull(project, filePath));
    }
    return result;
}
/**
* Gets the Mercurial root for the given file path or null if non exists:
* the root should not only be in directory mappings, but also the .hg repository folder should exist.
* @see #getHgRootOrThrow(com.intellij.openapi.project.Project, com.intellij.openapi.vcs.FilePath)
* @see #getHgRootOrNull(com.intellij.openapi.project.Project, com.intellij.openapi.vcs.FilePath)
*/
@Nullable
public static VirtualFile getHgRootOrNull(Project project, @NotNull VirtualFile file) {
    // Delegates to the FilePath-based overload.
    return getHgRootOrNull(project, VcsUtil.getFilePath(file.getPath()));
}
/**
* Gets the Mercurial root for the given file path or throws a VcsException if non exists:
* the root should not only be in directory mappings, but also the .hg repository folder should exist.
* @see #getHgRootOrNull(com.intellij.openapi.project.Project, com.intellij.openapi.vcs.FilePath)
*/
@NotNull
public static VirtualFile getHgRootOrThrow(Project project, FilePath filePath) throws VcsException {
    // Same lookup as getHgRootOrNull, but absence of a root is an error here.
    final VirtualFile root = getHgRootOrNull(project, filePath);
    if (root != null) {
        return root;
    }
    throw new VcsException(HgVcsMessages.message("hg4idea.exception.file.not.under.hg", filePath.getPresentableUrl()));
}
@NotNull
public static VirtualFile getHgRootOrThrow(Project project, VirtualFile file) throws VcsException {
    // Delegates to the FilePath-based overload.
    return getHgRootOrThrow(project, VcsUtil.getFilePath(file.getPath()));
}
@Nullable
public static VirtualFile getRootForSelectedFile(@NotNull Project project) {
    // Resolve the currently selected file (if any) to its Mercurial root.
    final VirtualFile selectedFile = DvcsUtil.getSelectedFile(project);
    return selectedFile == null ? null : getHgRootOrNull(project, selectedFile);
}
/**
* Shows a message dialog to enter the name of new branch.
*
* @return name of new branch or {@code null} if user has cancelled the dialog.
*/
@Nullable
public static String getNewBranchNameFromUser(@NotNull HgRepository repository,
                                              @NotNull String dialogTitle) {
    // Input is checked on the fly by HgBranchReferenceValidator — presumably it
    // rejects names clashing with existing refs; confirm against that class.
    return Messages.showInputDialog(repository.getProject(), "Enter the name of new branch:", dialogTitle, Messages.getQuestionIcon(), "",
                                    new HgBranchReferenceValidator(repository));
}
/**
* Checks is a merge operation is in progress on the given repository.
* Actually gets the number of parents of the current revision. If there are 2 parents, then a merge is going on. Otherwise there is
* only one parent.
* @param project project to work on.
* @param repository repository which is checked on merge.
* @return True if merge operation is in progress, false if there is no merge operation.
*/
public static boolean isMergeInProgress(@NotNull Project project, VirtualFile repository) {
    // Two parents of the working-copy revision mean an uncommitted merge.
    return new HgWorkingCopyRevisionsCommand(project).parents(repository).size() > 1;
}
/**
* Groups the given files by their Mercurial repositories and returns the map of relative paths to files for each repository.
* @param hgFiles files to be grouped.
* @return key is repository, values is the non-empty list of relative paths to files, which belong to this repository.
*/
@NotNull
public static Map<VirtualFile, List<String>> getRelativePathsByRepository(Collection<HgFile> hgFiles) {
    // Bucket each file's repo-relative path under its repository root.
    final Map<VirtualFile, List<String>> pathsByRoot = new HashMap<>();
    if (hgFiles == null) {
        return pathsByRoot;
    }
    for (HgFile hgFile : hgFiles) {
        final VirtualFile root = hgFile.getRepo();
        List<String> relativePaths = pathsByRoot.get(root);
        if (relativePaths == null) {
            relativePaths = new ArrayList<>();
            pathsByRoot.put(root, relativePaths);
        }
        relativePaths.add(hgFile.getRelativePath());
    }
    return pathsByRoot;
}
@NotNull
public static HgFile getFileNameInTargetRevision(Project project, HgRevisionNumber vcsRevisionNumber, HgFile localHgFile) {
    //get file name in target revision if it was moved/renamed
    // if file was moved but not committed then hg status would return nothing, so it's better to point working dir as '.' revision
    HgStatusCommand statCommand = new HgStatusCommand.Builder(false).copySource(true).baseRevision(vcsRevisionNumber).
        targetRevision(HgRevisionNumber.getInstance("", ".")).build(project);
    Set<HgChange> changes = statCommand.executeInCurrentThread(localHgFile.getRepo(), Collections.singletonList(localHgFile.toFilePath()));
    // Look for a rename/copy record whose "after" side is our file; its "before"
    // side is the name the file had in the base revision.
    for (HgChange change : changes) {
        if (change.afterFile().equals(localHgFile)) {
            return change.beforeFile();
        }
    }
    // No rename detected: the name is unchanged in the target revision.
    return localHgFile;
}
@NotNull
public static FilePath getOriginalFileName(@NotNull FilePath filePath, ChangeListManager changeListManager) {
    // Resolve a copied/renamed file back to its pre-move path; otherwise
    // the path is returned unchanged.
    final Change change = changeListManager.getChange(filePath);
    if (change == null) {
        return filePath;
    }
    final FileStatus status = change.getFileStatus();
    final boolean movedOrCopied = status == HgChangeProvider.COPIED || status == HgChangeProvider.RENAMED;
    if (!movedOrCopied) {
        return filePath;
    }
    final ContentRevision beforeRevision = change.getBeforeRevision();
    assert beforeRevision != null : "If a file's status is copied or renamed, there must be an previous version";
    return beforeRevision.getFile();
}
/**
* Returns all HG roots in the project.
*/
public static @NotNull List<VirtualFile> getHgRepositories(@NotNull Project project) {
final List<VirtualFile> repos = new LinkedList<>();
for (VcsRoot root : ProjectLevelVcsManager.getInstance(project).getAllVcsRoots()) {
if (HgVcs.VCS_NAME.equals(root.getVcs().getName())) {
repos.add(root.getPath());
}
}
return repos;
}
@NotNull
public static Map<VirtualFile, Collection<VirtualFile>> sortByHgRoots(@NotNull Project project, @NotNull Collection<VirtualFile> files) {
Map<VirtualFile, Collection<VirtualFile>> sorted = new HashMap<>();
HgRepositoryManager repositoryManager = getRepositoryManager(project);
for (VirtualFile file : files) {
HgRepository repo = repositoryManager.getRepositoryForFile(file);
if (repo == null) {
continue;
}
Collection<VirtualFile> filesForRoot = sorted.get(repo.getRoot());
if (filesForRoot == null) {
filesForRoot = new HashSet<>();
sorted.put(repo.getRoot(), filesForRoot);
}
filesForRoot.add(file);
}
return sorted;
}
@NotNull
public static Map<VirtualFile, Collection<FilePath>> groupFilePathsByHgRoots(@NotNull Project project,
@NotNull Collection<FilePath> files) {
Map<VirtualFile, Collection<FilePath>> sorted = new HashMap<>();
if (project.isDisposed()) return sorted;
HgRepositoryManager repositoryManager = getRepositoryManager(project);
for (FilePath file : files) {
HgRepository repo = repositoryManager.getRepositoryForFile(file);
if (repo == null) {
continue;
}
Collection<FilePath> filesForRoot = sorted.get(repo.getRoot());
if (filesForRoot == null) {
filesForRoot = new HashSet<>();
sorted.put(repo.getRoot(), filesForRoot);
}
filesForRoot.add(file);
}
return sorted;
}
  /**
   * Delegates to {@link BackgroundTaskUtil#executeOnPooledThread(Runnable, Disposable)}:
   * runs the task on a pooled background thread tied to the given parent disposable.
   *
   * @return the progress indicator of the started task.
   */
  @NotNull
  public static ProgressIndicator executeOnPooledThread(@NotNull Runnable runnable, @NotNull Disposable parentDisposable) {
    return BackgroundTaskUtil.executeOnPooledThread(runnable, parentDisposable);
  }
/**
* Convert {@link VcsVirtualFile} to the {@link LocalFileSystem local} Virtual File.
*
* TODO
* It is a workaround for the following problem: VcsVirtualFiles returned from the {@link FileHistoryPanelImpl} contain the current path
* of the file, not the path that was in certain revision. This has to be fixed by making {@link HgFileRevision} implement
* {@link VcsFileRevisionEx}.
*/
@Nullable
public static VirtualFile convertToLocalVirtualFile(@Nullable VirtualFile file) {
if (!(file instanceof AbstractVcsVirtualFile)) {
return file;
}
LocalFileSystem lfs = LocalFileSystem.getInstance();
VirtualFile resultFile = lfs.findFileByPath(file.getPath());
if (resultFile == null) {
resultFile = lfs.refreshAndFindFileByPath(file.getPath());
}
return resultFile;
}
  /**
   * Computes the changes for {@code path} between two revisions via {@code hg status}.
   * Exactly one of the revisions may be null: a null {@code revNum2} means "compare with
   * the local version"; a null {@code revNum1} means "get the initial changes of revNum2".
   * Changes whose status converts to {@link FileStatus#UNKNOWN} are dropped.
   *
   * @param project  current project.
   * @param root     repository root to run the command in.
   * @param path     file or directory to diff.
   * @param revNum1  base revision, or null (then {@code revNum2} must be non-null).
   * @param revNum2  target revision, or null to compare against the working copy.
   * @return the list of converted {@link Change}s; may be empty.
   */
  @NotNull
  public static List<Change> getDiff(@NotNull final Project project,
                                     @NotNull final VirtualFile root,
                                     @NotNull final FilePath path,
                                     @Nullable final HgRevisionNumber revNum1,
                                     @Nullable final HgRevisionNumber revNum2) {
    HgStatusCommand statusCommand;
    if (revNum1 != null) {
      //rev2==null means "compare with local version"
      // copy-source tracking only makes sense for single files, not directories
      statusCommand = new HgStatusCommand.Builder(true).ignored(false).unknown(false).copySource(!path.isDirectory()).baseRevision(revNum1)
        .targetRevision(revNum2).build(project);
    }
    else {
      LOG.assertTrue(revNum2 != null, "revision1 and revision2 can't both be null. Path: " + path); //rev1 and rev2 can't be null both//
      //get initial changes//
      statusCommand =
        new HgStatusCommand.Builder(true).ignored(false).unknown(false).copySource(false).baseRevision(revNum2)
          .build(project);
    }
    Collection<HgChange> hgChanges = statusCommand.executeInCurrentThread(root, Collections.singleton(path));
    List<Change> changes = new ArrayList<>();
    //convert output changes to standard Change class
    for (HgChange hgChange : hgChanges) {
      FileStatus status = convertHgDiffStatus(hgChange.getStatus());
      if (status != FileStatus.UNKNOWN) {
        changes.add(HgHistoryUtil.createChange(project, root, hgChange.beforeFile().getRelativePath(), revNum1,
                                               hgChange.afterFile().getRelativePath(), revNum2, status));
      }
    }
    return changes;
  }
@NotNull
public static FileStatus convertHgDiffStatus(@NotNull HgFileStatusEnum hgstatus) {
if (hgstatus.equals(HgFileStatusEnum.ADDED)) {
return FileStatus.ADDED;
}
else if (hgstatus.equals(HgFileStatusEnum.DELETED)) {
return FileStatus.DELETED;
}
else if (hgstatus.equals(HgFileStatusEnum.MODIFIED)) {
return FileStatus.MODIFIED;
}
else if (hgstatus.equals(HgFileStatusEnum.COPY)) {
return HgChangeProvider.COPIED;
}
else if (hgstatus.equals(HgFileStatusEnum.UNVERSIONED)) {
return FileStatus.UNKNOWN;
}
else if (hgstatus.equals(HgFileStatusEnum.IGNORED)) {
return FileStatus.IGNORED;
}
else {
return FileStatus.UNKNOWN;
}
}
  /**
   * Loads the content of the given file at the given revision via {@code hg cat}.
   *
   * @param revisionNumber revision to read, or null (passed straight to the cat command).
   * @return the raw bytes, or an empty array when the command fails or exits non-zero.
   */
  @NotNull
  public static byte[] loadContent(@NotNull Project project, @Nullable HgRevisionNumber revisionNumber, @NotNull HgFile fileToCat) {
    HgCommandResult result = new HgCatCommand(project).execute(fileToCat, revisionNumber, fileToCat.toFilePath().getCharset());
    return result != null && result.getExitValue() == 0 ? result.getBytesOutput() : ArrayUtil.EMPTY_BYTE_ARRAY;
  }
public static String removePasswordIfNeeded(@NotNull String path) {
Matcher matcher = URL_WITH_PASSWORD.matcher(path);
if (matcher.matches()) {
return path.substring(0, matcher.start(1)) + path.substring(matcher.end(1), path.length());
}
return path;
}
@NotNull
public static String getDisplayableBranchOrBookmarkText(@NotNull HgRepository repository) {
HgRepository.State state = repository.getState();
String branchText = "";
if (state != HgRepository.State.NORMAL) {
branchText += state.toString() + " ";
}
return branchText + repository.getCurrentBranchName();
}
  /**
   * Returns the project-level {@link HgRepositoryManager} service.
   */
  @NotNull
  public static HgRepositoryManager getRepositoryManager(@NotNull Project project) {
    return ServiceManager.getService(project, HgRepositoryManager.class);
  }
  /**
   * Guesses the currently relevant repository from the selected file and the most recently
   * used root. Returns null for a disposed project or when nothing can be guessed.
   */
  @Nullable
  public static HgRepository getCurrentRepository(@NotNull Project project) {
    if (project.isDisposed()) return null;
    return DvcsUtil.guessRepositoryForFile(project, getRepositoryManager(project),
                                           DvcsUtil.getSelectedFile(project),
                                           HgProjectSettings.getInstance(project).getRecentRootPath());
  }
  /**
   * Finds the repository containing the given file.
   *
   * @return the repository, or null when the file is null, the project is disposed,
   *         or the file is not under an hg root.
   */
  @Nullable
  public static HgRepository getRepositoryForFile(@NotNull Project project, @Nullable VirtualFile file) {
    if (file == null || project.isDisposed()) return null;
    HgRepositoryManager repositoryManager = getRepositoryManager(project);
    VirtualFile root = getHgRootOrNull(project, file);
    return repositoryManager.getRepositoryForRoot(root);
  }
  /**
   * Returns the configured default (pull) path of the repository at the given root, if any.
   * The root must belong to a known repository (asserted).
   */
  @Nullable
  public static String getRepositoryDefaultPath(@NotNull Project project, @NotNull VirtualFile root) {
    HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root);
    assert hgRepository != null : "Repository can't be null for root " + root.getName();
    return hgRepository.getRepositoryConfig().getDefaultPath();
  }
  /**
   * Returns the configured default push path of the repository at the given root, if any.
   * The root must belong to a known repository (asserted).
   */
  @Nullable
  public static String getRepositoryDefaultPushPath(@NotNull Project project, @NotNull VirtualFile root) {
    HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root);
    assert hgRepository != null : "Repository can't be null for root " + root.getName();
    return hgRepository.getRepositoryConfig().getDefaultPushPath();
  }
  /**
   * Returns the configured default push path of the given repository, if any.
   */
  @Nullable
  public static String getRepositoryDefaultPushPath(@NotNull HgRepository repository) {
    return repository.getRepositoryConfig().getDefaultPushPath();
  }
  /**
   * Reads a named value from the repository's configuration.
   * The root must belong to a known repository (asserted).
   *
   * @param section    config section name.
   * @param configName key inside the section; nullability handled by the config lookup.
   * @return the value, or null if absent.
   */
  @Nullable
  public static String getConfig(@NotNull Project project,
                                 @NotNull VirtualFile root,
                                 @NotNull String section,
                                 @Nullable String configName) {
    HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root);
    assert hgRepository != null : "Repository can't be null for root " + root.getName();
    return hgRepository.getRepositoryConfig().getNamedConfig(section, configName);
  }
  /**
   * Returns all remote paths configured for the repository at the given root.
   * The root must belong to a known repository (asserted).
   */
  @NotNull
  public static Collection<String> getRepositoryPaths(@NotNull Project project,
                                                      @NotNull VirtualFile root) {
    HgRepository hgRepository = getRepositoryManager(project).getRepositoryForRoot(root);
    assert hgRepository != null : "Repository can't be null for root " + root.getName();
    return hgRepository.getRepositoryConfig().getPaths();
  }
  /**
   * Checks whether the given string points to a runnable hg executable by invoking
   * {@code hg version -q} and requiring a zero exit code with non-empty output.
   * Any failure, including exceptions while launching the process, counts as invalid.
   */
  public static boolean isExecutableValid(@Nullable String executable) {
    try {
      if (StringUtil.isEmptyOrSpaces(executable)) {
        return false;
      }
      HgCommandResult result = getVersionOutput(executable);
      return result.getExitValue() == 0 && !result.getRawOutput().isEmpty();
    }
    catch (Throwable e) {
      // Deliberately broad: validation must never propagate an error to the caller.
      LOG.info("Error during hg executable validation: ", e);
      return false;
    }
  }
@NotNull
public static HgCommandResult getVersionOutput(@NotNull String executable) throws ShellCommandException, InterruptedException {
String hgExecutable = executable.trim();
List<String> cmdArgs = new ArrayList<>();
cmdArgs.add(hgExecutable);
cmdArgs.add("version");
cmdArgs.add("-q");
ShellCommand shellCommand = new ShellCommand(cmdArgs, null, CharsetToolkit.getDefaultSystemCharset());
return shellCommand.execute(false, false);
}
public static List<String> getNamesWithoutHashes(Collection<HgNameWithHashInfo> namesWithHashes) {
//return names without duplication (actually for several heads in one branch)
List<String> names = new ArrayList<>();
for (HgNameWithHashInfo hash : namesWithHashes) {
if (!names.contains(hash.getName())) {
names.add(hash.getName());
}
}
return names;
}
  /**
   * Same as {@link #getNamesWithoutHashes(Collection)}, but with the resulting names
   * sorted in natural (alphabetical) order.
   */
  public static List<String> getSortedNamesWithoutHashes(Collection<HgNameWithHashInfo> namesWithHashes) {
    List<String> names = getNamesWithoutHashes(namesWithHashes);
    Collections.sort(names);
    return names;
  }
  /**
   * Splits an author string into (user name, email).
   * Handled shapes: {@code Name <user@host>} -> name + email; a space-free string
   * containing '@' (optionally wrapped in angle brackets) -> empty name + email;
   * anything else -> whole trimmed string as name, empty email.
   *
   * @param authorString the raw author field as recorded by hg.
   * @return a couple of (user name, email); either part may be empty but never null.
   */
  @NotNull
  public static Couple<String> parseUserNameAndEmail(@NotNull String authorString) {
    //special characters should be retained for properly filtering by username. For Mercurial "a.b" username is not equal to "a b"
    // Vasya Pupkin <vasya.pupkin@jetbrains.com> -> Vasya Pupkin , vasya.pupkin@jetbrains.com
    int startEmailIndex = authorString.indexOf('<');
    int startDomainIndex = authorString.indexOf('@');
    int endEmailIndex = authorString.indexOf('>');
    String userName;
    String email;
    // "Name <user@host>": '<' strictly after position 0, '@' inside the brackets
    if (0 < startEmailIndex && startEmailIndex < startDomainIndex && startDomainIndex < endEmailIndex) {
      email = authorString.substring(startEmailIndex + 1, endEmailIndex);
      userName = authorString.substring(0, startEmailIndex).trim();
    }
    // vasya.pupkin@email.com || <vasya.pupkin@email.com>
    else if (!authorString.contains(" ") && startDomainIndex > 0) { //simple e-mail check. john@localhost
      userName = "";
      if (startEmailIndex >= 0 && startDomainIndex > startEmailIndex && startDomainIndex < endEmailIndex) {
        email = authorString.substring(startEmailIndex + 1, endEmailIndex).trim();
      } else {
        email = authorString;
      }
    }
    else {
      // no recognizable e-mail: treat the whole string as the user name
      userName = authorString.trim();
      email = "";
    }
    return Couple.of(userName, email);
  }
  /**
   * Returns the repository's configured remote paths with any embedded passwords
   * stripped, sorted alphabetically.
   */
  @NotNull
  public static List<String> getTargetNames(@NotNull HgRepository repository) {
    return ContainerUtil.sorted(ContainerUtil.map(repository.getRepositoryConfig().getPaths(), new Function<String, String>() {
      @Override
      public String fun(String s) {
        return removePasswordIfNeeded(s);
      }
    }));
  }
}
| hurricup/intellij-community | plugins/hg4idea/src/org/zmlx/hg4idea/util/HgUtil.java | Java | apache-2.0 | 26,823 |
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.k2crypto.keyversions;
/**
* Class representing PrivateKeyVersions. Extended by specific implementations such as
* DSAPrivateKeyVersion
*
* @author John Maheswaran (maheswaran@google.com)
*/
public abstract class PrivateKeyVersion extends AsymmetricKeyVersion {
  /**
   * Constructor for PrivateKeyVersion.
   * Simply delegates to the {@link AsymmetricKeyVersion} constructor.
   *
   * @param builder The Builder object used to build this PrivateKeyVersion
   */
  protected PrivateKeyVersion(Builder builder) {
    super(builder);
  }
}
| asacamano/K2 | java/all-in-one/src/main/java/com/google/k2crypto/keyversions/PrivateKeyVersion.java | Java | apache-2.0 | 1,102 |
package liquibase.sqlgenerator.ext;
import liquibase.database.Database;
import liquibase.database.ext.HanaDBDatabase;
import liquibase.datatype.DataTypeFactory;
import liquibase.sql.Sql;
import liquibase.sql.UnparsedSql;
import liquibase.sqlgenerator.SqlGeneratorChain;
import liquibase.sqlgenerator.core.DropDefaultValueGenerator;
import liquibase.statement.core.DropDefaultValueStatement;
/**
 * SAP HANA-specific generator for dropping a column's default value: it re-ALTERs the
 * column with its full data type definition and {@code DEFAULT NULL}.
 */
public class DropDefaultValueGeneratorHanaDB extends DropDefaultValueGenerator {
    @Override
    public int getPriority() {
        // Database-level priority so this generator wins over the generic one for HANA.
        return PRIORITY_DATABASE;
    }
    @Override
    public boolean supports(DropDefaultValueStatement statement, Database database) {
        return database instanceof HanaDBDatabase;
    }
    @Override
    public Sql[] generateSql(DropDefaultValueStatement statement, Database database, SqlGeneratorChain sqlGeneratorChain) {
        // Defensive re-check: delegate to the rest of the chain for non-HANA databases.
        if (!supports(statement, database)) {
            return sqlGeneratorChain.generateSql(statement, database);
        }
        String tableName = statement.getTableName();
        String columnName = statement.getColumnName();
        String catalogName = statement.getCatalogName();
        String schemaToUse = statement.getSchemaName();
        if (schemaToUse == null) {
            schemaToUse = database.getDefaultSchemaName();
        }
        // HANA's ALTER needs the column's data type; look it up when the statement lacks it.
        String columnDataType = statement.getColumnDataType();
        if (columnDataType == null) {
            columnDataType = SqlGeneratorHelperHanaDB.getColumnDataDefinition((HanaDBDatabase) database, catalogName, schemaToUse, tableName, columnName);
        }
        String sql = "ALTER TABLE " + database.escapeTableName(catalogName, schemaToUse, tableName) +
                " ALTER (" + database.escapeColumnName(catalogName, schemaToUse, tableName, columnName) +
                " " + DataTypeFactory.getInstance().fromDescription(columnDataType, database).toDatabaseDataType(database) +
                " DEFAULT NULL)";
        return new Sql[]{
                new UnparsedSql(sql,
                        getAffectedColumn(statement))
        };
    }
}
| lbitonti/liquibase-hana | src/main/java/liquibase/sqlgenerator/ext/DropDefaultValueGeneratorHanaDB.java | Java | apache-2.0 | 2,082 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotthingsgraph.model;
import javax.annotation.Generated;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum FlowExecutionEventType {

    EXECUTION_STARTED("EXECUTION_STARTED"),
    EXECUTION_FAILED("EXECUTION_FAILED"),
    EXECUTION_ABORTED("EXECUTION_ABORTED"),
    EXECUTION_SUCCEEDED("EXECUTION_SUCCEEDED"),
    STEP_STARTED("STEP_STARTED"),
    STEP_FAILED("STEP_FAILED"),
    STEP_SUCCEEDED("STEP_SUCCEEDED"),
    ACTIVITY_SCHEDULED("ACTIVITY_SCHEDULED"),
    ACTIVITY_STARTED("ACTIVITY_STARTED"),
    ACTIVITY_FAILED("ACTIVITY_FAILED"),
    ACTIVITY_SUCCEEDED("ACTIVITY_SUCCEEDED"),
    START_FLOW_EXECUTION_TASK("START_FLOW_EXECUTION_TASK"),
    SCHEDULE_NEXT_READY_STEPS_TASK("SCHEDULE_NEXT_READY_STEPS_TASK"),
    THING_ACTION_TASK("THING_ACTION_TASK"),
    THING_ACTION_TASK_FAILED("THING_ACTION_TASK_FAILED"),
    THING_ACTION_TASK_SUCCEEDED("THING_ACTION_TASK_SUCCEEDED"),
    ACKNOWLEDGE_TASK_MESSAGE("ACKNOWLEDGE_TASK_MESSAGE");

    /** Wire value of this event type; immutable after construction. */
    private final String value;

    private FlowExecutionEventType(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return FlowExecutionEventType corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static FlowExecutionEventType fromValue(String value) {
        // isEmpty() after the null check replaces the roundabout "".equals(value)
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        for (FlowExecutionEventType enumEntry : FlowExecutionEventType.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-iotthingsgraph/src/main/java/com/amazonaws/services/iotthingsgraph/model/FlowExecutionEventType.java | Java | apache-2.0 | 2,594 |
package com.weygo.weygophone.pages.tabs.home.widget;
import android.content.Context;
import android.util.AttributeSet;
import com.weygo.weygophone.R;
import com.weygo.weygophone.pages.common.widget.WGCommonHorizontalListView;
/**
 * Horizontal list view for the "classify column" floor of the home tab.
 * Only customizes the item layout; all list behavior comes from the superclass.
 *
 * Created by muma on 2017/6/4.
 */
public class WGHomeContentFloorClassifyColumnView extends WGCommonHorizontalListView {
    public WGHomeContentFloorClassifyColumnView(Context context) {
        super(context);
    }
    public WGHomeContentFloorClassifyColumnView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    public WGHomeContentFloorClassifyColumnView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    @Override
    protected int itemResId() {
        // Layout resource inflated for each item of this horizontal list.
        return R.layout.wghome_content_floor_classify_column_item;
    }
}
| mumabinggan/WeygoPhone | app/src/main/java/com/weygo/weygophone/pages/tabs/home/widget/WGHomeContentFloorClassifyColumnView.java | Java | apache-2.0 | 858 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.el.parser;
/**
 * Visitor callback invoked for each {@link Node} during a tree traversal.
 *
 * @author Jacob Hookom [jacob@hookom.net]
 * @version $Change: 181177 $$DateTime: 2001/06/26 08:45:09 $$Author: markt $
 */
public interface NodeVisitor {
    /**
     * Visits a single node.
     *
     * @param node the node being visited
     * @throws Exception if processing the node fails; propagated to the traversal's caller
     */
    public void visit(Node node) throws Exception;
}
| asual/summer | modules/el/src/main/java/org/jboss/el/parser/NodeVisitor.java | Java | apache-2.0 | 801 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Reflection;
using Microsoft.Extensions.Internal;
namespace Microsoft.AspNetCore.SignalR
{
/// <summary>
/// Context for a Hub invocation.
/// </summary>
    /// <summary>
    /// Context for a Hub invocation.
    /// </summary>
    public class HubInvocationContext
    {
        // Set only via the internal constructor (dispatcher path); default! keeps the
        // public constructor compiling without supplying an executor.
        internal ObjectMethodExecutor ObjectMethodExecutor { get; } = default!;
        /// <summary>
        /// Instantiates a new instance of the <see cref="HubInvocationContext"/> class.
        /// </summary>
        /// <param name="context">Context for the active Hub connection and caller.</param>
        /// <param name="serviceProvider">The <see cref="IServiceProvider"/> specific to the scope of this Hub method invocation.</param>
        /// <param name="hub">The instance of the Hub.</param>
        /// <param name="hubMethod">The <see cref="MethodInfo"/> for the Hub method being invoked.</param>
        /// <param name="hubMethodArguments">The arguments provided by the client.</param>
        public HubInvocationContext(HubCallerContext context, IServiceProvider serviceProvider, Hub hub, MethodInfo hubMethod, IReadOnlyList<object?> hubMethodArguments)
        {
            Hub = hub;
            ServiceProvider = serviceProvider;
            HubMethod = hubMethod;
            HubMethodArguments = hubMethodArguments;
            Context = context;
        }
        // Internal overload used by the framework: additionally captures the prepared executor.
        internal HubInvocationContext(ObjectMethodExecutor objectMethodExecutor, HubCallerContext context, IServiceProvider serviceProvider, Hub hub, object?[] hubMethodArguments)
            : this(context, serviceProvider, hub, objectMethodExecutor.MethodInfo, hubMethodArguments)
        {
            ObjectMethodExecutor = objectMethodExecutor;
        }
        /// <summary>
        /// Gets the context for the active Hub connection and caller.
        /// </summary>
        public HubCallerContext Context { get; }
        /// <summary>
        /// Gets the Hub instance.
        /// </summary>
        public Hub Hub { get; }
        /// <summary>
        /// Gets the name of the Hub method being invoked.
        /// </summary>
        public string HubMethodName => HubMethod.Name;
        /// <summary>
        /// Gets the arguments provided by the client.
        /// </summary>
        public IReadOnlyList<object?> HubMethodArguments { get; }
        /// <summary>
        /// The <see cref="IServiceProvider"/> specific to the scope of this Hub method invocation.
        /// </summary>
        public IServiceProvider ServiceProvider { get; }
        /// <summary>
        /// The <see cref="MethodInfo"/> for the Hub method being invoked.
        /// </summary>
        public MethodInfo HubMethod { get; }
    }
}
| aspnet/AspNetCore | src/SignalR/server/Core/src/HubInvocationContext.cs | C# | apache-2.0 | 2,838 |
package org.docksidestage.postgresql.dbflute.cbean.nss;
import org.docksidestage.postgresql.dbflute.cbean.cq.ProductCategoryCQ;
/**
* The nest select set-upper of product_category.
* @author DBFlute(AutoGenerator)
*/
public class ProductCategoryNss {
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Condition query this nest-select set-upper operates on; may be null (see hasConditionQuery()).
    protected final ProductCategoryCQ _query;
    public ProductCategoryNss(ProductCategoryCQ query) { _query = query; }
    // Whether a condition query was supplied to this set-upper.
    public boolean hasConditionQuery() { return _query != null; }
    // ===================================================================================
    //                                                                     Nested Relation
    //                                                                     ===============
    /**
     * With nested relation columns to select clause. <br>
     * (商品カテゴリ)product_category by my parent_category_code, named 'productCategorySelf'.
     * @return The set-upper of more nested relation. {...with[nested-relation].with[more-nested-relation]} (NotNull)
     */
    public ProductCategoryNss withProductCategorySelf() {
        _query.xdoNss(() -> _query.queryProductCategorySelf());
        return new ProductCategoryNss(_query.queryProductCategorySelf());
    }
}
| dbflute-test/dbflute-test-dbms-postgresql | src/main/java/org/docksidestage/postgresql/dbflute/cbean/nss/ProductCategoryNss.java | Java | apache-2.0 | 1,489 |
# Migration creating the pets table.
class CreatePets < ActiveRecord::Migration
  # NOTE(review): the :type column name is reserved by ActiveRecord for single
  # table inheritance; Pet records will raise a subclass error on read unless the
  # model overrides `self.inheritance_column` — confirm this is intended.
  def change
    create_table :pets do |t|
      t.string :name
      t.string :breed
      t.string :type
      t.integer :age
      t.integer :weight
      # stored as a plain string, not a date/datetime — presumably formatted elsewhere; verify
      t.string :last_appointment
      t.timestamps null: false
    end
  end
end
| peterk99/clinic | db/migrate/20160302074132_create_pets.rb | Ruby | apache-2.0 | 278 |
/*
* Copyright 2014-2015 Objectos, Fábrica de Software LTDA.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package br.com.objectos.way.orm.compiler;
import java.util.Objects;
import javax.lang.model.element.Modifier;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.MethodSpec;
/**
* @author marcio.endo@objectos.com.br (Marcio Endo)
*/
class CompanionTypeFactory implements CompanionTypeExe {

  /** Injection metadata used to build the generated {@code get} factory method. */
  private final OrmInject inject;

  private CompanionTypeFactory(OrmInject inject) {
    this.inject = inject;
  }

  /** Creates a factory bound to the injection info of the given pojo. */
  public static CompanionTypeFactory of(OrmPojoInfo pojoInfo) {
    return new CompanionTypeFactory(pojoInfo.inject());
  }

  @Override
  public CompanionType acceptCompanionType(CompanionType type) {
    ClassName className = type.className();
    // Generated: public static <Companion> get(<inject>) { Objects.requireNonNull(...); return new <Companion>(...); }
    MethodSpec getMethod = MethodSpec.methodBuilder("get")
        .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
        .addParameter(inject.parameterSpec())
        .returns(className)
        .addStatement("$T.requireNonNull($L)", Objects.class, inject.name())
        .addStatement("return new $T($L)", className, inject.name())
        .build();
    return type.addMethod(getMethod);
  }
} | objectos/way | orm/orm-compiler/src/main/java/br/com/objectos/way/orm/compiler/CompanionTypeFactory.java | Java | apache-2.0 | 1,727 |
require 'nmea_plus/message/ais/vdm_payload/vdm_msg'
module NMEAPlus
module Message
module AIS
module VDMPayload
# AIS Type 24: Static Data Report
class VDMMsg24 < NMEAPlus::Message::AIS::VDMPayload::VDMMsg
payload_reader :part_number, 38, 2, :_u
# Override default bitstring setting to dynamically calculate what fields belong in this message
# which can be either part A or part B dpeending on the {#part_number} field
def payload_bitstring=(val)
super
case part_number
when 0
self.class.payload_reader :name, 40, 120, :_t
when 1
self.class.payload_reader :ship_cargo_type, 40, 8, :_e
self.class.payload_reader :vendor_id, 48, 18, :_t
self.class.payload_reader :model_code, 66, 4, :_u
self.class.payload_reader :serial_number, 70, 20, :_u
self.class.payload_reader :callsign, 90, 42, :_t
# If the MMSI is that of an auxiliary craft, these 30 bits are read as the MMSI of the mother ship.
# otherwise they are the vessel dimensions
if auxiliary_craft?
self.class.payload_reader :mothership_mmsi, 132, 30, :_u
else
self.class.payload_reader :ship_dimension_to_bow, 132, 9, :_u
self.class.payload_reader :ship_dimension_to_stern, 141, 9, :_u
self.class.payload_reader :ship_dimension_to_port, 150, 6, :_u
self.class.payload_reader :ship_dimension_to_starboard, 156, 6, :_u
end
end
end
# @!parse attr_reader :ship_cargo_type_description
# @return [String] Cargo type description
def ship_cargo_type_description
get_ship_cargo_type_description(ship_cargo_type)
end
end
end
end
end
end
| ifreecarve/nmea_plus | lib/nmea_plus/message/ais/vdm_payload/vdm_msg24.rb | Ruby | apache-2.0 | 1,933 |
/*
* Copyright 2015 Google Inc. All rights reserved.
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.google.maps;
import com.google.gson.FieldNamingPolicy;
import com.google.maps.errors.ApiException;
import com.google.maps.internal.ApiConfig;
import com.google.maps.internal.ApiResponse;
import com.google.maps.internal.StringJoin;
import com.google.maps.internal.StringJoin.UrlValue;
import com.google.maps.model.PlaceDetails;
/**
* A <a href="https://developers.google.com/places/web-service/details#PlaceDetailsRequests">Place
* Details</a> request.
*/
public class PlaceDetailsRequest
    extends PendingResultBase<PlaceDetails, PlaceDetailsRequest, PlaceDetailsRequest.Response> {

  // Endpoint configuration; Place Details responses use lower_case_with_underscores field names.
  static final ApiConfig API_CONFIG =
      new ApiConfig("/maps/api/place/details/json")
          .fieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES);

  public PlaceDetailsRequest(GeoApiContext context) {
    super(context, API_CONFIG, Response.class);
  }

  /**
   * Specifies the Place ID to get Place Details for. Required.
   *
   * @param placeId The Place ID to retrieve details for.
   * @return Returns this {@code PlaceDetailsRequest} for call chaining.
   */
  public PlaceDetailsRequest placeId(String placeId) {
    return param("placeid", placeId);
  }

  /**
   * Sets the SessionToken for this request. Use this for Place Details requests that are called
   * following an autocomplete request in the same user session. Optional.
   *
   * @param sessionToken Session Token is the session identifier.
   * @return Returns this {@code PlaceDetailsRequest} for call chaining.
   */
  public PlaceDetailsRequest sessionToken(PlaceAutocompleteRequest.SessionToken sessionToken) {
    return param("sessiontoken", sessionToken);
  }

  /**
   * Sets the Region for this request. The region code, specified as a ccTLD (country code top-level
   * domain) two-character value. Most ccTLD codes are identical to ISO 3166-1 codes, with some
   * exceptions. This parameter will only influence, not fully restrict, results.
   *
   * @param region The region code.
   * @return Returns this {@code PlaceDetailsRequest} for call chaining.
   */
  public PlaceDetailsRequest region(String region) {
    return param("region", region);
  }

  /**
   * Specifies the field masks of the details to be returned by PlaceDetails.
   *
   * @param fields The Field Masks of the fields to return.
   * @return Returns this {@code PlaceDetailsRequest} for call chaining.
   */
  public PlaceDetailsRequest fields(FieldMask... fields) {
    return param("fields", StringJoin.join(',', fields));
  }

  @Override
  protected void validateRequest() {
    // placeId is the only mandatory parameter of this endpoint.
    if (!params().containsKey("placeid")) {
      throw new IllegalArgumentException("Request must contain 'placeId'.");
    }
  }

  /** Raw response envelope deserialized from the Place Details JSON payload. */
  public static class Response implements ApiResponse<PlaceDetails> {

    // Top-level status code returned by the web service, e.g. "OK" or "ZERO_RESULTS".
    public String status;
    // The place details; may be null for ZERO_RESULTS.
    public PlaceDetails result;
    // Attributions that must be displayed to the user alongside the result.
    public String[] htmlAttributions;
    // Human-readable explanation accompanying an error status, if any.
    public String errorMessage;

    @Override
    public boolean successful() {
      return "OK".equals(status) || "ZERO_RESULTS".equals(status);
    }

    @Override
    public PlaceDetails getResult() {
      if (result != null) {
        // Propagate the envelope-level attributions onto the returned details object.
        result.htmlAttributions = htmlAttributions;
      }
      return result;
    }

    @Override
    public ApiException getError() {
      if (successful()) {
        return null;
      }
      return ApiException.from(status, errorMessage);
    }
  }

  /** Field masks accepted by {@link #fields(FieldMask...)}; values are the wire names. */
  public enum FieldMask implements UrlValue {
    ADDRESS_COMPONENT("address_component"),
    ADR_ADDRESS("adr_address"),
    @Deprecated
    ALT_ID("alt_id"),
    BUSINESS_STATUS("business_status"),
    FORMATTED_ADDRESS("formatted_address"),
    FORMATTED_PHONE_NUMBER("formatted_phone_number"),
    GEOMETRY("geometry"),
    GEOMETRY_LOCATION("geometry/location"),
    GEOMETRY_LOCATION_LAT("geometry/location/lat"),
    GEOMETRY_LOCATION_LNG("geometry/location/lng"),
    GEOMETRY_VIEWPORT("geometry/viewport"),
    GEOMETRY_VIEWPORT_NORTHEAST("geometry/viewport/northeast"),
    GEOMETRY_VIEWPORT_NORTHEAST_LAT("geometry/viewport/northeast/lat"),
    GEOMETRY_VIEWPORT_NORTHEAST_LNG("geometry/viewport/northeast/lng"),
    GEOMETRY_VIEWPORT_SOUTHWEST("geometry/viewport/southwest"),
    GEOMETRY_VIEWPORT_SOUTHWEST_LAT("geometry/viewport/southwest/lat"),
    GEOMETRY_VIEWPORT_SOUTHWEST_LNG("geometry/viewport/southwest/lng"),
    ICON("icon"),
    @Deprecated
    ID("id"),
    INTERNATIONAL_PHONE_NUMBER("international_phone_number"),
    NAME("name"),
    OPENING_HOURS("opening_hours"),
    @Deprecated
    PERMANENTLY_CLOSED("permanently_closed"),
    USER_RATINGS_TOTAL("user_ratings_total"),
    PHOTOS("photos"),
    PLACE_ID("place_id"),
    PLUS_CODE("plus_code"),
    PRICE_LEVEL("price_level"),
    RATING("rating"),
    @Deprecated
    REFERENCE("reference"),
    REVIEW("review"),
    @Deprecated
    SCOPE("scope"),
    TYPES("types"),
    URL("url"),
    UTC_OFFSET("utc_offset"),
    VICINITY("vicinity"),
    WEBSITE("website");

    // Wire name sent in the "fields" query parameter.
    private final String field;

    FieldMask(final String field) {
      this.field = field;
    }

    @Override
    public String toUrlValue() {
      return field;
    }
  }
}
| googlemaps/google-maps-services-java | src/main/java/com/google/maps/PlaceDetailsRequest.java | Java | apache-2.0 | 5,703 |
using System;
namespace Example1a.Models
{
    /// <summary>
    /// Abstraction over the source of <see cref="Report"/> instances.
    /// </summary>
    public interface IReportRepository
    {
        /// <summary>Returns the report for the given date.</summary>
        Report GetReport(DateTime date);
    }
}
| jskeet/DemoCode | Diagnostics/Example1a/Models/IReportRepository.cs | C# | apache-2.0 | 141 |
package com.davidflex.supermarket.agents.shop;
import com.davidflex.supermarket.agents.behaviours.shop_agent.ListenEmployeesBehaviour;
import com.davidflex.supermarket.agents.behaviours.shop_agent.ListenNewOrdersBehaviour;
import com.davidflex.supermarket.agents.behaviours.shop_agent.ShowDataBehaviour;
import com.davidflex.supermarket.agents.utils.DFUtils;
import com.davidflex.supermarket.agents.utils.JadeUtils;
import com.davidflex.supermarket.ontologies.company.CompanyOntolagy;
import com.davidflex.supermarket.ontologies.company.CompanyOntolagyVocabulary;
import com.davidflex.supermarket.ontologies.company.concepts.Warehouse;
import com.davidflex.supermarket.ontologies.ecommerce.ECommerceOntologyVocabulary;
import com.davidflex.supermarket.ontologies.ecommerce.concepts.Location;
import com.davidflex.supermarket.ontologies.shop.ShopOntology;
import com.davidflex.supermarket.ontologies.shop.ShopOntologyVocabulary;
import jade.content.lang.Codec;
import jade.content.lang.sl.SLCodec;
import jade.content.onto.BeanOntologyException;
import jade.content.onto.Ontology;
import jade.core.AID;
import jade.core.Agent;
import jade.domain.FIPAException;
import jade.wrapper.ContainerController;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
public class ShopAgent extends Agent {
    private static final Logger logger = LoggerFactory.getLogger(ShopAgent.class);

    // Content language used for ACL message payloads (FIPA-SL0, see constructor).
    private Codec codec;
    // Company ontology instance; registered under several ontology names in setup().
    private Ontology ontology;
    // Extra agent container named "personalShopAgents" -- presumably used to
    // host per-customer/per-order agents (TODO confirm against callers).
    private ContainerController container;
    // Warehouses registered via registerWarehouse().
    private List<Warehouse> warehouses;
    // Monotonic source of unique order IDs (see addNewOrder()).
    private AtomicLong orderIDs;
    // Active orders: order ID -> customer delivery location.
    private Map<Long, Location> activeOrders;
    // Drone fleet tracking: drone AID -> last position set via setDronePosition().
    private Map<AID, Location> drones;
    /**
     * Initialises the codec, ontology, agent container and all bookkeeping
     * structures. If the ontology cannot be built the agent schedules its own
     * deletion, since it could not communicate without it.
     */
    public ShopAgent() {
        codec = new SLCodec(0); // fipa-sl0
        try {
            ontology = CompanyOntolagy.getInstance();
        } catch (BeanOntologyException e) {
            logger.error("Ontology error!", e);
            // Without the ontology the agent is useless; request self-removal.
            doDelete();
        }
        container = JadeUtils.createContainer("personalShopAgents");
        warehouses = new ArrayList<>();
        orderIDs = new AtomicLong();
        activeOrders = new HashMap<>();
        drones = new HashMap<>();
    }
    /**
     * JADE lifecycle entry point: registers the content language and
     * ontologies, publishes the agent in the Directory Facilitator, and
     * installs the behaviours that drive the shop.
     */
    @Override
    protected void setup() {
        // Setup content manager
        getContentManager().registerLanguage(codec);
        // NOTE(review): the single company ontology instance is registered
        // under both the shop and the company ontology names -- presumably
        // intentional so one ontology serves both message vocabularies; confirm.
        getContentManager().registerOntology(ontology, ShopOntology.ONTOLOGY_NAME);
        getContentManager().registerOntology(ontology, CompanyOntolagyVocabulary.ONTOLOGY_NAME);
        // Register in DF
        try {
            DFUtils.registerInDF(this, ECommerceOntologyVocabulary.SHOP_NAME,
                    ECommerceOntologyVocabulary.SHOP_TYPE);
        } catch (FIPAException e) {
            logger.error("Error at registering in DF", e);
            // Unreachable via DF means no customers can find us: shut down.
            doDelete();
        }
        // Add behaviours
        addBehaviour(new ListenNewOrdersBehaviour(this));
        addBehaviour(new ListenEmployeesBehaviour(this));
        addBehaviour(new ShowDataBehaviour(this));
    }
    /**
     * JADE lifecycle exit point: withdraws the agent's entry from the
     * Directory Facilitator so other agents stop discovering it.
     */
    @Override
    protected void takeDown() {
        try {
            DFUtils.deregisterFromDF(this);
        } catch (FIPAException e) {
            // Best-effort cleanup: a failed deregistration is only logged.
            logger.error("Error at deregistering in DF", e);
        }
    }
    /** @return the codec (FIPA-SL0) used to encode message content */
    public Codec getCodec() {
        return codec;
    }

    /**
     * @return the shop ontology name.
     * NOTE(review): setup() registers the ontology under
     * {@code ShopOntology.ONTOLOGY_NAME} while this getter returns
     * {@code ShopOntologyVocabulary.ONTOLOGY_NAME} -- presumably both
     * constants hold the same string; verify.
     */
    public String getShopOntologyName() {
        return ShopOntologyVocabulary.ONTOLOGY_NAME;
    }

    /** @return the company ontology instance registered in setup() */
    public Ontology getCompanyOntology() {
        return ontology;
    }

    /** @return the "personalShopAgents" container created by the constructor */
    public ContainerController getContainer() {
        return container;
    }
/**
* Register a new order.
*
* @param location customer location
* @return orderID
*/
public long addNewOrder(Location location) {
long num = orderIDs.incrementAndGet();
activeOrders.put(num, location);
return num;
}
    /**
     * @return the live map of active orders (order ID -> delivery location).
     * Note: this is the internal mutable map; callers share and may mutate it.
     */
    public Map<Long, Location> getActiveOrders() {
        return activeOrders;
    }

    /** Adds a warehouse to the list known to this shop. */
    public void registerWarehouse(Warehouse warehouse) {
        warehouses.add(warehouse);
    }

    /**
     * @return the live list of registered warehouses (internal mutable list).
     */
    public List<Warehouse> getWarehouses() {
        return warehouses;
    }
    /** Records or updates the last known position of a delivery drone. */
    public void setDronePosition(AID drone, Location position){
        drones.put(drone, position);
    }

    /** Removes a drone from the tracking map (e.g. when it goes offline). */
    public void unregisterDrone(AID drone) {
        drones.remove(drone);
    }

    /**
     * @return the live map of tracked drones (AID -> last reported position).
     * Note: internal mutable map, shared with callers.
     */
    public Map<AID, Location> getDrones() {
        return drones;
    }
} | davidmigloz/supermarket-agent-system | src/main/java/com/davidflex/supermarket/agents/shop/ShopAgent.java | Java | apache-2.0 | 4,443 |
package org.feldspaten.hyperion.html;
import java.util.LinkedList;
import java.util.List;
/**
 * An HTML page: renders a doctype, a head section (optional title, meta tags,
 * optional stylesheet link) and the body wrapper around the contents supplied
 * through the inherited {@code Html} container.
 */
public class Page extends Html {
    /** Page title; an empty string suppresses the &lt;title&gt; element. */
    private String title = "";
    /** If > 0, autorefresh is enabled with the given interval */
    private int autoRefreshDelay = 0;
    /** Meta fields (raw attribute text, rendered inside a &lt;meta /&gt; tag). */
    private final List<String> metas = new LinkedList<>();
    /** Stylesheet file URL, or null if no stylesheet link is emitted. */
    private String stylesheet = null;

    /**
     * Builds the page header: doctype, opening &lt;html&gt;, the complete
     * &lt;head&gt; section and the opening &lt;body&gt; tag.
     *
     * @return the generated header markup
     */
    @Override
    protected String generateHeader() {
        // StringBuilder instead of the legacy synchronized StringBuffer:
        // the builder never escapes this method, so no locking is needed.
        final StringBuilder buffer = new StringBuilder();
        buffer.append("<!DOCTYPE html>");
        buffer.append("\n");
        buffer.append("<html><head>");
        buffer.append("\n");
        if (!title.isEmpty()) {
            buffer.append("<title>");
            buffer.append(title);
            buffer.append("</title>\n");
        }
        buffer.append("\n");
        if (isAutoRefreshEnabled()) {
            buffer.append("<meta http-equiv=\"refresh\" content=\""
                    + autoRefreshDelay + "\">\n");
        }
        // Add additional metas
        for (final String meta : metas) {
            buffer.append("<meta " + meta + " />\n");
        }
        // Stylesheet, if applicable
        if (stylesheet != null)
            buffer.append("<link rel=\"stylesheet\" type=\"text/css\" href=\""
                    + stylesheet + "\">\n");
        buffer.append("</head>\n");
        buffer.append("<body>");
        return buffer.toString();
    }

    /**
     * Closes the body opened by {@link #generateHeader()}.
     * NOTE(review): the &lt;html&gt; element opened in the header is never
     * closed here -- presumably the {@code Html} superclass emits it, TODO
     * confirm; otherwise the generated document is missing &lt;/html&gt;.
     */
    @Override
    protected String generateFooter() {
        return "</body>";
    }

    /** Sets the stylesheet URL to link from the page head. */
    public void setStylesheetFile(final String url) {
        this.stylesheet = url;
    }

    /** @return the current page title (empty string if unset) */
    public String getTitle() {
        return title;
    }

    /** Sets the page title rendered in the head section. */
    public void setTitle(String title) {
        this.title = title;
    }

    /** @return the autorefresh interval; values <= 0 mean disabled */
    public int getAutoRefreshDelay() {
        return autoRefreshDelay;
    }

    /** Sets the autorefresh interval; pass 0 or a negative value to disable. */
    public void setAutoRefreshDelay(int autoRefreshDelay) {
        this.autoRefreshDelay = autoRefreshDelay;
    }

    /** @return true iff a positive autorefresh interval has been configured */
    public boolean isAutoRefreshEnabled() {
        return this.autoRefreshDelay > 0;
    }

    /**
     * Add a raw meta field The meta tag is added automatically, so you don't
     * need it here
     *
     * @param meta
     *            to be added; null or blank values are silently ignored
     */
    public void addMeta(final String meta) {
        if (meta == null || meta.trim().isEmpty())
            return;
        this.metas.add(meta);
    }
}
| grisu48/Hyperion | org.feldspaten.hyperion/src/org/feldspaten/hyperion/html/Page.java | Java | apache-2.0 | 2,022 |
/**
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview This file contains additional helper definitions on top of the
* Google Closure Library's logging subsystem (see
* <http://google.github.io/closure-library/api/namespace_goog_log.html>).
*
* Aside from providing several helper functions, this file, when executed, sets
* up the logging subsystem parameters:
*
* * The logging level of the root logger is set up according to whether or not
* the compilation is performed in a debug mode (see
* <http://google.github.io/closure-library/api/namespace_goog.html#DEBUG>).
* * Log messages that bubbled till the root logger are emitted to the
* JavaScript Console.
* * Log messages are set up to be kept (probably, truncated) in a background
* page's log buffer, which allows to export them later.
*/
goog.provide('GoogleSmartCard.Logging');
goog.require('GoogleSmartCard.LogBuffer');
goog.require('GoogleSmartCard.Logging.CrashLoopDetection');
goog.require('goog.array');
goog.require('goog.asserts');
goog.require('goog.debug');
goog.require('goog.debug.Console');
goog.require('goog.log');
goog.require('goog.log.Level');
goog.require('goog.log.Logger');
goog.require('goog.object');
goog.scope(function() {
const GSC = GoogleSmartCard;
/**
* @define {boolean} Whether to make every logger created via this library a
* child of the |LOGGER_SCOPE|.
* Overriding it to false allows to reduce the boilerplate printed in every
* logged message; the default true value, on the other hand, allows to avoid
* clashes in case the extension creates and manages its own Closure Library
* loggers.
*/
GSC.Logging.USE_SCOPED_LOGGERS =
goog.define('GoogleSmartCard.Logging.USE_SCOPED_LOGGERS', true);
/**
* @define {boolean} Whether to trigger the extension reload in case a fatal
* error occurs in Release mode.
*/
GSC.Logging.SELF_RELOAD_ON_FATAL_ERROR =
goog.define('GoogleSmartCard.Logging.SELF_RELOAD_ON_FATAL_ERROR', false);
/**
* Every logger created via this library is created as a child of this logger,
* as long as the |USE_SCOPED_LOGGERS| constant is true. Ignored when that
* constant is false.
*/
const LOGGER_SCOPE = 'GoogleSmartCard';
/**
* The logging level that will be applied to the root logger (and therefore
* would be effective for all loggers unless the ones that have an explicitly
* set level).
*/
const ROOT_LOGGER_LEVEL =
goog.DEBUG ? goog.log.Level.FINE : goog.log.Level.INFO;
/**
* The capacity of the buffer that stores the emitted log messages.
*
* When the number of log messages exceeds this capacity, the messages from the
* middle will be removed (so only some first and some last messages will be
* kept at any given moment of time).
*/
const LOG_BUFFER_CAPACITY = goog.DEBUG ? 20 * 1000 : 2000;
/**
* This constant specifies the name of the special window attribute in which our
* log buffer is stored. This is used so that popup windows and other pages can
* access the background page's log buffer and therefore use a centralized place
* for aggregating logs.
*/
const GLOBAL_LOG_BUFFER_VARIABLE_NAME = 'googleSmartCard_logBuffer';
/**
* @type {!goog.log.Logger}
*/
const rootLogger =
goog.asserts.assert(goog.log.getLogger(goog.log.ROOT_LOGGER_NAME));
/**
* @type {!goog.log.Logger}
*/
const logger = GSC.Logging.USE_SCOPED_LOGGERS ?
goog.asserts.assert(goog.log.getLogger(LOGGER_SCOPE)) :
rootLogger;
/** @type {boolean} */
let wasLoggingSetUp = false;
/**
* The log buffer that aggregates all log messages, to let them be exported if
* the user requests so. This variable is initialized to a new `LogBuffer`
* instance, but if we're running outside the background page this variable is
* later reassigned to the background page's log buffer.
* @type {!GSC.LogBuffer}
*/
let logBuffer = new GSC.LogBuffer(LOG_BUFFER_CAPACITY);
/**
 * Sets up logging parameters and log buffering.
 *
 * This function is called automatically when this library file is included.
 * It is idempotent: repeated calls after the first are no-ops.
 */
GSC.Logging.setupLogging = function() {
  // Guard against double initialization (e.g. if called again explicitly).
  if (wasLoggingSetUp)
    return;
  wasLoggingSetUp = true;
  // Order matters: the console capture and root level must be in place before
  // the confirmation message below is emitted.
  setupConsoleLogging();
  setupRootLoggerLevel();
  goog.log.fine(
      logger,
      'Logging was set up with level=' + ROOT_LOGGER_LEVEL.name +
          ' and enabled logging to JS console');
  setupLogBuffer();
};
/**
 * Returns the logger with the given name, failing fatally if the Closure
 * logging subsystem could not provide one.
 * @param {string} name
 * @param {!goog.log.Level=} opt_level
 * @return {!goog.log.Logger}
 */
GSC.Logging.getLogger = function(name, opt_level) {
  const result = goog.log.getLogger(name, opt_level);
  GSC.Logging.check(result);
  goog.asserts.assert(result);
  return result;
};
/**
 * Returns a library-scoped logger: when |USE_SCOPED_LOGGERS| is on, the
 * requested name is nested under |LOGGER_SCOPE| (or is the scope itself when
 * the name is empty); otherwise the name is used as-is.
 * @param {string} name
 * @param {!goog.log.Level=} opt_level
 * @return {!goog.log.Logger}
 */
GSC.Logging.getScopedLogger = function(name, opt_level) {
  const fullName = GSC.Logging.USE_SCOPED_LOGGERS ?
      (name ? `${LOGGER_SCOPE}.${name}` : LOGGER_SCOPE) :
      name;
  return GSC.Logging.getLogger(fullName, opt_level);
};
/**
 * Returns the logger with the specified name relative to the specified parent
 * logger.
 * @param {!goog.log.Logger} parentLogger
 * @param {string} relativeName
 * @param {!goog.log.Level=} opt_level Optional level applied to the child.
 * @return {!goog.log.Logger}
 */
GSC.Logging.getChildLogger = function(parentLogger, relativeName, opt_level) {
  // Bug fix: |opt_level| was previously accepted but silently dropped; it is
  // now forwarded so callers can set the child logger's level on creation.
  return GSC.Logging.getLogger(
      parentLogger.getName() + '.' + relativeName, opt_level);
};
/**
 * Caps the logger's verbosity: if the logger is currently more verbose than
 * |boundaryLevel| (or has no effective level), its level is raised to the
 * boundary; otherwise it is left untouched.
 * @param {!goog.log.Logger} logger
 * @param {!goog.log.Level} boundaryLevel
 */
GSC.Logging.setLoggerVerbosityAtMost = function(logger, boundaryLevel) {
  const current = goog.log.getEffectiveLevel(logger);
  if (current && current.value >= boundaryLevel.value)
    return;
  goog.log.setLevel(logger, boundaryLevel);
};
/**
 * Checks that the condition is truthy, failing fatally otherwise.
 *
 * Unlike goog.asserts.assert, this check is active in non-Debug builds too.
 * @template T
 * @param {T} condition The condition to check.
 * @param {string=} opt_message Error message in case of failure.
 */
GSC.Logging.check = function(condition, opt_message) {
  if (condition)
    return;
  GSC.Logging.fail(opt_message);
};
/**
 * Like GSC.Logging.check, but a failure message is prefixed with the given
 * logger's name.
 * @template T
 * @param {!goog.log.Logger} logger Logger whose name prefixes the message.
 * @param {T} condition The condition to check.
 * @param {string=} opt_message Error message in case of failure.
 */
GSC.Logging.checkWithLogger = function(logger, condition, opt_message) {
  if (condition)
    return;
  GSC.Logging.failWithLogger(logger, opt_message);
};
/**
 * Emits a severe log with the given message and throws an Error carrying it.
 *
 * In Release mode this additionally schedules an asynchronous App reload,
 * unless a crash-and-reload loop is detected.
 * @param {string=} opt_message Error message in case of failure.
 */
GSC.Logging.fail = function(opt_message) {
  const message = opt_message || 'Failure';
  goog.log.error(rootLogger, message);
  scheduleAppReloadIfAllowed();
  throw new Error(message);
};
/**
 * Like GSC.Logging.fail, but the message is prefixed with the logger's name.
 * @param {!goog.log.Logger} logger Logger whose name prefixes the message.
 * @param {string=} opt_message Error message in case of failure.
 */
GSC.Logging.failWithLogger = function(logger, opt_message) {
  const prefix = 'Failure in ' + logger.getName();
  GSC.Logging.fail(
      opt_message === undefined ? prefix : prefix + ': ' + opt_message);
};
/**
 * Returns the log buffer instance.
 *
 * The log buffer instance was either created during this script execution, or
 * was reused from the background page's global attribute (see
 * setupLogBuffer(), which may reassign |logBuffer|).
 * @return {!GSC.LogBuffer}
 */
GSC.Logging.getLogBuffer = function() {
  return logBuffer;
};
/**
 * Asynchronously reloads the App after a fatal error, but only in Release
 * mode with self-reload enabled, and only when no crash loop is detected.
 */
function scheduleAppReloadIfAllowed() {
  // Never self-reload in Debug builds or when the feature is switched off.
  if (goog.DEBUG || !GSC.Logging.SELF_RELOAD_ON_FATAL_ERROR)
    return;
  GSC.Logging.CrashLoopDetection.handleImminentCrash()
      .then(function(isInCrashLoop) {
        if (isInCrashLoop) {
          // Reloading again would just crash again; stay alive so the
          // accumulated failure logs can still be exported.
          goog.log.info(
              rootLogger,
              'Crash loop detected. The application is defunct, but the ' +
                  'execution state is kept in order to retain the failure logs.');
          return;
        }
        goog.log.info(
            rootLogger, 'Reloading the application due to the fatal error...');
        reloadApp();
      })
      .catch(function() {
        // Don't do anything for repeated crashes within a single run.
      });
}
/**
 * Restarts the App, covering both non-kiosk and kiosk execution modes.
 */
function reloadApp() {
  // This method works only in non-kiosk mode. Since this is a much more common
  // case and as this function doesn't generate errors in any case, this method
  // is called first.
  chrome.runtime.reload();
  // This method works only in kiosk mode.
  chrome.runtime.restart();
}
/**
 * Routes log records to the JavaScript Console, formatting each record with
 * an absolute (not relative) timestamp.
 */
function setupConsoleLogging() {
  const debugConsole = new goog.debug.Console();
  const recordFormatter = debugConsole.getFormatter();
  recordFormatter.showAbsoluteTime = true;
  recordFormatter.showRelativeTime = false;
  debugConsole.setCapturing(true);
}
/**
 * Applies |ROOT_LOGGER_LEVEL| to the root logger; all loggers without an
 * explicitly set level inherit it.
 */
function setupRootLoggerLevel() {
  goog.log.setLevel(rootLogger, ROOT_LOGGER_LEVEL);
}
/**
 * Attaches the page's log buffer to the root logger and, when running outside
 * the background page, hands over to the background page's buffer so all logs
 * are aggregated in one place.
 */
function setupLogBuffer() {
  GSC.LogBuffer.attachBufferToLogger(
      logBuffer, rootLogger, document.location.href);

  if (!chrome || !chrome.runtime || !chrome.runtime.getBackgroundPage) {
    // We don't know whether we're running inside the background page and
    // the API for talking to it is unavailable - therefore no action needed,
    // i.e., our page will continue using our log buffer. This should only
    // happen in tests or if this code is running outside an app/extension.
    return;
  }

  // Expose our log buffer in the global window properties. Pages other than the
  // background will use it to access the background page's log buffer - see the
  // code directly below.
  goog.global[GLOBAL_LOG_BUFFER_VARIABLE_NAME] = logBuffer;

  // Note: the callback below runs asynchronously, so until it fires the page
  // keeps logging into its own buffer; those entries are copied over later.
  chrome.runtime.getBackgroundPage(function(backgroundPage) {
    GSC.Logging.check(backgroundPage);
    goog.asserts.assert(backgroundPage);
    if (backgroundPage === window) {
      // We're running inside the background page - no action needed.
      return;
    }
    // We've discovered we're running outside the background page - so need to
    // switch to using the background page's log buffer in order to keep all
    // logs aggregated and available in a single place.
    // First, obtain a reference to the background page's log buffer.
    const backgroundLogBuffer =
        /** @type {GSC.LogBuffer} */ (
            backgroundPage[GLOBAL_LOG_BUFFER_VARIABLE_NAME]);
    GSC.Logging.check(backgroundLogBuffer);
    goog.asserts.assert(backgroundLogBuffer);
    // Copy the logs we've accumulated in the current page into the background
    // page's log buffer.
    logBuffer.copyToOtherBuffer(backgroundLogBuffer);
    // From now, start using the background page's buffer for collecting data
    // from our page's loggers. Dispose of our log buffer to avoid storing new
    // logs twice.
    GSC.LogBuffer.attachBufferToLogger(
        backgroundLogBuffer, rootLogger, document.location.href);
    logBuffer.dispose();
    // Switch our reference to the background page's log buffer.
    logBuffer = backgroundLogBuffer;
    // The global reference is not needed if we're not the background page.
    delete goog.global[GLOBAL_LOG_BUFFER_VARIABLE_NAME];
  });
}
GSC.Logging.setupLogging();
}); // goog.scope
| GoogleChromeLabs/chromeos_smart_card_connector | common/js/src/logging/logging.js | JavaScript | apache-2.0 | 12,352 |