code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/** * Copyright 2008 - 2015 The Loon Game Engine Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * @project loon * @author cping * @email:javachenpeng@yahoo.com * @version 0.5 */ package loon.gwtref.client; @SuppressWarnings("rawtypes") public class Parameter { final String name; final CachedTypeLookup type; final String jnsi; Parameter(String name, Class type, String jnsi) { this.name = name; this.type = new CachedTypeLookup(type); this.jnsi = jnsi; } public String getName() { return name; } public Type getType() { return type.getType(); } public Class getClazz() { return type.clazz; } public String getJnsi() { return jnsi; } @Override public String toString() { return "Parameter [name=" + name + ", type=" + type + ", jnsi=" + jnsi + "]"; } }
cping/LGame
Java/Loon-Neo-GWT/src/loon/gwtref/client/Parameter.java
Java
apache-2.0
1,326
//===--- CompletionInstance.cpp -------------------------------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2019 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// #include "swift/IDE/CompletionInstance.h" #include "swift/AST/ASTContext.h" #include "swift/AST/DiagnosticEngine.h" #include "swift/AST/DiagnosticsFrontend.h" #include "swift/AST/Module.h" #include "swift/AST/PrettyStackTrace.h" #include "swift/AST/SourceFile.h" #include "swift/Basic/LangOptions.h" #include "swift/Basic/PrettyStackTrace.h" #include "swift/Basic/SourceManager.h" #include "swift/ClangImporter/ClangModule.h" #include "swift/Driver/FrontendUtil.h" #include "swift/Frontend/Frontend.h" #include "swift/Parse/Lexer.h" #include "swift/Parse/PersistentParserState.h" #include "swift/Serialization/SerializedModuleLoader.h" #include "swift/Subsystems.h" #include "clang/AST/ASTContext.h" #include "llvm/ADT/Hashing.h" #include "llvm/Support/MemoryBuffer.h" using namespace swift; using namespace ide; std::unique_ptr<llvm::MemoryBuffer> swift::ide::makeCodeCompletionMemoryBuffer(const llvm::MemoryBuffer *origBuf, unsigned &Offset, StringRef bufferIdentifier) { auto origBuffSize = origBuf->getBufferSize(); if (Offset > origBuffSize) Offset = origBuffSize; auto newBuffer = llvm::WritableMemoryBuffer::getNewUninitMemBuffer( origBuffSize + 1, bufferIdentifier); auto *pos = origBuf->getBufferStart() + Offset; auto *newPos = std::copy(origBuf->getBufferStart(), pos, newBuffer->getBufferStart()); *newPos = '\0'; std::copy(pos, origBuf->getBufferEnd(), newPos + 1); return std::unique_ptr<llvm::MemoryBuffer>(newBuffer.release()); } namespace { /// Returns index 
number of \p D in \p Decls . If it's not found, returns ~0. template <typename Range> unsigned findIndexInRange(Decl *D, const Range &Decls) { unsigned N = 0; for (auto I = Decls.begin(), E = Decls.end(); I != E; ++I) { if ((*I)->isImplicit()) continue; if (*I == D) return N; ++N; } return ~0U; } /// Return the element at \p N in \p Decls . template <typename Range> Decl *getElementAt(const Range &Decls, unsigned N) { for (auto I = Decls.begin(), E = Decls.end(); I != E; ++I) { if ((*I)->isImplicit()) continue; if (N == 0) return *I; --N; } return nullptr; } /// Find the equivalent \c DeclContext with \p DC from \p SF AST. /// This assumes the AST which contains \p DC has exact the same structure with /// \p SF. static DeclContext *getEquivalentDeclContextFromSourceFile(DeclContext *DC, SourceFile *SF) { PrettyStackTraceDeclContext trace("getting equivalent decl context for", DC); auto *newDC = DC; // NOTE: Shortcut for DC->getParentSourceFile() == SF case is not needed // because they should be always different. // Get the index path in the current AST. SmallVector<unsigned, 4> IndexStack; do { auto *D = newDC->getAsDecl(); if (!D) return nullptr; auto *parentDC = newDC->getParent(); unsigned N = ~0U; if (auto accessor = dyn_cast<AccessorDecl>(D)) { // The AST for accessors is like: // DeclContext -> AbstractStorageDecl -> AccessorDecl // We need to push the index of the accessor within the accessor list // of the storage. 
auto *storage = accessor->getStorage(); if (!storage) return nullptr; auto accessorN = findIndexInRange(accessor, storage->getAllAccessors()); IndexStack.push_back(accessorN); D = storage; } if (auto parentSF = dyn_cast<SourceFile>(parentDC)) { N = findIndexInRange(D, parentSF->getTopLevelDecls()); } else if (auto parentIDC = dyn_cast_or_null<IterableDeclContext>( parentDC->getAsDecl())) { N = findIndexInRange(D, parentIDC->getMembers()); } else { #ifndef NDEBUG llvm_unreachable("invalid DC kind for finding equivalent DC (indexpath)"); #endif return nullptr; } // Not found in the decl context tree. if (N == ~0U) { return nullptr; } IndexStack.push_back(N); newDC = parentDC; } while (!newDC->isModuleScopeContext()); assert(isa<SourceFile>(newDC) && "DC should be in a SourceFile"); // Query the equivalent decl context from the base SourceFile using the index // path. newDC = SF; do { auto N = IndexStack.pop_back_val(); Decl *D = nullptr; if (auto parentSF = dyn_cast<SourceFile>(newDC)) D = getElementAt(parentSF->getTopLevelDecls(), N); else if (auto parentIDC = dyn_cast<IterableDeclContext>(newDC->getAsDecl())) D = getElementAt(parentIDC->getMembers(), N); else llvm_unreachable("invalid DC kind for finding equivalent DC (query)"); if (auto storage = dyn_cast_or_null<AbstractStorageDecl>(D)) { if (IndexStack.empty()) return nullptr; auto accessorN = IndexStack.pop_back_val(); D = getElementAt(storage->getAllAccessors(), accessorN); } newDC = dyn_cast_or_null<DeclContext>(D); if (!newDC) return nullptr; } while (!IndexStack.empty()); assert(newDC->getContextKind() == DC->getContextKind()); return newDC; } /// For each dependency file in \p CI, run \p callback until the callback /// returns \c true. Returns \c true if any callback call returns \c true, \c /// false otherwise. static bool forEachDependencyUntilTrue(CompilerInstance &CI, unsigned excludeBufferID, llvm::function_ref<bool(StringRef)> callback) { // Check files in the current module. 
for (FileUnit *file : CI.getMainModule()->getFiles()) { StringRef filename; if (auto SF = dyn_cast<SourceFile>(file)) { if (SF->getBufferID() == excludeBufferID) continue; filename = SF->getFilename(); } else if (auto LF = dyn_cast<LoadedFile>(file)) filename = LF->getFilename(); else continue; // Ignore synthesized files. if (filename.empty() || filename.front() == '<') continue; if (callback(filename)) return true; } // Check other non-system depenencies (e.g. modules, headers). for (auto &dep : CI.getDependencyTracker()->getDependencies()) { if (callback(dep)) return true; } for (auto &dep : CI.getDependencyTracker()->getIncrementalDependencies()) { if (callback(dep)) return true; } return false; } /// Collect hash codes of the dependencies into \c Map. static void cacheDependencyHashIfNeeded(CompilerInstance &CI, unsigned excludeBufferID, llvm::StringMap<llvm::hash_code> &Map) { auto &FS = CI.getFileSystem(); forEachDependencyUntilTrue( CI, excludeBufferID, [&](StringRef filename) { if (Map.count(filename)) return false; auto stat = FS.status(filename); if (!stat) return false; // We will check the hash only if the modification time of the dependecy // is zero. See 'areAnyDependentFilesInvalidated() below'. if (stat->getLastModificationTime() != llvm::sys::TimePoint<>()) return false; auto buf = FS.getBufferForFile(filename); Map[filename] = llvm::hash_value(buf.get()->getBuffer()); return false; }); } /// Check if any dependent files are modified since \p timestamp. static bool areAnyDependentFilesInvalidated( CompilerInstance &CI, llvm::vfs::FileSystem &FS, unsigned excludeBufferID, llvm::sys::TimePoint<> timestamp, llvm::StringMap<llvm::hash_code> &Map) { return forEachDependencyUntilTrue( CI, excludeBufferID, [&](StringRef filePath) { auto stat = FS.status(filePath); if (!stat) // Missing. return true; auto lastModTime = stat->getLastModificationTime(); if (lastModTime > timestamp) // Modified. 
return true; // If the last modification time is zero, this file is probably from a // virtual file system. We need to check the content. if (lastModTime == llvm::sys::TimePoint<>()) { // Get the hash code of the last content. auto oldHashEntry = Map.find(filePath); if (oldHashEntry == Map.end()) // Unreachable? Not virtual in old filesystem, but virtual in new // one. return true; auto oldHash = oldHashEntry->second; // Calculate the hash code of the current content. auto newContent = FS.getBufferForFile(filePath); if (!newContent) // Unreachable? stat succeeded, but coundn't get the content. return true; auto newHash = llvm::hash_value(newContent.get()->getBuffer()); if (oldHash != newHash) return true; } return false; }); } /// Get interface hash of \p SF including the type members in the file. /// /// See if the inteface of the function and types visible from a function body /// has changed since the last completion. If they haven't changed, completion /// can reuse the existing AST of the source file. \c SF->getInterfaceHash() is /// not enough because it doesn't take the interface of the type members into /// account. For example: /// /// struct S { /// func foo() {} /// } /// func main(val: S) { /// val.<HERE> /// } /// /// In this case, we need to ensure that the interface of \c S hasn't changed. /// Note that we don't care about local types (i.e. type declarations inside /// function bodies, closures, or top level statement bodies) because they are /// not visible from other functions where the completion is happening. void getInterfaceHashIncludingTypeMembers(SourceFile *SF, llvm::SmallString<32> &str) { /// FIXME: Gross. Hashing multiple "hash" values. 
llvm::MD5 hash; SF->getInterfaceHash(str); hash.update(str); std::function<void(IterableDeclContext *)> hashTypeBodyFingerprints = [&](IterableDeclContext *IDC) { if (auto fp = IDC->getBodyFingerprint()) hash.update(*fp); for (auto *member : IDC->getParsedMembers()) if (auto *childIDC = dyn_cast<IterableDeclContext>(member)) hashTypeBodyFingerprints(childIDC); }; for (auto *D : SF->getTopLevelDecls()) { if (auto IDC = dyn_cast<IterableDeclContext>(D)) hashTypeBodyFingerprints(IDC); } llvm::MD5::MD5Result result; hash.final(result); str = result.digest(); } } // namespace bool CompletionInstance::performCachedOperationIfPossible( llvm::hash_code ArgsHash, llvm::IntrusiveRefCntPtr<llvm::vfs::FileSystem> FileSystem, llvm::MemoryBuffer *completionBuffer, unsigned int Offset, DiagnosticConsumer *DiagC, llvm::function_ref<void(CompilerInstance &, bool)> Callback) { llvm::PrettyStackTraceString trace( "While performing cached completion if possible"); if (!CachedCI) return false; if (CachedReuseCount >= Opts.MaxASTReuseCount) return false; if (CachedArgHash != ArgsHash) return false; auto &CI = *CachedCI; auto *oldSF = CI.getCodeCompletionFile(); assert(oldSF->getBufferID()); auto *oldState = oldSF->getDelayedParserState(); assert(oldState->hasCodeCompletionDelayedDeclState()); auto &oldInfo = oldState->getCodeCompletionDelayedDeclState(); auto &SM = CI.getSourceMgr(); auto bufferName = completionBuffer->getBufferIdentifier(); if (SM.getIdentifierForBuffer(*oldSF->getBufferID()) != bufferName) return false; if (shouldCheckDependencies()) { if (areAnyDependentFilesInvalidated( CI, *FileSystem, *oldSF->getBufferID(), DependencyCheckedTimestamp, InMemoryDependencyHash)) return false; DependencyCheckedTimestamp = std::chrono::system_clock::now(); } // Parse the new buffer into temporary SourceFile. 
SourceManager tmpSM; auto tmpBufferID = tmpSM.addMemBufferCopy(completionBuffer); tmpSM.setCodeCompletionPoint(tmpBufferID, Offset); LangOptions langOpts = CI.getASTContext().LangOpts; langOpts.DisableParserLookup = true; TypeCheckerOptions typeckOpts = CI.getASTContext().TypeCheckerOpts; SearchPathOptions searchPathOpts = CI.getASTContext().SearchPathOpts; DiagnosticEngine tmpDiags(tmpSM); ClangImporterOptions clangOpts; std::unique_ptr<ASTContext> tmpCtx( ASTContext::get(langOpts, typeckOpts, searchPathOpts, clangOpts, tmpSM, tmpDiags)); registerParseRequestFunctions(tmpCtx->evaluator); registerIDERequestFunctions(tmpCtx->evaluator); registerTypeCheckerRequestFunctions(tmpCtx->evaluator); registerSILGenRequestFunctions(tmpCtx->evaluator); ModuleDecl *tmpM = ModuleDecl::create(Identifier(), *tmpCtx); SourceFile *tmpSF = new (*tmpCtx) SourceFile(*tmpM, oldSF->Kind, tmpBufferID, oldSF->getParsingOptions()); // FIXME: Since we don't setup module loaders on the temporary AST context, // 'canImport()' conditional compilation directive always fails. That causes // interface hash change and prevents fast-completion. // Parse and get the completion context. auto *newState = tmpSF->getDelayedParserState(); // Couldn't find any completion token? if (!newState->hasCodeCompletionDelayedDeclState()) return false; auto &newInfo = newState->getCodeCompletionDelayedDeclState(); unsigned newBufferID; DeclContext *traceDC = nullptr; switch (newInfo.Kind) { case CodeCompletionDelayedDeclKind::FunctionBody: { // If the interface has changed, AST must be refreshed. 
llvm::SmallString<32> oldInterfaceHash{}; llvm::SmallString<32> newInterfaceHash{}; getInterfaceHashIncludingTypeMembers(oldSF, oldInterfaceHash); getInterfaceHashIncludingTypeMembers(tmpSF, newInterfaceHash); if (oldInterfaceHash != newInterfaceHash) return false; DeclContext *DC = getEquivalentDeclContextFromSourceFile(newInfo.ParentContext, oldSF); if (!DC || !isa<AbstractFunctionDecl>(DC)) return false; // OK, we can perform fast completion for this. Update the orignal delayed // decl state. // Fast completion keeps the buffer in memory for multiple completions. // To reduce the consumption, slice the source buffer so it only holds // the portion that is needed for the second pass. auto startOffset = newInfo.StartOffset; if (newInfo.PrevOffset != ~0u) startOffset = newInfo.PrevOffset; auto startLoc = tmpSM.getLocForOffset(tmpBufferID, startOffset); startLoc = Lexer::getLocForStartOfLine(tmpSM, startLoc); startOffset = tmpSM.getLocOffsetInBuffer(startLoc, tmpBufferID); auto endOffset = newInfo.EndOffset; auto endLoc = tmpSM.getLocForOffset(tmpBufferID, endOffset); endLoc = Lexer::getLocForEndOfToken(tmpSM, endLoc); endOffset = tmpSM.getLocOffsetInBuffer(endLoc, tmpBufferID); newInfo.StartOffset -= startOffset; newInfo.EndOffset -= startOffset; if (newInfo.PrevOffset != ~0u) newInfo.PrevOffset -= startOffset; auto sourceText = completionBuffer->getBuffer().slice(startOffset, endOffset); auto newOffset = Offset - startOffset; newBufferID = SM.addMemBufferCopy(sourceText, bufferName); SM.openVirtualFile(SM.getLocForBufferStart(newBufferID), tmpSM.getDisplayNameForLoc(startLoc), tmpSM.getPresumedLineAndColumnForLoc(startLoc).first - 1); SM.setCodeCompletionPoint(newBufferID, newOffset); // Construct dummy scopes. We don't need to restore the original scope // because they are probably not 'isResolvable()' anyway. 
auto &SI = oldState->getScopeInfo(); assert(SI.getCurrentScope() == nullptr); Scope Top(SI, ScopeKind::TopLevel); Scope Body(SI, ScopeKind::FunctionBody); assert(oldInfo.Kind == CodeCompletionDelayedDeclKind::FunctionBody && "If the interface hash is the same as old one, the previous kind " "must be FunctionBody too. Otherwise, hashing is too weak"); oldInfo.Kind = CodeCompletionDelayedDeclKind::FunctionBody; oldInfo.ParentContext = DC; oldInfo.StartOffset = newInfo.StartOffset; oldInfo.EndOffset = newInfo.EndOffset; oldInfo.PrevOffset = newInfo.PrevOffset; oldState->restoreCodeCompletionDelayedDeclState(oldInfo); auto newBufferStart = SM.getRangeForBuffer(newBufferID).getStart(); SourceRange newBodyRange(newBufferStart.getAdvancedLoc(newInfo.StartOffset), newBufferStart.getAdvancedLoc(newInfo.EndOffset)); auto *AFD = cast<AbstractFunctionDecl>(DC); AFD->setBodyToBeReparsed(newBodyRange); SM.setReplacedRange({AFD->getOriginalBodySourceRange(), newBodyRange}); oldSF->clearScope(); traceDC = AFD; break; } case CodeCompletionDelayedDeclKind::Decl: case CodeCompletionDelayedDeclKind::TopLevelCodeDecl: { // Support decl/top-level code only if the completion happens in a single // file 'main' script (e.g. playground). auto *oldM = oldInfo.ParentContext->getParentModule(); if (oldM->getFiles().size() != 1 || oldSF->Kind != SourceFileKind::Main) return false; // Perform fast completion. // Prepare the new buffer in the source manager. auto sourceText = completionBuffer->getBuffer(); if (newInfo.Kind == CodeCompletionDelayedDeclKind::TopLevelCodeDecl) { // We don't need the source text after the top-level code. 
auto endOffset = newInfo.EndOffset; auto endLoc = tmpSM.getLocForOffset(tmpBufferID, endOffset); endLoc = Lexer::getLocForEndOfToken(tmpSM, endLoc); endOffset = tmpSM.getLocOffsetInBuffer(endLoc, tmpBufferID); sourceText = sourceText.slice(0, endOffset); } newBufferID = SM.addMemBufferCopy(sourceText, bufferName); SM.setCodeCompletionPoint(newBufferID, Offset); // Create a new module and a source file using the current AST context. auto &Ctx = oldM->getASTContext(); auto *newM = ModuleDecl::createMainModule(Ctx, oldM->getName(), oldM->getImplicitImportInfo()); auto *newSF = new (Ctx) SourceFile(*newM, SourceFileKind::Main, newBufferID, oldSF->getParsingOptions()); newM->addFile(*newSF); // Tell the compiler instance we've replaced the main module. CI.setMainModule(newM); // Re-process the whole file (parsing will be lazily triggered). Still // re-use imported modules. performImportResolution(*newSF); bindExtensions(*newM); traceDC = newM; #ifndef NDEBUG const auto *reparsedState = newSF->getDelayedParserState(); assert(reparsedState->hasCodeCompletionDelayedDeclState() && "Didn't find completion token?"); auto &reparsedInfo = reparsedState->getCodeCompletionDelayedDeclState(); assert(reparsedInfo.Kind == newInfo.Kind); #endif break; } } { PrettyStackTraceDeclContext trace("performing cached completion", traceDC); if (DiagC) CI.addDiagnosticConsumer(DiagC); Callback(CI, /*reusingASTContext=*/true); if (DiagC) CI.removeDiagnosticConsumer(DiagC); } CachedReuseCount += 1; return true; } bool CompletionInstance::performNewOperation( Optional<llvm::hash_code> ArgsHash, swift::CompilerInvocation &Invocation, llvm::IntrusiveRefCntPtr<llvm::vfs::FileSystem> FileSystem, llvm::MemoryBuffer *completionBuffer, unsigned int Offset, std::string &Error, DiagnosticConsumer *DiagC, llvm::function_ref<void(CompilerInstance &, bool)> Callback) { llvm::PrettyStackTraceString trace("While performing new completion"); auto isCachedCompletionRequested = ArgsHash.hasValue(); auto 
TheInstance = std::make_unique<CompilerInstance>(); // Track non-system dependencies in fast-completion mode to invalidate the // compiler instance if any dependent files are modified. Invocation.getFrontendOptions().IntermoduleDependencyTracking = IntermoduleDepTrackingMode::ExcludeSystem; { auto &CI = *TheInstance; if (DiagC) CI.addDiagnosticConsumer(DiagC); SWIFT_DEFER { if (DiagC) CI.removeDiagnosticConsumer(DiagC); }; if (FileSystem != llvm::vfs::getRealFileSystem()) CI.getSourceMgr().setFileSystem(FileSystem); Invocation.setCodeCompletionPoint(completionBuffer, Offset); if (CI.setup(Invocation)) { Error = "failed to setup compiler instance"; return false; } registerIDERequestFunctions(CI.getASTContext().evaluator); // If we're expecting a standard library, but there either isn't one, or it // failed to load, let's bail early and hand back an empty completion // result to avoid any downstream crashes. if (CI.loadStdlibIfNeeded()) return true; CI.performParseAndResolveImportsOnly(); // If we didn't find a code completion token, bail. auto *state = CI.getCodeCompletionFile()->getDelayedParserState(); if (!state->hasCodeCompletionDelayedDeclState()) return true; Callback(CI, /*reusingASTContext=*/false); } // Cache the compiler instance if fast completion is enabled. 
if (isCachedCompletionRequested) cacheCompilerInstance(std::move(TheInstance), *ArgsHash); return true; } void CompletionInstance::cacheCompilerInstance( std::unique_ptr<CompilerInstance> CI, llvm::hash_code ArgsHash) { CachedCI = std::move(CI); CachedArgHash = ArgsHash; auto now = std::chrono::system_clock::now(); DependencyCheckedTimestamp = now; CachedReuseCount = 0; InMemoryDependencyHash.clear(); cacheDependencyHashIfNeeded( *CachedCI, CachedCI->getASTContext().SourceMgr.getCodeCompletionBufferID(), InMemoryDependencyHash); } bool CompletionInstance::shouldCheckDependencies() const { assert(CachedCI); using namespace std::chrono; auto now = system_clock::now(); auto threshold = DependencyCheckedTimestamp + seconds(Opts.DependencyCheckIntervalSecond); return threshold < now; } void CompletionInstance::setOptions(CompletionInstance::Options NewOpts) { std::lock_guard<std::mutex> lock(mtx); Opts = NewOpts; } bool swift::ide::CompletionInstance::performOperation( swift::CompilerInvocation &Invocation, llvm::ArrayRef<const char *> Args, llvm::IntrusiveRefCntPtr<llvm::vfs::FileSystem> FileSystem, llvm::MemoryBuffer *completionBuffer, unsigned int Offset, std::string &Error, DiagnosticConsumer *DiagC, llvm::function_ref<void(CompilerInstance &, bool)> Callback) { // Always disable source location resolutions from .swiftsourceinfo file // because they're somewhat heavy operations and aren't needed for completion. Invocation.getFrontendOptions().IgnoreSwiftSourceInfo = true; // Disable to build syntax tree because code-completion skips some portion of // source text. That breaks an invariant of syntax tree building. Invocation.getLangOptions().BuildSyntaxTree = false; // We don't need token list. Invocation.getLangOptions().CollectParsedToken = false; // Compute the signature of the invocation. 
llvm::hash_code ArgsHash(0); for (auto arg : Args) ArgsHash = llvm::hash_combine(ArgsHash, StringRef(arg)); // Concurrent completions will block so that they have higher chance to use // the cached completion instance. std::lock_guard<std::mutex> lock(mtx); if (performCachedOperationIfPossible(ArgsHash, FileSystem, completionBuffer, Offset, DiagC, Callback)) { return true; } if(performNewOperation(ArgsHash, Invocation, FileSystem, completionBuffer, Offset, Error, DiagC, Callback)) { return true; } assert(!Error.empty()); return false; }
atrick/swift
lib/IDE/CompletionInstance.cpp
C++
apache-2.0
24,230
/* * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. */ /* * Copyright 1999-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id: ExpressionContext.java,v 1.2.4.1 2005/09/10 19:34:03 jeffsuttor Exp $ */ package com.sun.org.apache.xalan.internal.extensions; import javax.xml.transform.ErrorListener; import com.sun.org.apache.xpath.internal.objects.XObject; import org.w3c.dom.Node; import org.w3c.dom.traversal.NodeIterator; /** * An object that implements this interface can supply * information about the current XPath expression context. */ public interface ExpressionContext { /** * Get the current context node. * @return The current context node. */ public Node getContextNode(); /** * Get the current context node list. * @return An iterator for the current context list, as * defined in XSLT. */ public NodeIterator getContextNodes(); /** * Get the error listener. * @return The registered error listener. */ public ErrorListener getErrorListener(); /** * Get the value of a node as a number. * @param n Node to be converted to a number. May be null. * @return value of n as a number. */ public double toNumber(Node n); /** * Get the value of a node as a string. * @param n Node to be converted to a string. May be null. * @return value of n as a string, or an empty string if n is null. 
*/ public String toString(Node n); /** * Get a variable based on it's qualified name. * * @param qname The qualified name of the variable. * * @return The evaluated value of the variable. * * @throws javax.xml.transform.TransformerException */ public XObject getVariableOrParam(com.sun.org.apache.xml.internal.utils.QName qname) throws javax.xml.transform.TransformerException; /** * Get the XPathContext that owns this ExpressionContext. * * Note: exslt:function requires the XPathContext to access * the variable stack and TransformerImpl. * * @return The current XPathContext. * @throws javax.xml.transform.TransformerException */ public com.sun.org.apache.xpath.internal.XPathContext getXPathContext() throws javax.xml.transform.TransformerException; }
haikuowuya/android_system_code
src/com/sun/org/apache/xalan/internal/extensions/ExpressionContext.java
Java
apache-2.0
2,875
/* * Copyright 2016-2018, EnMasse authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ package io.enmasse.systemtest.bases; import com.google.common.collect.Ordering; import io.enmasse.address.model.Address; import io.enmasse.address.model.AddressSpace; import io.enmasse.systemtest.Environment; import io.enmasse.systemtest.IndicativeSentences; import io.enmasse.systemtest.UserCredentials; import io.enmasse.systemtest.amqp.AmqpClient; import io.enmasse.systemtest.info.TestInfo; import io.enmasse.systemtest.listener.JunitCallbackListener; import io.enmasse.systemtest.logs.GlobalLogCollector; import io.enmasse.systemtest.manager.ResourceManager; import io.enmasse.systemtest.mqtt.MqttUtils; import io.enmasse.systemtest.platform.KubeCMDClient; import io.enmasse.systemtest.time.TimeoutBudget; import io.enmasse.systemtest.utils.AddressSpaceUtils; import io.enmasse.systemtest.utils.AddressUtils; import io.enmasse.systemtest.utils.JmsProvider; import io.enmasse.systemtest.utils.TestUtils; import io.enmasse.systemtest.utils.UserUtils; import io.enmasse.user.model.v1.Operation; import io.enmasse.user.model.v1.UserAuthorizationBuilder; import org.apache.qpid.proton.amqp.messaging.AmqpValue; import org.apache.qpid.proton.message.Message; import org.eclipse.paho.client.mqttv3.IMqttClient; import org.eclipse.paho.client.mqttv3.MqttMessage; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayNameGeneration; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.extension.ExtendWith; import org.slf4j.Logger; import javax.jms.DeliveryMode; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Session; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; 
import java.util.Objects; import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; /** * Base class for all tests */ @ExtendWith(JunitCallbackListener.class) @DisplayNameGeneration(IndicativeSentences.class) @TestInstance(TestInstance.Lifecycle.PER_CLASS) public abstract class TestBase implements ITestBase, ITestSeparator { protected static final UserCredentials clusterUser = new UserCredentials(KubeCMDClient.getOCUser()); protected static final Environment environment = Environment.getInstance(); protected static final GlobalLogCollector logCollector = new GlobalLogCollector(kubernetes, environment.testLogDir()); protected ResourceManager resourcesManager; protected UserCredentials defaultCredentials = null; protected UserCredentials managementCredentials = null; @BeforeEach public void initTest() throws Exception { LOGGER.info("Test init"); resourcesManager = getResourceManager(); if (TestInfo.getInstance().isTestShared()) { defaultCredentials = environment.getSharedDefaultCredentials(); managementCredentials = environment.getSharedManagementCredentials(); resourcesManager.setAddressSpacePlan(getDefaultAddressSpacePlan()); resourcesManager.setAddressSpaceType(getAddressSpaceType().toString()); resourcesManager.setDefaultAddSpaceIdentifier(getDefaultAddrSpaceIdentifier()); if (resourcesManager.getSharedAddressSpace() == null) { resourcesManager.setup(); } } else { defaultCredentials = environment.getDefaultCredentials(); managementCredentials = environment.getManagementCredentials(); resourcesManager.setup(); } } 
//================================================================================================ //======================================= Help methods =========================================== //================================================================================================ protected void waitForBrokerReplicas(AddressSpace addressSpace, Address destination, int expectedReplicas) throws Exception { TimeoutBudget budget = new TimeoutBudget(10, TimeUnit.MINUTES); TestUtils.waitForNBrokerReplicas(addressSpace, expectedReplicas, true, destination, budget, 5000); } protected void waitForRouterReplicas(AddressSpace addressSpace, int expectedReplicas) throws Exception { TimeoutBudget budget = new TimeoutBudget(3, TimeUnit.MINUTES); Map<String, String> labels = new HashMap<>(); labels.put("name", "qdrouterd"); labels.put("infraUuid", AddressSpaceUtils.getAddressSpaceInfraUuid(addressSpace)); TestUtils.waitForNReplicas(expectedReplicas, kubernetes.getInfraNamespace(), labels, budget); } protected void waitForPodsToTerminate(List<String> uids) throws Exception { LOGGER.info("Waiting for following pods to be deleted {}", uids); assertWaitForValue(true, () -> (kubernetes.listPods(kubernetes.getInfraNamespace()).stream() .noneMatch(pod -> uids.contains(pod.getMetadata().getUid()))), new TimeoutBudget(2, TimeUnit.MINUTES)); } protected void logWithSeparator(Logger logger, String... 
messages) { logger.info("--------------------------------------------------------------------------------"); for (String message : messages) { logger.info(message); } } protected List<String> extractBodyAsString(Future<List<Message>> msgs) throws Exception { return msgs.get(1, TimeUnit.MINUTES).stream().map(m -> (String) ((AmqpValue) m.getBody()).getValue()).collect(Collectors.toList()); } //================================================================================================ //==================================== Asserts methods =========================================== //================================================================================================ protected static void assertSimpleMQTTSendReceive(Address dest, IMqttClient client, int msgCount) throws Exception { List<MqttMessage> messages = IntStream.range(0, msgCount).boxed().map(i -> { MqttMessage m = new MqttMessage(); m.setPayload(String.format("mqtt-simple-send-receive-%s", i).getBytes(StandardCharsets.UTF_8)); m.setQos(1); return m; }).collect(Collectors.toList()); List<CompletableFuture<MqttMessage>> receiveFutures = MqttUtils.subscribeAndReceiveMessages(client, dest.getSpec().getAddress(), messages.size(), 1); List<CompletableFuture<Void>> publishFutures = MqttUtils.publish(client, dest.getSpec().getAddress(), messages); int publishCount = MqttUtils.awaitAndReturnCode(publishFutures, 1, TimeUnit.MINUTES); assertThat("Incorrect count of messages published", publishCount, is(messages.size())); int receivedCount = MqttUtils.awaitAndReturnCode(receiveFutures, 1, TimeUnit.MINUTES); assertThat("Incorrect count of messages received", receivedCount, is(messages.size())); } protected <T extends Comparable<T>> void assertSorted(String message, Iterable<T> list) throws Exception { assertSorted(message, list, false); } protected <T> void assertSorted(String message, Iterable<T> list, Comparator<T> comparator) throws Exception { assertSorted(message, list, false, comparator); } 
protected <T extends Comparable<T>> void assertSorted(String message, Iterable<T> list, boolean reverse) { LOGGER.info("Assert sort reverse: " + reverse); if (!reverse) { assertTrue(Ordering.natural().isOrdered(list), message); } else { assertTrue(Ordering.natural().reverse().isOrdered(list), message); } } protected <T> void assertSorted(String message, Iterable<T> list, boolean reverse, Comparator<T> comparator) { LOGGER.info("Assert sort reverse: " + reverse); if (!reverse) { assertTrue(Ordering.from(comparator).isOrdered(list), message); } else { assertTrue(Ordering.from(comparator).reverse().isOrdered(list), message); } } protected <T> void assertWaitForValue(T expected, Callable<T> fn, TimeoutBudget budget) throws Exception { T got = null; LOGGER.info("waiting for expected value '{}' ...", expected); while (budget.timeLeft() >= 0) { got = fn.call(); if (Objects.equals(expected, got)) { return; } Thread.sleep(100); } fail(String.format("Incorrect result value! expected: '%s', got: '%s'", expected, Objects.requireNonNull(got))); } protected static void assertDefaultEnabled(final Boolean enabled) { if (enabled != null && !Boolean.TRUE.equals(enabled)) { fail("Default value must be 'null' or 'true'"); } } protected void assertConcurentMessaging(List<Address> dest, List<UserCredentials> users, String destNamePrefix, int customerIndex, int messageCount) throws Exception { ArrayList<AmqpClient> clients = new ArrayList<>(users.size()); String sufix = AddressSpaceUtils.isBrokered(resourcesManager.getSharedAddressSpace()) ? 
"#" : "*"; users.forEach((user) -> { try { resourcesManager.createOrUpdateUser(resourcesManager.getSharedAddressSpace(), UserUtils.createUserResource(user) .editSpec() .withAuthorization(Collections.singletonList( new UserAuthorizationBuilder() .withAddresses(String.format("%s.%s.%s", destNamePrefix, customerIndex, sufix)) .withOperations(Operation.send, Operation.recv).build())) .endSpec() .done()); AmqpClient queueClient = resourcesManager.getAmqpClientFactory().createQueueClient(); queueClient.getConnectOptions().setCredentials(user); clients.add(queueClient); } catch (Exception e) { e.printStackTrace(); } }); AddressUtils.waitForDestinationsReady(dest.toArray(new Address[0])); //start sending messages int everyN = 3; for (AmqpClient client : clients) { for (int i = 0; i < dest.size(); i++) { if (i % everyN == 0) { Future<Integer> sent = client.sendMessages(dest.get(i).getSpec().getAddress(), TestUtils.generateMessages(messageCount)); //wait for messages sent assertEquals(messageCount, sent.get(1, TimeUnit.MINUTES).intValue(), "Incorrect count of messages send"); } } } //receive messages for (AmqpClient client : clients) { for (int i = 0; i < dest.size(); i++) { if (i % everyN == 0) { Future<List<Message>> received = client.recvMessages(dest.get(i).getSpec().getAddress(), messageCount); //wait for messages received assertEquals(messageCount, received.get(1, TimeUnit.MINUTES).size(), "Incorrect count of messages received"); } } client.close(); } } protected void assertSendReceiveLargeMessageQueue(JmsProvider jmsProvider, double sizeInMB, Address dest, int count) throws Exception { assertSendReceiveLargeMessageQueue(jmsProvider, sizeInMB, dest, count, DeliveryMode.NON_PERSISTENT); } protected void assertSendReceiveLargeMessageQueue(JmsProvider jmsProvider, double sizeInMB, Address dest, int count, int mode) throws Exception { int size = (int) (sizeInMB * 1024 * 1024); Session session = jmsProvider.getConnection().createSession(false, Session.AUTO_ACKNOWLEDGE); 
javax.jms.Queue testQueue = (javax.jms.Queue) jmsProvider.getDestination(dest.getSpec().getAddress()); List<javax.jms.Message> messages = jmsProvider.generateMessages(session, count, size); MessageProducer sender = session.createProducer(testQueue); MessageConsumer receiver = session.createConsumer(testQueue); assertSendReceiveLargeMessage(jmsProvider, sender, receiver, sizeInMB, mode, count, messages); } protected void assertSendReceiveLargeMessageTopic(JmsProvider jmsProvider, double sizeInMB, Address dest, int count) throws Exception { assertSendReceiveLargeMessageTopic(jmsProvider, sizeInMB, dest, count, DeliveryMode.NON_PERSISTENT); } protected void assertSendReceiveLargeMessageTopic(JmsProvider jmsProvider, double sizeInMB, Address dest, int count, int mode) throws Exception { int size = (int) (sizeInMB * 1024 * 1024); Session session = jmsProvider.getConnection().createSession(false, Session.AUTO_ACKNOWLEDGE); javax.jms.Topic testTopic = (javax.jms.Topic) jmsProvider.getDestination(dest.getSpec().getAddress()); List<javax.jms.Message> messages = jmsProvider.generateMessages(session, count, size); MessageProducer sender = session.createProducer(testTopic); MessageConsumer receiver = session.createConsumer(testTopic); assertSendReceiveLargeMessage(jmsProvider, sender, receiver, sizeInMB, mode, count, messages); session.close(); sender.close(); receiver.close(); } private void assertSendReceiveLargeMessage(JmsProvider jmsProvider, MessageProducer sender, MessageConsumer receiver, double sizeInMB, int mode, int count, List<javax.jms.Message> messages) { List<javax.jms.Message> recvd; jmsProvider.sendMessages(sender, messages, mode, javax.jms.Message.DEFAULT_PRIORITY, javax.jms.Message.DEFAULT_TIME_TO_LIVE); LOGGER.info("{}MB {} message sent", sizeInMB, mode == DeliveryMode.PERSISTENT ? 
"durable" : "non-durable"); recvd = jmsProvider.receiveMessages(receiver, count, 2000); assertThat("Wrong count of received messages", recvd.size(), Matchers.is(count)); LOGGER.info("{}MB {} message received", sizeInMB, mode == DeliveryMode.PERSISTENT ? "durable" : "non-durable"); } protected void assertAddressApi(AddressSpace addressSpace, Address d1, Address d2) throws Exception { List<String> destinationsNames = Arrays.asList(d1.getSpec().getAddress(), d2.getSpec().getAddress()); resourcesManager.setAddresses(d1); resourcesManager.appendAddresses(d2); //d1, d2 List<String> response = AddressUtils.getAddresses(addressSpace).stream().map(address -> address.getSpec().getAddress()).collect(Collectors.toList()); assertThat("Rest api does not return all addresses", response, is(destinationsNames)); LOGGER.info("addresses {} successfully created", Arrays.toString(destinationsNames.toArray())); //get specific address d2 Address res = kubernetes.getAddressClient(addressSpace.getMetadata().getNamespace()).withName(d2.getMetadata().getName()).get(); assertThat("Rest api does not return specific address", res.getSpec().getAddress(), is(d2.getSpec().getAddress())); resourcesManager.deleteAddresses(d1); //d2 response = AddressUtils.getAddresses(addressSpace).stream().map(address -> address.getSpec().getAddress()).collect(Collectors.toList()); assertThat("Rest api does not return right addresses", response, is(destinationsNames.subList(1, 2))); LOGGER.info("address {} successfully deleted", d1.getSpec().getAddress()); resourcesManager.deleteAddresses(d2); //empty List<Address> listRes = AddressUtils.getAddresses(addressSpace); assertThat("Rest api returns addresses", listRes, is(Collections.emptyList())); LOGGER.info("addresses {} successfully deleted", d2.getSpec().getAddress()); resourcesManager.setAddresses(d1, d2); resourcesManager.deleteAddresses(d1, d2); listRes = AddressUtils.getAddresses(addressSpace); assertThat("Rest api returns addresses", listRes, 
is(Collections.emptyList())); LOGGER.info("addresses {} successfully deleted", Arrays.toString(destinationsNames.toArray())); } }
jenmalloy/enmasse
systemtests/src/test/java/io/enmasse/systemtest/bases/TestBase.java
Java
apache-2.0
17,060
using System; using System.Collections.Generic; using System.IO; using System.IO.Compression; using System.Linq; using System.Text; using System.Threading.Tasks; using Windows.Data.Xml.Dom; using Windows.Security.Cryptography; using Windows.Storage; using Windows.Storage.Streams; namespace BabylonJs.Framework.Converters { public class AmfConverter : BabylonConverter { public Stream Stream { get; private set; } public AmfConverter(Stream stream) { this.Stream = stream; } public async override Task<string> ToJsonAsync() { var state = new ConversionState(); //unzip xml var zipArchive = new ZipArchive(this.Stream, ZipArchiveMode.Read); var amfXml = zipArchive.Entries.FirstOrDefault(); if (amfXml == null) { throw new Exception("Invalid AMF stream"); } var filename = Guid.NewGuid().ToString() + ".tmp"; var file = await Windows.Storage.ApplicationData.Current.TemporaryFolder.CreateFileAsync(filename, CreationCollisionOption.GenerateUniqueName); using (var amfXmlStream = amfXml.Open()) { using (var outputStream = await file.OpenStreamForWriteAsync()) { await amfXmlStream.CopyToAsync(outputStream); await outputStream.FlushAsync(); } } //parse XML var xDoc = await XmlDocument.LoadFromFileAsync(file); var units = xDoc.DocumentElement.GetAttribute("unit"); var version = xDoc.DocumentElement.GetAttribute("version"); foreach (var node in xDoc.DocumentElement.ChildNodes) { if (node.NodeType != NodeType.ElementNode) { continue; } var xmlElement = node as XmlElement; switch(xmlElement.TagName) { case AmfConstants.Metadata: { var type = xmlElement.GetAttribute("type"); if (type == "name") { state.Name = xmlElement.NodeValue.ToString(); } break; } case AmfConstants.Object: { //TODO: support multiple objects break; } case AmfConstants.Material: { //TODO: support multiple mesh break; } case AmfConstants.Vertex: { break; } } } return await base.ToJsonAsync(state); } } }
craigomatic/BabylonJS-Framework
src/BabylonJs.Framework/Converters/AmfConverter.cs
C#
apache-2.0
3,054
<?php namespace Topikito\HueDashboard\Config\ServiceLoader; use app\config; /** * Class WebProfilerServiceLoader * * @package Topikito\HueDashboard\Config\ServiceLoader */ class WebProfilerServiceLoader extends config\ServiceLoader\BaseWebProfilerServiceLoader {}
topikito/hue-dashboard
src/Topikito/HueDashboard/Config/ServiceLoader/WebProfilerServiceLoader.php
PHP
apache-2.0
271
#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import sys, glob import logging import random import os sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/gen-py') from radical_interface import RadicalPilotInterface import radical.pilot as rp from thrift.transport import TSocket from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol from thrift.server import TServer def extract_configs(task_file): configs = {} index = 0 task_desc = open(task_file, 'r').readlines() # Set some default pilot confs pilot_confs = {'mongodb' : 'mongodb://127.0.0.1:50055', 'userpass' : 'userpass', 'cleanup' : False} while index < len(task_desc): if (task_desc[index].startswith("attr.radical-pilot.")): l = len("attr.radical-pilot.") [key,value] = task_desc[index][l:].strip('\n').split("=") pilot_confs[key]= value index += 1 configs['pilot_confs'] = pilot_confs print "Extracted configs : ", configs return configs def pilot_state_cb (pilot, state) : print "[Callback]: ComputePilot '%s' state: %s." % (pilot.uid, state) if not pilot: return if state == rp.FAILED : sys.exit (1) def unit_state_cb (unit, state) : if not unit: return print "[Callback]: ComputeUnit '%s' state: %s." 
% (unit.uid, state) def rp_radical_init (configs): print "[rp_radical_init]" try: session = rp.Session(database_url=configs['pilot_confs']['mongodb']) c = rp.Context(configs['pilot_confs']['userpass']) session.add_context(c) print "Initializing Pilot Manager ..." pmgr = rp.PilotManager(session=session) pmgr.register_callback(pilot_state_cb) # Combine the ComputePilot, the ComputeUnits and a scheduler via # a UnitManager object. print "Initializing Unit Manager ..." umgr = rp.UnitManager (session=session, scheduler=rp.SCHED_DIRECT_SUBMISSION) # Register our callback with the UnitManager. This callback will get # called every time any of the units managed by the UnitManager # change their state. umgr.register_callback(unit_state_cb) pdesc = rp.ComputePilotDescription () pdesc.resource = configs['pilot_confs']['resource'] pdesc.runtime = int(configs['pilot_confs']['runtime']) pdesc.cores = int(configs['pilot_confs']['cores']) pdesc.cleanup = True if configs['pilot_confs']['cleanup'] in ["true", "True"] else False # submit the pilot. print "Submitting Compute Pilot to Pilot Manager ..." pilot = pmgr.submit_pilots(pdesc) # Add the created ComputePilot to the UnitManager. print "Registering Compute Pilot with Unit Manager ..." umgr.add_pilots(pilot) #session = "session_name" #pmgr = "pmgr_foo" #umgr = "umpr_blah" return [session, pmgr, umgr] except Exception as e: print "An error occurred: %s" % ((str(e))) sys.exit (-1) def filepath_cleanup(filepath): fpath = filepath.strip('\n') if fpath.startswith('file://localhost/'): l = len('file://localhost/') fpath = fpath[l:] return fpath def rp_compose_compute_unit(task_filename): task_desc = open(task_filename, 'r').readlines() index = 0 args = [] stageins = [] stageouts = [] env_vars = {} while index < len(task_desc): # We don't process directory options. 
if (task_desc[index].startswith("directory=")): l = len("directory=") elif (task_desc[index].startswith("env.")): l = len("env.") [key,value] = task_desc[index][l:].strip('\n').split("=") env_vars[key] = value elif (task_desc[index].startswith("executable=")): l = len("executable=") executable = task_desc[index][l:].strip('\n') elif (task_desc[index].startswith("arg=")): l = len("arg=") args.append(task_desc[index][l:].strip('\n')) elif (task_desc[index].startswith("stagein.source=")): stagein_item = {} l = len("stagein.source=") stagein_item['source'] = filepath_cleanup(task_desc[index][l:]) index += 1 if (task_desc[index].startswith("stagein.destination=")): l = len("stagein.destination=") stagein_item['destination'] = filepath_cleanup(task_desc[index][l:]) index += 1 if (task_desc[index].startswith("stagein.mode=")): l = len("stagein.mode=") # Ignore mode for now #stagein_item['destination'] = task_desc[index][l:].strip('\n') #index += 1 else: index -= 1 else: printf("[ERROR] Stagein source must have a destination") stageins.append(stagein_item) elif (task_desc[index].startswith("stageout.source=")): stageout_item = {} l = len("stageout.source=") stageout_item['source'] = filepath_cleanup(task_desc[index][l:]) index += 1 if (task_desc[index].startswith("stageout.destination=")): l = len("stageout.destination=") stageout_item['destination'] = filepath_cleanup(task_desc[index][l:]) index += 1 if (task_desc[index].startswith("stageout.mode=")): l = len("stageout.mode=") # Ignore mode for now #stageout_item['destination'] = task_desc[index][l:].strip('\n') #index += 1 else: index -= 1 else: printf("[ERROR] Stageout source must have a destination") stageouts.append(stageout_item) else: logging.debug("ignoring option : {0}".format(task_desc[index].strip('\n'))) index += 1 logging.debug("ARGS : {0}".format(args)) logging.debug("EXEC : {0}".format(executable)) logging.debug("STAGEINS : {0}".format(stageins)) logging.debug("STAGEOUTS : {0}".format(stageouts)) cudesc = 
rp.ComputeUnitDescription() cudesc.environment = env_vars cudesc.executable = executable cudesc.arguments = args cudesc.cores = 1 cudesc.input_staging = stageins cudesc.output_staging = stageouts return [cudesc] def rp_submit_task(unit_manager, task_filename): cu_desc = rp_compose_compute_unit(task_filename) c_unit = unit_manager.submit_units(cu_desc) return c_unit class RadicalPilotHandler: def __init__(self): self.session = 'NULL' self.pmgr = 'NULL' self.umgr = 'NULL' self.log = {} self.configs = {} #self.rp_lock = threading.Lock() self.task_lookup = {} self.session = 'NULL' logging.debug("Init done") def submit_task(self, task_filename): print "[SUBMIT_TASK] :", task_filename # If self.configs is empty, this is the first task, which requires # radical pilots to be setup if not self.configs : logging.debug("[SUBMIT_TASK] : Starting radical.pilots") self.configs = extract_configs(task_filename) logging.debug("Extracting configs done") [self.session, self.pmgr, self.umgr] = rp_radical_init(self.configs) print [self.session, self.pmgr, self.umgr] logging.debug("done with radical_init") cu_list = rp_submit_task(self.umgr, task_filename) print cu_list[0] hash_id = str(len(self.task_lookup)) self.task_lookup[hash_id] = cu_list[0] return hash_id def cancel_task(self, task_name): logging.debug("Cancelling task :" + task_name) return "Cancelled task" def status_task(self, task_name): radical_states = { 'PendingExecution' : 'Q', 'Scheduling' : 'Q', 'Executing' : 'R', 'Done' : 'C', 'Failed' : 'F' } if task_name not in self.task_lookup: return str(task_name) + " F -1 Task id not in the Radical Pilot lookup registry" state = self.task_lookup[task_name].state if state not in radical_states : logging.debug( "[DEBUG] task_name:" + task_name + " state: " + state) return str(task_name) + " Q" logging.debug("[DEBUG] task_name:{0} state:{1}".format(task_name, state)) return str(task_name) + " " + radical_states[state] def server_die(self, die_string): logging.debug("Server 
terminating. Received message: " + die_string) exit(0) def getStruct(self, key): print 'getStruct(%d)' % (key) return self.log[key] def zip(self): print 'zip()' # Start logging if ( len(sys.argv) < 2 ): print "[ERROR] Missing log_file argument" logging.basicConfig(filename=sys.argv[1], level=logging.DEBUG) logging.debug('Starting the server...') handler = RadicalPilotHandler() processor = RadicalPilotInterface.Processor(handler) transport = TSocket.TServerSocket(port=9090) tfactory = TTransport.TBufferedTransportFactory() pfactory = TBinaryProtocol.TBinaryProtocolFactory() server = TServer.TSimpleServer(processor, transport, tfactory, pfactory) # You could do one of these for a multithreaded server #server = TServer.TThreadedServer(processor, transport, tfactory, pfactory) #server = TServer.TThreadPoolServer(processor, transport, tfactory, pfactory) server.serve() logging.debug('done.')
radical-cybertools/aimes.swiftrp
thrift_tests/server.py
Python
apache-2.0
9,371
var version="2.7";var _summarisedStatistics=[];var _recentStatistics=[];var _hasDetailedTableBeenRendered=false;var _hasSummaryTableBeenRendered=false;var _haveHistoricGraphsBeenRendered=false;var GraphTab={Recent:1,Historic:2};function convertTimeIntoSeconds(time) {var timeParts=time.split(":");return timeParts[0]*3600+timeParts[1]*60+parseInt(timeParts[2]);} function summariseStatistics() {var lastDate="";var statsGroupedByDay={};if(_statistics.length==0) {return statsGroupedByDay;} var projectDays=distinct(_statistics,"Date");for(var dayIndex=0;dayIndex<projectDays.length;dayIndex++) {var currentDate=projectDays[dayIndex];if(typeof(currentDate)=='string') {currentDate=new Date(currentDate);} statsGroupedByDay[currentDate.toDateString()]=[];} var currentStatistic=[];for(var i=0;i<_statistics.length;i++) {var statistic=_statistics[i];var statisticsDate=new Date(statistic.Date);var dayText=statisticsDate.toDateString();statsGroupedByDay[dayText].push(statistic);} return generateDailySummaries(statsGroupedByDay);} function getTimelineDays() {var firstDate=new Date(_statistics[0].Date);var lastDate=new Date(_statistics[_statistics.length-1].Date);return generateDateRange(firstDate,lastDate);} function prepareStatistics() {var usedStats=getUsedStatisticAttributes();for(var i=0;i<_statistics.length;i++) {var statistic=_statistics[i];statistic["index"]=i;statistic["DurationInSeconds"]=convertTimeIntoSeconds(statistic["Duration"]);statistic["TestsPassed"]=statistic["TestCount"]-statistic["TestFailures"]-statistic["TestIgnored"];for(var attributeIndex=0;attributeIndex<usedStats.length;attributeIndex++) {var attributeName=usedStats[attributeIndex];statistic[attributeName]=zeroIfInvalid(statistic[attributeName]);}}} function getUsedStatisticAttributes() {var usedStats={};for(var configIndex=0;configIndex<_recentGraphConfigurations.length;configIndex++) {var config=_recentGraphConfigurations[configIndex];for(var seriesIndex=0;seriesIndex<config.series.length;seriesIndex++) 
{var series=config.series[seriesIndex];usedStats[series.attributeName]='';}} var attributes=[];for(var attribute in usedStats) {attributes.push(attribute);} return attributes;} function zeroIfInvalid(dataItem) {if(dataItem==''||typeof(dataItem)=='undefined'||isNaN(dataItem)) {return'0';} else {return dataItem;}} function getRecentStatistics(numberOfBuilds) {var startIndex=Math.max(_statistics.length-numberOfBuilds,0);for(var i=startIndex;i<_statistics.length;i++) {var clonedStatistic=cloneObject(_statistics[i]);clonedStatistic["index"]=_recentStatistics.length;clonedStatistic["label"]=clonedStatistic["BuildLabel"];_recentStatistics.push(clonedStatistic);}} function cloneObject(sourceObject) {var clone={};for(var attribute in sourceObject) {clone[attribute]=sourceObject[attribute];} return clone;} function generateDateRange(startDate,endDate) {var dayDifference=24*60*60*1000;var currentDate=startDate;var dateRange=[];endDate.setHours(23);endDate.setMinutes(59);while(currentDate<=endDate) {dateRange.push(currentDate);currentDate=new Date(currentDate.getTime()+dayDifference);} return dateRange;} function generateDailySummaries(statsGroupedByDay) {var lastBuildLabel="";var index=0;for(var day in statsGroupedByDay) {var currentStatistics=statsGroupedByDay[day];var currentBuildLabel=getLastValue(currentStatistics,"BuildLabel");if(currentBuildLabel.length==0) {currentBuildLabel=lastBuildLabel;} var successfulBuilds=select(currentStatistics,successfulBuildsFilter);var failedBuilds=select(currentStatistics,failedBuildsFilter);var daySummary={day:day,index:index++,lastBuildLabel:currentBuildLabel};for(var attribute in _summaryConfiguration) {daySummary[attribute]=_summaryConfiguration[attribute](successfulBuilds,failedBuilds);} var dayDate=new Date(day);daySummary.label=daySummary.lastBuildLabel+"\n("+day+")";_summarisedStatistics.push(daySummary);lastBuildLabel=currentBuildLabel;}} function successfulBuildsFilter(item) {return(item["Status"]=="Success");} function 
failedBuildsFilter(item) {var status=item["Status"];return(status=="Failure"||status=="Exception");} function processGraphList(configurationList,containerElement) {for(var i=0;i<configurationList.length;i++) {var graphOptions=configurationList[i];graphOptions.containerElement=containerElement;createGraph(graphOptions);}} function createRecentGraphs() {processGraphList(_recentGraphConfigurations,dojo.byId("RecentBuildsContainerArea"));} function createHistoricGraphs() {processGraphList(_historicGraphConfigurations,dojo.byId("HistoricGraphContainerArea"));} function summaryDataTabChangeHandler() {if(!_hasSummaryTableBeenRendered) {ensureStatisticsHaveBeenSummarised();var tableContainerArea=dojo.byId("SummaryTableStatisticsContainerArea");generateStatisticsTable(tableContainerArea,"Build Summary Statistics",_summarisedStatistics,cellRenderer,true,summaryTableDrillDown);_hasSummaryTableBeenRendered=true;}} function detailedDataTabChangeHandler() {if(!_hasDetailedTableBeenRendered) {var tableContainerArea=dojo.byId("DetailedTableStatisticsContainerArea");generateStatisticsTable(tableContainerArea,"Build Detailed Statistics",_statistics,cellRenderer,false,null);_hasDetailedTableBeenRendered=true;}} function historicGraphsTabChangeHandler(evt) {if(!_haveHistoricGraphsBeenRendered) {ensureStatisticsHaveBeenSummarised();createHistoricGraphs();_haveHistoricGraphsBeenRendered=true;}} function ensureStatisticsHaveBeenSummarised() {if(_summarisedStatistics.length==0) {summariseStatistics();}} function setupLazyTabInitialization() {var historicalTabWidget=dojo.widget.byId("HistoricalTabWidget");var detailedTabularTabWidget=dojo.widget.byId("DetailedDataTabWidget");var 
summarisedTabularTabWidget=dojo.widget.byId("SummarisedDataTabWidget");dojo.event.connect("before",historicalTabWidget,"show",historicGraphsTabChangeHandler);dojo.event.connect("before",detailedTabularTabWidget,"show",detailedDataTabChangeHandler);dojo.event.connect("before",summarisedTabularTabWidget,"show",summaryDataTabChangeHandler);} dojo.addOnLoad(function() {prepareStatistics();getRecentStatistics(20);setupLazyTabInitialization();window.setTimeout("createRecentGraphs()",100);});
Gallio/infrastructure
ccnet/WebDashboard/javascript/StatisticsGraphs.js
JavaScript
apache-2.0
6,162
/* * Copyright 2011 William Bernardet * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.japi.checker.model; import com.googlecode.japi.checker.ClassDataLoader; public class AttributeData extends JavaItem { public AttributeData(ClassDataLoader loader, ClassData owner, int access, String name) { super(loader, owner, access, name); } @Override public String getType() { return "attribute"; } }
seeburger-ag/japi-checker
japi-checker/src/main/java/com/googlecode/japi/checker/model/AttributeData.java
Java
apache-2.0
970
'use strict'; module.exports = function(sequelize, DataTypes) { var GuardianMetaUpdateCheckIn = sequelize.define('GuardianMetaUpdateCheckIn', { }, { classMethods: { associate: function(models) { GuardianMetaUpdateCheckIn.belongsTo(models.Guardian, {as: 'Guardian'}); GuardianMetaUpdateCheckIn.belongsTo(models.GuardianSoftwareVersion, {as: 'Version'}); } }, tableName: "GuardianMetaUpdateCheckIns" }); return GuardianMetaUpdateCheckIn; };
tanapop/rfcx-api
models/guardian-meta/guardian-meta-updatecheckin.js
JavaScript
apache-2.0
487
<?php App::uses('AppModel', 'Model'); APP::uses('ModelBehavior', 'Model'); class DateFormatBehavior extends ModelBehavior { public function afterFind(Model $model, $results, $primary = false) { $res = array(); foreach ($results as $key => $value) { if (is_array($value)) { $res[$key] = self::afterFind($model, $value, $primary); } else { $columns = $this->verifyColumns($model->getColumnTypes()); $this->convertDate($key, $columns, $value, $res); } } return $res; } private function verifyColumns($columns) { foreach ($columns as $column => $type) { if ($type != 'date') { unset($columns[$column]); } } return $columns; } private function convertDate($key, $columns, $value, &$res) { if (array_key_exists($key, $columns)) { foreach ($columns as $column => $type) { $res[$column] = $this->dateFormat($value); } } else { $res[$key] = $value; } } public function dateFormat($date, $fromFormat = 'Y-m-d', $toFormat = 'd/m/Y') { $schedule = $date; $schedule_format = str_replace(array('Y', 'm', 'd', 'H', 'i', 'a'), array('%Y', '%m', '%d', '%I', '%M', '%p'), $fromFormat); $ugly = strptime($schedule, $schedule_format); $ymd = sprintf( '%04d-%02d-%02d %02d:%02d:%02d', $ugly['tm_year'] + 1900, // This will be "111", so we need to add 1900. $ugly['tm_mon'] + 1, // This will be the month minus one, so we add one. $ugly['tm_mday'], $ugly['tm_hour'], $ugly['tm_min'], $ugly['tm_sec'] ); $new_schedule = new DateTime($ymd); return $new_schedule->format($toFormat); } }
robertinosantiago/flisol-sistema-eventos
app/Model/Behavior/DateFormatBehavior.php
PHP
apache-2.0
1,864
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with // the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on // an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by google-apis-code-generator 1.5.1 // C# generator version: 1.38.0 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ /** * \brief * Cloud Speech-to-Text API Version v1p1beta1 * * \section ApiInfo API Version Information * <table> * <tr><th>API * <td><a href='https://cloud.google.com/speech-to-text/docs/quickstart-protocol'>Cloud Speech-to-Text API</a> * <tr><th>API Version<td>v1p1beta1 * <tr><th>API Rev<td>20190201 (1492) * <tr><th>API Docs * <td><a href='https://cloud.google.com/speech-to-text/docs/quickstart-protocol'> * https://cloud.google.com/speech-to-text/docs/quickstart-protocol</a> * <tr><th>Discovery Name<td>speech * </table> * * \section ForMoreInfo For More Information * * The complete API documentation for using Cloud Speech-to-Text API can be found at * <a href='https://cloud.google.com/speech-to-text/docs/quickstart-protocol'>https://cloud.google.com/speech-to-text/docs/quickstart-protocol</a>. 
* * For more information about the Google APIs Client Library for .NET, see * <a href='https://developers.google.com/api-client-library/dotnet/get_started'> * https://developers.google.com/api-client-library/dotnet/get_started</a> */ namespace Google.Apis.Speech.v1p1beta1 { /// <summary>The Speech Service.</summary> public class SpeechService : Google.Apis.Services.BaseClientService { /// <summary>The API version.</summary> public const string Version = "v1p1beta1"; /// <summary>The discovery version used to generate this service.</summary> public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed = Google.Apis.Discovery.DiscoveryVersion.Version_1_0; /// <summary>Constructs a new service.</summary> public SpeechService() : this(new Google.Apis.Services.BaseClientService.Initializer()) {} /// <summary>Constructs a new service.</summary> /// <param name="initializer">The service initializer.</param> public SpeechService(Google.Apis.Services.BaseClientService.Initializer initializer) : base(initializer) { operations = new OperationsResource(this); projects = new ProjectsResource(this); speech = new SpeechResource(this); } /// <summary>Gets the service supported features.</summary> public override System.Collections.Generic.IList<string> Features { get { return new string[0]; } } /// <summary>Gets the service name.</summary> public override string Name { get { return "speech"; } } /// <summary>Gets the service base URI.</summary> public override string BaseUri { get { return "https://speech.googleapis.com/"; } } /// <summary>Gets the service base path.</summary> public override string BasePath { get { return ""; } } #if !NET40 /// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary> public override string BatchUri { get { return "https://speech.googleapis.com/batch"; } } /// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary> public override string BatchPath { get { return "batch"; } } #endif /// 
<summary>Available OAuth 2.0 scopes for use with the Cloud Speech-to-Text API.</summary> public class Scope { /// <summary>View and manage your data across Google Cloud Platform services</summary> public static string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform"; } /// <summary>Available OAuth 2.0 scope constants for use with the Cloud Speech-to-Text API.</summary> public static class ScopeConstants { /// <summary>View and manage your data across Google Cloud Platform services</summary> public const string CloudPlatform = "https://www.googleapis.com/auth/cloud-platform"; } private readonly OperationsResource operations; /// <summary>Gets the Operations resource.</summary> public virtual OperationsResource Operations { get { return operations; } } private readonly ProjectsResource projects; /// <summary>Gets the Projects resource.</summary> public virtual ProjectsResource Projects { get { return projects; } } private readonly SpeechResource speech; /// <summary>Gets the Speech resource.</summary> public virtual SpeechResource Speech { get { return speech; } } } ///<summary>A base abstract class for Speech requests.</summary> public abstract class SpeechBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse> { ///<summary>Constructs a new SpeechBaseServiceRequest instance.</summary> protected SpeechBaseServiceRequest(Google.Apis.Services.IClientService service) : base(service) { } /// <summary>V1 error format.</summary> [Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<XgafvEnum> Xgafv { get; set; } /// <summary>V1 error format.</summary> public enum XgafvEnum { /// <summary>v1 error format</summary> [Google.Apis.Util.StringValueAttribute("1")] Value1, /// <summary>v2 error format</summary> [Google.Apis.Util.StringValueAttribute("2")] Value2, } /// <summary>OAuth access token.</summary> 
[Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string AccessToken { get; set; } /// <summary>Data format for response.</summary> /// [default: json] [Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<AltEnum> Alt { get; set; } /// <summary>Data format for response.</summary> public enum AltEnum { /// <summary>Responses with Content-Type of application/json</summary> [Google.Apis.Util.StringValueAttribute("json")] Json, /// <summary>Media download with context-dependent Content-Type</summary> [Google.Apis.Util.StringValueAttribute("media")] Media, /// <summary>Responses with Content-Type of application/x-protobuf</summary> [Google.Apis.Util.StringValueAttribute("proto")] Proto, } /// <summary>JSONP</summary> [Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)] public virtual string Callback { get; set; } /// <summary>Selector specifying which fields to include in a partial response.</summary> [Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)] public virtual string Fields { get; set; } /// <summary>API key. Your API key identifies your project and provides you with API access, quota, and reports. 
/// Required unless you provide an OAuth 2.0 token.</summary> [Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)] public virtual string Key { get; set; } /// <summary>OAuth 2.0 token for the current user.</summary> [Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)] public virtual string OauthToken { get; set; } /// <summary>Returns response with indentations and line breaks.</summary> /// [default: true] [Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)] public virtual System.Nullable<bool> PrettyPrint { get; set; } /// <summary>Available to use for quota purposes for server-side applications. Can be any arbitrary string /// assigned to a user, but should not exceed 40 characters.</summary> [Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)] public virtual string QuotaUser { get; set; } /// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary> [Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)] public virtual string UploadType { get; set; } /// <summary>Upload protocol for media (e.g. 
"raw", "multipart").</summary> [Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)] public virtual string UploadProtocol { get; set; } /// <summary>Initializes Speech parameter list.</summary> protected override void InitParameters() { base.InitParameters(); RequestParameters.Add( "$.xgafv", new Google.Apis.Discovery.Parameter { Name = "$.xgafv", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "access_token", new Google.Apis.Discovery.Parameter { Name = "access_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "alt", new Google.Apis.Discovery.Parameter { Name = "alt", IsRequired = false, ParameterType = "query", DefaultValue = "json", Pattern = null, }); RequestParameters.Add( "callback", new Google.Apis.Discovery.Parameter { Name = "callback", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "fields", new Google.Apis.Discovery.Parameter { Name = "fields", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "key", new Google.Apis.Discovery.Parameter { Name = "key", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "oauth_token", new Google.Apis.Discovery.Parameter { Name = "oauth_token", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "prettyPrint", new Google.Apis.Discovery.Parameter { Name = "prettyPrint", IsRequired = false, ParameterType = "query", DefaultValue = "true", Pattern = null, }); RequestParameters.Add( "quotaUser", new Google.Apis.Discovery.Parameter { Name = "quotaUser", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, }); RequestParameters.Add( "uploadType", new Google.Apis.Discovery.Parameter { Name = 
"uploadType", IsRequired = false, ParameterType = "query", DefaultValue = null, Pattern = null, });
            RequestParameters.Add(
                "upload_protocol", new Google.Apis.Discovery.Parameter
                {
                    Name = "upload_protocol",
                    IsRequired = false,
                    ParameterType = "query",
                    DefaultValue = null,
                    Pattern = null,
                });
        }
    }

    // NOTE(review): this reads as generator output (google-apis-code-generator style); prefer
    // regenerating from the discovery document over hand edits — TODO confirm with the file header.
    /// <summary>The "operations" collection of methods.</summary>
    public class OperationsResource
    {
        private const string Resource = "operations";

        /// <summary>The service which this resource belongs to.</summary>
        private readonly Google.Apis.Services.IClientService service;

        /// <summary>Constructs a new resource.</summary>
        public OperationsResource(Google.Apis.Services.IClientService service)
        {
            this.service = service;
        }

        /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
        /// operation result at intervals as recommended by the API service.</summary>
        /// <param name="name">The name of the operation resource.</param>
        public virtual GetRequest Get(string name)
        {
            return new GetRequest(service, name);
        }

        /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
        /// operation result at intervals as recommended by the API service.</summary>
        public class GetRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.Operation>
        {
            /// <summary>Constructs a new Get request.</summary>
            public GetRequest(Google.Apis.Services.IClientService service, string name)
                : base(service)
            {
                Name = name;
                InitParameters();
            }

            /// <summary>The name of the operation resource.</summary>
            [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
            public virtual string Name { get; private set; }

            ///<summary>Gets the method name.</summary>
            public override string MethodName
            {
                get { return "get"; }
            }

            ///<summary>Gets the HTTP method.</summary>
            public override string HttpMethod
            {
                get { return "GET"; }
            }

            ///<summary>Gets the REST path.</summary>
            public override string RestPath
            {
                get { return "v1p1beta1/operations/{+name}"; }
            }

            /// <summary>Initializes Get parameter list.</summary>
            protected override void InitParameters()
            {
                base.InitParameters();

                RequestParameters.Add(
                    "name", new Google.Apis.Discovery.Parameter
                    {
                        Name = "name",
                        IsRequired = true,
                        ParameterType = "path",
                        DefaultValue = null,
                        Pattern = @"^.+$",
                    });
            }
        }

        /// <summary>Lists operations that match the specified filter in the request. If the server doesn't support this
        /// method, it returns `UNIMPLEMENTED`.
        ///
        /// NOTE: the `name` binding allows API services to override the binding to use different resource name schemes,
        /// such as `users/operations`. To override the binding, API services can add a binding such as
        /// `"/v1/{name=users}/operations"` to their service configuration. For backwards compatibility, the default
        /// name includes the operations collection id, however overriding users must ensure the name binding is the
        /// parent resource, without the operations collection id.</summary>
        public virtual ListRequest List()
        {
            return new ListRequest(service);
        }

        /// <summary>Lists operations that match the specified filter in the request. If the server doesn't support this
        /// method, it returns `UNIMPLEMENTED`.
        ///
        /// NOTE: the `name` binding allows API services to override the binding to use different resource name schemes,
        /// such as `users/operations`. To override the binding, API services can add a binding such as
        /// `"/v1/{name=users}/operations"` to their service configuration. For backwards compatibility, the default
        /// name includes the operations collection id, however overriding users must ensure the name binding is the
        /// parent resource, without the operations collection id.</summary>
        public class ListRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.ListOperationsResponse>
        {
            /// <summary>Constructs a new List request.</summary>
            public ListRequest(Google.Apis.Services.IClientService service)
                : base(service)
            {
                InitParameters();
            }

            /// <summary>The name of the operation's parent resource.</summary>
            [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Query)]
            public virtual string Name { get; set; }

            /// <summary>The standard list page token.</summary>
            [Google.Apis.Util.RequestParameterAttribute("pageToken", Google.Apis.Util.RequestParameterType.Query)]
            public virtual string PageToken { get; set; }

            /// <summary>The standard list page size.</summary>
            [Google.Apis.Util.RequestParameterAttribute("pageSize", Google.Apis.Util.RequestParameterType.Query)]
            public virtual System.Nullable<int> PageSize { get; set; }

            /// <summary>The standard list filter.</summary>
            [Google.Apis.Util.RequestParameterAttribute("filter", Google.Apis.Util.RequestParameterType.Query)]
            public virtual string Filter { get; set; }

            ///<summary>Gets the method name.</summary>
            public override string MethodName
            {
                get { return "list"; }
            }

            ///<summary>Gets the HTTP method.</summary>
            public override string HttpMethod
            {
                get { return "GET"; }
            }

            ///<summary>Gets the REST path.</summary>
            public override string RestPath
            {
                get { return "v1p1beta1/operations"; }
            }

            /// <summary>Initializes List parameter list.</summary>
            protected override void InitParameters()
            {
                base.InitParameters();

                RequestParameters.Add(
                    "name", new Google.Apis.Discovery.Parameter
                    {
                        Name = "name",
                        IsRequired = false,
                        ParameterType = "query",
                        DefaultValue = null,
                        Pattern = null,
                    });
                RequestParameters.Add(
                    "pageToken", new Google.Apis.Discovery.Parameter
                    {
                        Name = "pageToken",
                        IsRequired = false,
                        ParameterType = "query",
                        DefaultValue = null,
                        Pattern = null,
                    });
                RequestParameters.Add(
                    "pageSize", new Google.Apis.Discovery.Parameter
                    {
                        Name = "pageSize",
                        IsRequired = false,
                        ParameterType = "query",
                        DefaultValue = null,
                        Pattern = null,
                    });
                RequestParameters.Add(
                    "filter", new Google.Apis.Discovery.Parameter
                    {
                        Name = "filter",
                        IsRequired = false,
                        ParameterType = "query",
                        DefaultValue = null,
                        Pattern = null,
                    });
            }
        }
    }

    /// <summary>The "projects" collection of methods.</summary>
    public class ProjectsResource
    {
        private const string Resource = "projects";

        /// <summary>The service which this resource belongs to.</summary>
        private readonly Google.Apis.Services.IClientService service;

        /// <summary>Constructs a new resource.</summary>
        public ProjectsResource(Google.Apis.Services.IClientService service)
        {
            this.service = service;
            locations = new LocationsResource(service);
            operations = new OperationsResource(service);
        }

        private readonly LocationsResource locations;

        /// <summary>Gets the Locations resource.</summary>
        public virtual LocationsResource Locations
        {
            get { return locations; }
        }

        /// <summary>The "locations" collection of methods.</summary>
        public class LocationsResource
        {
            private const string Resource = "locations";
            /// <summary>The service which this resource belongs to.</summary>
            private readonly Google.Apis.Services.IClientService service;

            /// <summary>Constructs a new resource.</summary>
            public LocationsResource(Google.Apis.Services.IClientService service)
            {
                this.service = service;
                operations = new OperationsResource(service);
            }

            private readonly OperationsResource operations;

            /// <summary>Gets the Operations resource.</summary>
            public virtual OperationsResource Operations
            {
                get { return operations; }
            }

            /// <summary>The "operations" collection of methods.</summary>
            public class OperationsResource
            {
                private const string Resource = "operations";

                /// <summary>The service which this resource belongs to.</summary>
                private readonly Google.Apis.Services.IClientService service;

                /// <summary>Constructs a new resource.</summary>
                public OperationsResource(Google.Apis.Services.IClientService service)
                {
                    this.service = service;
                }

                /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
                /// operation result at intervals as recommended by the API service.</summary>
                /// <param name="name">The name of the operation resource.</param>
                public virtual GetRequest Get(string name)
                {
                    return new GetRequest(service, name);
                }

                /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
                /// operation result at intervals as recommended by the API service.</summary>
                public class GetRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.Operation>
                {
                    /// <summary>Constructs a new Get request.</summary>
                    public GetRequest(Google.Apis.Services.IClientService service, string name)
                        : base(service)
                    {
                        Name = name;
                        InitParameters();
                    }

                    /// <summary>The name of the operation resource.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Name { get; private set; }

                    ///<summary>Gets the method name.</summary>
                    public override string MethodName
                    {
                        get { return "get"; }
                    }

                    ///<summary>Gets the HTTP method.</summary>
                    public override string HttpMethod
                    {
                        get { return "GET"; }
                    }

                    ///<summary>Gets the REST path.</summary>
                    public override string RestPath
                    {
                        get { return "v1p1beta1/{+name}"; }
                    }

                    /// <summary>Initializes Get parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();

                        RequestParameters.Add(
                            "name", new Google.Apis.Discovery.Parameter
                            {
                                Name = "name",
                                IsRequired = true,
                                ParameterType = "path",
                                DefaultValue = null,
                                Pattern = @"^projects/[^/]+/locations/[^/]+/operations/[^/]+$",
                            });
                    }
                }

                /// <summary>Lists operations that match the specified filter in the request. If the server doesn't
                /// support this method, it returns `UNIMPLEMENTED`.
                ///
                /// NOTE: the `name` binding allows API services to override the binding to use different resource name
                /// schemes, such as `users/operations`. To override the binding, API services can add a binding such as
                /// `"/v1/{name=users}/operations"` to their service configuration. For backwards compatibility, the
                /// default name includes the operations collection id, however overriding users must ensure the name
                /// binding is the parent resource, without the operations collection id.</summary>
                /// <param name="name">The name of the operation's parent resource.</param>
                public virtual ListRequest List(string name)
                {
                    return new ListRequest(service, name);
                }

                /// <summary>Lists operations that match the specified filter in the request. If the server doesn't
                /// support this method, it returns `UNIMPLEMENTED`.
                ///
                /// NOTE: the `name` binding allows API services to override the binding to use different resource name
                /// schemes, such as `users/operations`. To override the binding, API services can add a binding such as
                /// `"/v1/{name=users}/operations"` to their service configuration. For backwards compatibility, the
                /// default name includes the operations collection id, however overriding users must ensure the name
                /// binding is the parent resource, without the operations collection id.</summary>
                public class ListRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.ListOperationsResponse>
                {
                    /// <summary>Constructs a new List request.</summary>
                    public ListRequest(Google.Apis.Services.IClientService service, string name)
                        : base(service)
                    {
                        Name = name;
                        InitParameters();
                    }

                    /// <summary>The name of the operation's parent resource.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Name { get; private set; }

                    /// <summary>The standard list page token.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("pageToken", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual string PageToken { get; set; }

                    /// <summary>The standard list page size.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("pageSize", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual System.Nullable<int> PageSize { get; set; }

                    /// <summary>The standard list filter.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("filter", Google.Apis.Util.RequestParameterType.Query)]
                    public virtual string Filter { get; set; }

                    ///<summary>Gets the method name.</summary>
                    public override string MethodName
                    {
                        get { return "list"; }
                    }

                    ///<summary>Gets the HTTP method.</summary>
                    public override string HttpMethod
                    {
                        get { return "GET"; }
                    }

                    ///<summary>Gets the REST path.</summary>
                    public override string RestPath
                    {
                        get { return "v1p1beta1/{+name}/operations"; }
                    }

                    /// <summary>Initializes List parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();

                        RequestParameters.Add(
                            "name", new Google.Apis.Discovery.Parameter
                            {
                                Name = "name",
                                IsRequired = true,
                                ParameterType = "path",
                                DefaultValue = null,
                                Pattern = @"^projects/[^/]+/locations/[^/]+$",
                            });
                        RequestParameters.Add(
                            "pageToken", new Google.Apis.Discovery.Parameter
                            {
                                Name = "pageToken",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                        RequestParameters.Add(
                            "pageSize", new Google.Apis.Discovery.Parameter
                            {
                                Name = "pageSize",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                        RequestParameters.Add(
                            "filter", new Google.Apis.Discovery.Parameter
                            {
                                Name = "filter",
                                IsRequired = false,
                                ParameterType = "query",
                                DefaultValue = null,
                                Pattern = null,
                            });
                    }
                }
            }
        }

        private readonly OperationsResource operations;

        /// <summary>Gets the Operations resource.</summary>
        public virtual OperationsResource Operations
        {
            get { return operations; }
        }

        /// <summary>The "operations" collection of methods.</summary>
        public class OperationsResource
        {
            private const string Resource = "operations";

            /// <summary>The service which this resource belongs to.</summary>
            private readonly Google.Apis.Services.IClientService service;

            /// <summary>Constructs a new resource.</summary>
            public OperationsResource(Google.Apis.Services.IClientService service)
            {
                this.service = service;
                manualRecognitionTasks = new
ManualRecognitionTasksResource(service);
            }

            private readonly ManualRecognitionTasksResource manualRecognitionTasks;

            /// <summary>Gets the ManualRecognitionTasks resource.</summary>
            public virtual ManualRecognitionTasksResource ManualRecognitionTasks
            {
                get { return manualRecognitionTasks; }
            }

            /// <summary>The "manualRecognitionTasks" collection of methods.</summary>
            public class ManualRecognitionTasksResource
            {
                private const string Resource = "manualRecognitionTasks";

                /// <summary>The service which this resource belongs to.</summary>
                private readonly Google.Apis.Services.IClientService service;

                /// <summary>Constructs a new resource.</summary>
                public ManualRecognitionTasksResource(Google.Apis.Services.IClientService service)
                {
                    this.service = service;
                }

                /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
                /// operation result at intervals as recommended by the API service.</summary>
                /// <param name="name">The name of the operation resource.</param>
                public virtual GetRequest Get(string name)
                {
                    return new GetRequest(service, name);
                }

                /// <summary>Gets the latest state of a long-running operation. Clients can use this method to poll the
                /// operation result at intervals as recommended by the API service.</summary>
                public class GetRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.Operation>
                {
                    /// <summary>Constructs a new Get request.</summary>
                    public GetRequest(Google.Apis.Services.IClientService service, string name)
                        : base(service)
                    {
                        Name = name;
                        InitParameters();
                    }

                    /// <summary>The name of the operation resource.</summary>
                    [Google.Apis.Util.RequestParameterAttribute("name", Google.Apis.Util.RequestParameterType.Path)]
                    public virtual string Name { get; private set; }

                    ///<summary>Gets the method name.</summary>
                    public override string MethodName
                    {
                        get { return "get"; }
                    }

                    ///<summary>Gets the HTTP method.</summary>
                    public override string HttpMethod
                    {
                        get { return "GET"; }
                    }

                    ///<summary>Gets the REST path.</summary>
                    public override string RestPath
                    {
                        get { return "v1p1beta1/{+name}"; }
                    }

                    /// <summary>Initializes Get parameter list.</summary>
                    protected override void InitParameters()
                    {
                        base.InitParameters();

                        RequestParameters.Add(
                            "name", new Google.Apis.Discovery.Parameter
                            {
                                Name = "name",
                                IsRequired = true,
                                ParameterType = "path",
                                DefaultValue = null,
                                Pattern = @"^projects/[^/]+/operations/manualRecognitionTasks/[^/]+$",
                            });
                    }
                }
            }
        }
    }

    /// <summary>The "speech" collection of methods.</summary>
    public class SpeechResource
    {
        private const string Resource = "speech";

        /// <summary>The service which this resource belongs to.</summary>
        private readonly Google.Apis.Services.IClientService service;

        /// <summary>Constructs a new resource.</summary>
        public SpeechResource(Google.Apis.Services.IClientService service)
        {
            this.service = service;
        }

        /// <summary>Performs asynchronous speech recognition: receive results via the google.longrunning.Operations
        /// interface. Returns either an `Operation.error` or an `Operation.response` which contains a
        /// `LongRunningRecognizeResponse` message.</summary>
        /// <param name="body">The body of the request.</param>
        public virtual LongrunningrecognizeRequest Longrunningrecognize(Google.Apis.Speech.v1p1beta1.Data.LongRunningRecognizeRequest body)
        {
            return new LongrunningrecognizeRequest(service, body);
        }

        /// <summary>Performs asynchronous speech recognition: receive results via the google.longrunning.Operations
        /// interface. Returns either an `Operation.error` or an `Operation.response` which contains a
        /// `LongRunningRecognizeResponse` message.</summary>
        public class LongrunningrecognizeRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.Operation>
        {
            /// <summary>Constructs a new Longrunningrecognize request.</summary>
            public LongrunningrecognizeRequest(Google.Apis.Services.IClientService service, Google.Apis.Speech.v1p1beta1.Data.LongRunningRecognizeRequest body)
                : base(service)
            {
                Body = body;
                InitParameters();
            }

            /// <summary>Gets or sets the body of this request.</summary>
            Google.Apis.Speech.v1p1beta1.Data.LongRunningRecognizeRequest Body { get; set; }

            ///<summary>Returns the body of the request.</summary>
            protected override object GetBody()
            {
                return Body;
            }

            ///<summary>Gets the method name.</summary>
            public override string MethodName
            {
                get { return "longrunningrecognize"; }
            }

            ///<summary>Gets the HTTP method.</summary>
            public override string HttpMethod
            {
                get { return "POST"; }
            }

            ///<summary>Gets the REST path.</summary>
            public override string RestPath
            {
                get { return "v1p1beta1/speech:longrunningrecognize"; }
            }

            /// <summary>Initializes Longrunningrecognize parameter list.</summary>
            protected override void InitParameters()
            {
                base.InitParameters();
            }
        }

        /// <summary>Performs synchronous speech recognition: receive results after all audio has been sent and
        /// processed.</summary>
        /// <param name="body">The body of the request.</param>
        public virtual RecognizeRequest
Recognize(Google.Apis.Speech.v1p1beta1.Data.RecognizeRequest body) { return new RecognizeRequest(service, body); } /// <summary>Performs synchronous speech recognition: receive results after all audio has been sent and /// processed.</summary> public class RecognizeRequest : SpeechBaseServiceRequest<Google.Apis.Speech.v1p1beta1.Data.RecognizeResponse> { /// <summary>Constructs a new Recognize request.</summary> public RecognizeRequest(Google.Apis.Services.IClientService service, Google.Apis.Speech.v1p1beta1.Data.RecognizeRequest body) : base(service) { Body = body; InitParameters(); } /// <summary>Gets or sets the body of this request.</summary> Google.Apis.Speech.v1p1beta1.Data.RecognizeRequest Body { get; set; } ///<summary>Returns the body of the request.</summary> protected override object GetBody() { return Body; } ///<summary>Gets the method name.</summary> public override string MethodName { get { return "recognize"; } } ///<summary>Gets the HTTP method.</summary> public override string HttpMethod { get { return "POST"; } } ///<summary>Gets the REST path.</summary> public override string RestPath { get { return "v1p1beta1/speech:recognize"; } } /// <summary>Initializes Recognize parameter list.</summary> protected override void InitParameters() { base.InitParameters(); } } } } namespace Google.Apis.Speech.v1p1beta1.Data { /// <summary>The response message for Operations.ListOperations.</summary> public class ListOperationsResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The standard List next-page token.</summary> [Newtonsoft.Json.JsonPropertyAttribute("nextPageToken")] public virtual string NextPageToken { get; set; } /// <summary>A list of operations that matches the specified filter in the request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("operations")] public virtual System.Collections.Generic.IList<Operation> Operations { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// 
<summary>Describes the progress of a long-running `LongRunningRecognize` call. It is included in the `metadata` /// field of the `Operation` returned by the `GetOperation` call of the `google::longrunning::Operations` /// service.</summary> public class LongRunningRecognizeMetadata : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Time of the most recent processing update.</summary> [Newtonsoft.Json.JsonPropertyAttribute("lastUpdateTime")] public virtual object LastUpdateTime { get; set; } /// <summary>Approximate percentage of audio processed thus far. Guaranteed to be 100 when the audio is fully /// processed and the results are available.</summary> [Newtonsoft.Json.JsonPropertyAttribute("progressPercent")] public virtual System.Nullable<int> ProgressPercent { get; set; } /// <summary>Time when the request was received.</summary> [Newtonsoft.Json.JsonPropertyAttribute("startTime")] public virtual object StartTime { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>The top-level message sent by the client for the `LongRunningRecognize` method.</summary> public class LongRunningRecognizeRequest : Google.Apis.Requests.IDirectResponseSchema { /// <summary>*Required* The audio data to be recognized.</summary> [Newtonsoft.Json.JsonPropertyAttribute("audio")] public virtual RecognitionAudio Audio { get; set; } /// <summary>*Required* Provides information to the recognizer that specifies how to process the /// request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("config")] public virtual RecognitionConfig Config { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>The only message returned to the client by the `LongRunningRecognize` method. It contains the result as /// zero or more sequential `SpeechRecognitionResult` messages. 
It is included in the `result.response` field of the /// `Operation` returned by the `GetOperation` call of the `google::longrunning::Operations` service.</summary> public class LongRunningRecognizeResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Output only. Sequential list of transcription results corresponding to sequential portions of /// audio.</summary> [Newtonsoft.Json.JsonPropertyAttribute("results")] public virtual System.Collections.Generic.IList<SpeechRecognitionResult> Results { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>This resource represents a long-running operation that is the result of a network API call.</summary> public class Operation : Google.Apis.Requests.IDirectResponseSchema { /// <summary>If the value is `false`, it means the operation is still in progress. If `true`, the operation is /// completed, and either `error` or `response` is available.</summary> [Newtonsoft.Json.JsonPropertyAttribute("done")] public virtual System.Nullable<bool> Done { get; set; } /// <summary>The error result of the operation in case of failure or cancellation.</summary> [Newtonsoft.Json.JsonPropertyAttribute("error")] public virtual Status Error { get; set; } /// <summary>Service-specific metadata associated with the operation. It typically contains progress /// information and common metadata such as create time. Some services might not provide such metadata. Any /// method that returns a long-running operation should document the metadata type, if any.</summary> [Newtonsoft.Json.JsonPropertyAttribute("metadata")] public virtual System.Collections.Generic.IDictionary<string,object> Metadata { get; set; } /// <summary>The server-assigned name, which is only unique within the same service that originally returns it. 
/// If you use the default HTTP mapping, the `name` should have the format of /// `operations/some/unique/name`.</summary> [Newtonsoft.Json.JsonPropertyAttribute("name")] public virtual string Name { get; set; } /// <summary>The normal response of the operation in case of success. If the original method returns no data on /// success, such as `Delete`, the response is `google.protobuf.Empty`. If the original method is standard /// `Get`/`Create`/`Update`, the response should be the resource. For other methods, the response should have /// the type `XxxResponse`, where `Xxx` is the original method name. For example, if the original method name /// is `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.</summary> [Newtonsoft.Json.JsonPropertyAttribute("response")] public virtual System.Collections.Generic.IDictionary<string,object> Response { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Contains audio data in the encoding specified in the `RecognitionConfig`. Either `content` or `uri` /// must be supplied. Supplying both or neither returns google.rpc.Code.INVALID_ARGUMENT. See [content limits /// ](/speech-to-text/quotas#content).</summary> public class RecognitionAudio : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The audio data bytes encoded as specified in `RecognitionConfig`. Note: as with all bytes fields, /// protobuffers use a pure binary representation, whereas JSON representations use base64.</summary> [Newtonsoft.Json.JsonPropertyAttribute("content")] public virtual string Content { get; set; } /// <summary>URI that points to a file that contains audio data bytes as specified in `RecognitionConfig`. The /// file must not be compressed (for example, gzip). Currently, only Google Cloud Storage URIs are supported, /// which must be specified in the following format: `gs://bucket_name/object_name` (other URI formats return /// google.rpc.Code.INVALID_ARGUMENT). 
For more information, see [Request /// URIs](https://cloud.google.com/storage/docs/reference-uris).</summary> [Newtonsoft.Json.JsonPropertyAttribute("uri")] public virtual string Uri { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Provides information to the recognizer that specifies how to process the request.</summary> public class RecognitionConfig : Google.Apis.Requests.IDirectResponseSchema { /// <summary>*Optional* A list of up to 3 additional [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) /// language tags, listing possible alternative languages of the supplied audio. See [Language Support](/speech- /// to-text/docs/languages) for a list of the currently supported language codes. If alternative languages are /// listed, recognition result will contain recognition in the most likely language detected including the main /// language_code. The recognition result will include the language tag of the language detected in the audio. /// Note: This feature is only supported for Voice Command and Voice Search use cases and performance may vary /// for other use cases (e.g., phone call transcription).</summary> [Newtonsoft.Json.JsonPropertyAttribute("alternativeLanguageCodes")] public virtual System.Collections.Generic.IList<string> AlternativeLanguageCodes { get; set; } /// <summary>*Optional* The number of channels in the input audio data. ONLY set this for MULTI-CHANNEL /// recognition. Valid values for LINEAR16 and FLAC are `1`-`8`. Valid values for OGG_OPUS are '1'-'254'. Valid /// value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. If `0` or omitted, defaults to one /// channel (mono). Note: We only recognize the first channel by default. 
To perform independent recognition on /// each channel set `enable_separate_recognition_per_channel` to 'true'.</summary> [Newtonsoft.Json.JsonPropertyAttribute("audioChannelCount")] public virtual System.Nullable<int> AudioChannelCount { get; set; } /// <summary>*Optional* Config to enable speaker diarization and set additional parameters to make diarization /// better suited for your application. Note: When this is enabled, we send all the words from the beginning of /// the audio for the top alternative in every consecutive STREAMING responses. This is done in order to improve /// our speaker tags as our models learn to identify the speakers in the conversation over time. For non- /// streaming requests, the diarization results will be provided only in the top alternative of the FINAL /// SpeechRecognitionResult.</summary> [Newtonsoft.Json.JsonPropertyAttribute("diarizationConfig")] public virtual SpeakerDiarizationConfig DiarizationConfig { get; set; } /// <summary>*Optional* If set, specifies the estimated number of speakers in the conversation. If not set, /// defaults to '2'. Ignored unless enable_speaker_diarization is set to true." Note: Use diarization_config /// instead. This field will be DEPRECATED soon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("diarizationSpeakerCount")] public virtual System.Nullable<int> DiarizationSpeakerCount { get; set; } /// <summary>*Optional* If 'true', adds punctuation to recognition result hypotheses. This feature is only /// available in select languages. Setting this for requests in other languages has no effect at all. The /// default 'false' value does not add punctuation to result hypotheses. Note: This is currently offered as an /// experimental service, complimentary to all users. 
In the future this may be exclusively available as a /// premium feature.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableAutomaticPunctuation")] public virtual System.Nullable<bool> EnableAutomaticPunctuation { get; set; } /// <summary>This needs to be set to `true` explicitly and `audio_channel_count` > 1 to get each channel /// recognized separately. The recognition result will contain a `channel_tag` field to state which channel that /// result belongs to. If this is not true, we will only recognize the first channel. The request is billed /// cumulatively for all channels recognized: `audio_channel_count` multiplied by the length of the /// audio.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableSeparateRecognitionPerChannel")] public virtual System.Nullable<bool> EnableSeparateRecognitionPerChannel { get; set; } /// <summary>*Optional* If 'true', enables speaker detection for each recognized word in the top alternative of /// the recognition result using a speaker_tag provided in the WordInfo. Note: Use diarization_config instead. /// This field will be DEPRECATED soon.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableSpeakerDiarization")] public virtual System.Nullable<bool> EnableSpeakerDiarization { get; set; } /// <summary>*Optional* If `true`, the top result includes a list of words and the confidence for those words. /// If `false`, no word-level confidence information is returned. The default is `false`.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableWordConfidence")] public virtual System.Nullable<bool> EnableWordConfidence { get; set; } /// <summary>*Optional* If `true`, the top result includes a list of words and the start and end time offsets /// (timestamps) for those words. If `false`, no word-level time offset information is returned. 
The default is /// `false`.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableWordTimeOffsets")] public virtual System.Nullable<bool> EnableWordTimeOffsets { get; set; } /// <summary>Encoding of audio data sent in all `RecognitionAudio` messages. This field is optional for `FLAC` /// and `WAV` audio files and required for all other audio formats. For details, see AudioEncoding.</summary> [Newtonsoft.Json.JsonPropertyAttribute("encoding")] public virtual string Encoding { get; set; } /// <summary>*Required* The language of the supplied audio as a [BCP-47](https://www.rfc- /// editor.org/rfc/bcp/bcp47.txt) language tag. Example: "en-US". See [Language Support](/speech-to- /// text/docs/languages) for a list of the currently supported language codes.</summary> [Newtonsoft.Json.JsonPropertyAttribute("languageCode")] public virtual string LanguageCode { get; set; } /// <summary>*Optional* Maximum number of recognition hypotheses to be returned. Specifically, the maximum /// number of `SpeechRecognitionAlternative` messages within each `SpeechRecognitionResult`. The server may /// return fewer than `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will return a maximum /// of one. If omitted, will return a maximum of one.</summary> [Newtonsoft.Json.JsonPropertyAttribute("maxAlternatives")] public virtual System.Nullable<int> MaxAlternatives { get; set; } /// <summary>*Optional* Metadata regarding this request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("metadata")] public virtual RecognitionMetadata Metadata { get; set; } /// <summary>*Optional* Which model to select for the given request. Select the model best suited to your domain /// to get best results. If a model is not explicitly specified, then we auto-select a model based on the /// parameters in the RecognitionConfig. /// /// Model Description /// /// command_and_search Best for short queries such as voice commands or voice search. 
/// /// phone_call Best for audio that originated from a phone call (typically recorded at an 8khz sampling rate). /// /// video Best for audio that originated from from video or includes multiple speakers. Ideally the audio is /// recorded at a 16khz or greater sampling rate. This is a premium model that costs more than the standard /// rate. /// /// default Best for audio that is not one of the specific audio models. For example, long-form audio. Ideally /// the audio is high-fidelity, recorded at a 16khz or greater sampling rate. /// /// </summary> [Newtonsoft.Json.JsonPropertyAttribute("model")] public virtual string Model { get; set; } /// <summary>*Optional* If set to `true`, the server will attempt to filter out profanities, replacing all but /// the initial character in each filtered word with asterisks, e.g. "f***". If set to `false` or omitted, /// profanities won't be filtered out.</summary> [Newtonsoft.Json.JsonPropertyAttribute("profanityFilter")] public virtual System.Nullable<bool> ProfanityFilter { get; set; } /// <summary>Sample rate in Hertz of the audio data sent in all `RecognitionAudio` messages. Valid values are: /// 8000-48000. 16000 is optimal. For best results, set the sampling rate of the audio source to 16000 Hz. If /// that's not possible, use the native sample rate of the audio source (instead of re-sampling). This field is /// optional for `FLAC`, `WAV`. and 'MP3' audio files, and is required for all other audio formats. For /// details, see AudioEncoding.</summary> [Newtonsoft.Json.JsonPropertyAttribute("sampleRateHertz")] public virtual System.Nullable<int> SampleRateHertz { get; set; } /// <summary>*Optional* array of SpeechContext. A means to provide context to assist the speech recognition. 
For /// more information, see [Phrase Hints](/speech-to-text/docs/basics#phrase-hints).</summary> [Newtonsoft.Json.JsonPropertyAttribute("speechContexts")] public virtual System.Collections.Generic.IList<SpeechContext> SpeechContexts { get; set; } /// <summary>*Optional* Set to true to use an enhanced model for speech recognition. If `use_enhanced` is set to /// true and the `model` field is not set, then an appropriate enhanced model is chosen if: 1. project is /// eligible for requesting enhanced models 2. an enhanced model exists for the audio /// /// If `use_enhanced` is true and an enhanced version of the specified model does not exist, then the speech is /// recognized using the standard version of the specified model. /// /// Enhanced speech models require that you opt-in to data logging using instructions in the [documentation /// ](/speech-to-text/docs/enable-data-logging). If you set `use_enhanced` to true and you have not enabled /// audio logging, then you will receive an error.</summary> [Newtonsoft.Json.JsonPropertyAttribute("useEnhanced")] public virtual System.Nullable<bool> UseEnhanced { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Description of audio data to be recognized.</summary> public class RecognitionMetadata : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Description of the content. Eg. "Recordings of federal supreme court hearings from 2012".</summary> [Newtonsoft.Json.JsonPropertyAttribute("audioTopic")] public virtual string AudioTopic { get; set; } /// <summary>The industry vertical to which this speech recognition request most closely applies. This is most /// indicative of the topics contained in the audio. 
Use the 6-digit NAICS code to identify the industry /// vertical - see https://www.naics.com/search/.</summary> [Newtonsoft.Json.JsonPropertyAttribute("industryNaicsCodeOfAudio")] public virtual System.Nullable<long> IndustryNaicsCodeOfAudio { get; set; } /// <summary>The use case most closely describing the audio content to be recognized.</summary> [Newtonsoft.Json.JsonPropertyAttribute("interactionType")] public virtual string InteractionType { get; set; } /// <summary>The audio type that most closely describes the audio being recognized.</summary> [Newtonsoft.Json.JsonPropertyAttribute("microphoneDistance")] public virtual string MicrophoneDistance { get; set; } /// <summary>Obfuscated (privacy-protected) ID of the user, to identify number of unique users using the /// service.</summary> [Newtonsoft.Json.JsonPropertyAttribute("obfuscatedId")] public virtual System.Nullable<long> ObfuscatedId { get; set; } /// <summary>The original media the speech was recorded on.</summary> [Newtonsoft.Json.JsonPropertyAttribute("originalMediaType")] public virtual string OriginalMediaType { get; set; } /// <summary>Mime type of the original audio file. For example `audio/m4a`, `audio/x-alaw-basic`, `audio/mp3`, /// `audio/3gpp`. A list of possible audio mime types is maintained at http://www.iana.org/assignments/media- /// types/media-types.xhtml#audio</summary> [Newtonsoft.Json.JsonPropertyAttribute("originalMimeType")] public virtual string OriginalMimeType { get; set; } /// <summary>The device used to make the recording. 
Examples 'Nexus 5X' or 'Polycom SoundStation IP 6000' or /// 'POTS' or 'VoIP' or 'Cardioid Microphone'.</summary> [Newtonsoft.Json.JsonPropertyAttribute("recordingDeviceName")] public virtual string RecordingDeviceName { get; set; } /// <summary>The type of device the speech was recorded with.</summary> [Newtonsoft.Json.JsonPropertyAttribute("recordingDeviceType")] public virtual string RecordingDeviceType { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>The top-level message sent by the client for the `Recognize` method.</summary> public class RecognizeRequest : Google.Apis.Requests.IDirectResponseSchema { /// <summary>*Required* The audio data to be recognized.</summary> [Newtonsoft.Json.JsonPropertyAttribute("audio")] public virtual RecognitionAudio Audio { get; set; } /// <summary>*Required* Provides information to the recognizer that specifies how to process the /// request.</summary> [Newtonsoft.Json.JsonPropertyAttribute("config")] public virtual RecognitionConfig Config { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>The only message returned to the client by the `Recognize` method. It contains the result as zero or /// more sequential `SpeechRecognitionResult` messages.</summary> public class RecognizeResponse : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Output only. 
Sequential list of transcription results corresponding to sequential portions of /// audio.</summary> [Newtonsoft.Json.JsonPropertyAttribute("results")] public virtual System.Collections.Generic.IList<SpeechRecognitionResult> Results { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } public class SpeakerDiarizationConfig : Google.Apis.Requests.IDirectResponseSchema { /// <summary>*Optional* If 'true', enables speaker detection for each recognized word in the top alternative of /// the recognition result using a speaker_tag provided in the WordInfo.</summary> [Newtonsoft.Json.JsonPropertyAttribute("enableSpeakerDiarization")] public virtual System.Nullable<bool> EnableSpeakerDiarization { get; set; } /// <summary>*Optional* Only used if diarization_speaker_count is not set. Maximum number of speakers in the /// conversation. This range gives you more flexibility by allowing the system to automatically determine the /// correct number of speakers. If not set, the default value is 6.</summary> [Newtonsoft.Json.JsonPropertyAttribute("maxSpeakerCount")] public virtual System.Nullable<int> MaxSpeakerCount { get; set; } /// <summary>*Optional* Only used if diarization_speaker_count is not set. Minimum number of speakers in the /// conversation. This range gives you more flexibility by allowing the system to automatically determine the /// correct number of speakers. 
If not set, the default value is 2.</summary> [Newtonsoft.Json.JsonPropertyAttribute("minSpeakerCount")] public virtual System.Nullable<int> MinSpeakerCount { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Provides "hints" to the speech recognizer to favor specific words and phrases in the results.</summary> public class SpeechContext : Google.Apis.Requests.IDirectResponseSchema { /// <summary>*Optional* A list of strings containing words and phrases "hints" so that the speech recognition is /// more likely to recognize them. This can be used to improve the accuracy for specific words and phrases, for /// example, if specific commands are typically spoken by the user. This can also be used to add additional /// words to the vocabulary of the recognizer. See [usage limits](/speech-to-text/quotas#content).</summary> [Newtonsoft.Json.JsonPropertyAttribute("phrases")] public virtual System.Collections.Generic.IList<string> Phrases { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Alternative hypotheses (a.k.a. n-best list).</summary> public class SpeechRecognitionAlternative : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Output only. The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated /// greater likelihood that the recognized words are correct. This field is set only for the top alternative of /// a non-streaming result or, of a streaming result where `is_final=true`. This field is not guaranteed to be /// accurate and users should not rely on it to be always provided. The default of 0.0 is a sentinel value /// indicating `confidence` was not set.</summary> [Newtonsoft.Json.JsonPropertyAttribute("confidence")] public virtual System.Nullable<float> Confidence { get; set; } /// <summary>Output only. 
Transcript text representing the words that the user spoke.</summary> [Newtonsoft.Json.JsonPropertyAttribute("transcript")] public virtual string Transcript { get; set; } /// <summary>Output only. A list of word-specific information for each recognized word. Note: When /// `enable_speaker_diarization` is true, you will see all the words from the beginning of the audio.</summary> [Newtonsoft.Json.JsonPropertyAttribute("words")] public virtual System.Collections.Generic.IList<WordInfo> Words { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>A speech recognition result corresponding to a portion of the audio.</summary> public class SpeechRecognitionResult : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Output only. May contain one or more recognition hypotheses (up to the maximum specified in /// `max_alternatives`). These alternatives are ordered in terms of accuracy, with the top (first) alternative /// being the most probable, as ranked by the recognizer.</summary> [Newtonsoft.Json.JsonPropertyAttribute("alternatives")] public virtual System.Collections.Generic.IList<SpeechRecognitionAlternative> Alternatives { get; set; } /// <summary>For multi-channel audio, this is the channel number corresponding to the recognized result for the /// audio from that channel. For audio_channel_count = N, its output values can range from '1' to 'N'.</summary> [Newtonsoft.Json.JsonPropertyAttribute("channelTag")] public virtual System.Nullable<int> ChannelTag { get; set; } /// <summary>Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the /// language in this result. 
This language code was detected to have the most likelihood of being spoken in the /// audio.</summary> [Newtonsoft.Json.JsonPropertyAttribute("languageCode")] public virtual string LanguageCode { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>The `Status` type defines a logical error model that is suitable for different programming /// environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). The error model /// is designed to be: /// /// - Simple to use and understand for most users - Flexible enough to meet unexpected needs /// /// # Overview /// /// The `Status` message contains three pieces of data: error code, error message, and error details. The error code /// should be an enum value of google.rpc.Code, but it may accept additional error codes if needed. The error /// message should be a developer-facing English message that helps developers *understand* and *resolve* the error. /// If a localized user-facing error message is needed, put the localized message in the error details or localize /// it in the client. The optional error details may contain arbitrary information about the error. There is a /// predefined set of error detail types in the package `google.rpc` that can be used for common error conditions. /// /// # Language mapping /// /// The `Status` message is the logical representation of the error model, but it is not necessarily the actual wire /// format. When the `Status` message is exposed in different client libraries and different wire protocols, it can /// be mapped differently. For example, it will likely be mapped to some exceptions in Java, but more likely mapped /// to some error codes in C. /// /// # Other uses /// /// The error model and the `Status` message can be used in a variety of environments, either with or without APIs, /// to provide a consistent developer experience across different environments. 
/// /// Example uses of this error model include: /// /// - Partial errors. If a service needs to return partial errors to the client, it may embed the `Status` in the /// normal response to indicate the partial errors. /// /// - Workflow errors. A typical workflow has multiple steps. Each step may have a `Status` message for error /// reporting. /// /// - Batch operations. If a client uses batch request and batch response, the `Status` message should be used /// directly inside batch response, one for each error sub-response. /// /// - Asynchronous operations. If an API call embeds asynchronous operation results in its response, the status of /// those operations should be represented directly using the `Status` message. /// /// - Logging. If some API errors are stored in logs, the message `Status` could be used directly after any /// stripping needed for security/privacy reasons.</summary> public class Status : Google.Apis.Requests.IDirectResponseSchema { /// <summary>The status code, which should be an enum value of google.rpc.Code.</summary> [Newtonsoft.Json.JsonPropertyAttribute("code")] public virtual System.Nullable<int> Code { get; set; } /// <summary>A list of messages that carry the error details. There is a common set of message types for APIs /// to use.</summary> [Newtonsoft.Json.JsonPropertyAttribute("details")] public virtual System.Collections.Generic.IList<System.Collections.Generic.IDictionary<string,object>> Details { get; set; } /// <summary>A developer-facing error message, which should be in English. 
Any user-facing error message should /// be localized and sent in the google.rpc.Status.details field, or localized by the client.</summary> [Newtonsoft.Json.JsonPropertyAttribute("message")] public virtual string Message { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } /// <summary>Word-specific information for recognized words.</summary> public class WordInfo : Google.Apis.Requests.IDirectResponseSchema { /// <summary>Output only. The confidence estimate between 0.0 and 1.0. A higher number indicates an estimated /// greater likelihood that the recognized words are correct. This field is set only for the top alternative of /// a non-streaming result or, of a streaming result where `is_final=true`. This field is not guaranteed to be /// accurate and users should not rely on it to be always provided. The default of 0.0 is a sentinel value /// indicating `confidence` was not set.</summary> [Newtonsoft.Json.JsonPropertyAttribute("confidence")] public virtual System.Nullable<float> Confidence { get; set; } /// <summary>Output only. Time offset relative to the beginning of the audio, and corresponding to the end of /// the spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis. /// This is an experimental feature and the accuracy of the time offset can vary.</summary> [Newtonsoft.Json.JsonPropertyAttribute("endTime")] public virtual object EndTime { get; set; } /// <summary>Output only. A distinct integer value is assigned for every speaker within the audio. This field /// specifies which one of those speakers was detected to have spoken this word. Value ranges from '1' to /// diarization_speaker_count. speaker_tag is set if enable_speaker_diarization = 'true' and only in the top /// alternative.</summary> [Newtonsoft.Json.JsonPropertyAttribute("speakerTag")] public virtual System.Nullable<int> SpeakerTag { get; set; } /// <summary>Output only. 
Time offset relative to the beginning of the audio, and corresponding to the start of /// the spoken word. This field is only set if `enable_word_time_offsets=true` and only in the top hypothesis. /// This is an experimental feature and the accuracy of the time offset can vary.</summary> [Newtonsoft.Json.JsonPropertyAttribute("startTime")] public virtual object StartTime { get; set; } /// <summary>Output only. The word corresponding to this set of information.</summary> [Newtonsoft.Json.JsonPropertyAttribute("word")] public virtual string Word { get; set; } /// <summary>The ETag of the item.</summary> public virtual string ETag { get; set; } } }
Duikmeester/google-api-dotnet-client
Src/Generated/Google.Apis.Speech.v1p1beta1/Google.Apis.Speech.v1p1beta1.cs
C#
apache-2.0
77,359
/**
 * Created by mdylag on 17/03/15.
 */

/**
 * Invokes `callback` if (and only if) it is a callable function.
 * Non-function or missing arguments are silently ignored, so this is a
 * safe way to fire an optional callback.
 * @param {Function} callback - function to invoke with no arguments
 */
function callCallback(callback) {
    console.log("Function callCallback");
    if (callback && typeof callback === "function") {
        callback();
    }
}

/**
 * Example callback that simply logs its own name.
 */
function myCallBack() {
    console.log("Function myCallBack");
}

// Bug fix: the original invoked `writeCode(myCallBack)`, but `writeCode` is
// never defined anywhere in this file, so loading the script threw a
// ReferenceError. The function defined above, `callCallback`, is the
// intended entry point for this callback demo.
callCallback(myCallBack);
ms-courses/JavaScriptAdvance
functions/callback.js
JavaScript
apache-2.0
292
package com.lenovo.leoss.utils; /** * Created by zhangyl27 on 2014/10/16. */ public class HSqls { public static final String USER_BASE_INFO_UPDATE = "update User set " + "contact_name=?," + "phone=?," + "company_type=?," + "company_name=?," + "company_site=? " + "where id=?"; }
ZhaoX/Spring-Hibernate-Example
src/main/java/com/lenovo/leoss/utils/HSqls.java
Java
apache-2.0
420
#!/usr/bin/env python2.7
"""Infinite-trailer pipeline.

Downloads movie trailers from YouTube with youtube-dl, detects scene breaks
at fully-faded (dark) frames using OpenCV, renders each acceptable-length
scene into its own clip with ffmpeg, and writes a JSON listing of the
resulting clips for the HTML front end.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from argparse import ArgumentParser
from collections import OrderedDict
from contextlib import contextmanager
import json
import logging
import multiprocessing
import os
import shutil
import subprocess
import sys

import cv2
import numpy

# All supported log levels; the two maps below translate between a level's
# numeric value and its lowercase name for the --log-level CLI option.
LOG_LEVELS = (
    logging.CRITICAL,
    logging.ERROR,
    logging.WARNING,
    logging.INFO,
    logging.DEBUG
)
LOG_LEVEL_TO_NAMES = OrderedDict((level, logging.getLevelName(level).lower())
                                 for level in LOG_LEVELS)
LOG_NAME_TO_LEVEL = OrderedDict((name, level)
                                for level, name in LOG_LEVEL_TO_NAMES.items())

# Container/extension used for every rendered clip.
VIDEO_EXTENSION = 'webm'
# youtube-dl format ids for the video and audio streams to fetch
# (passed as '-f <video>+<audio>').
YOUTUBE_VIDEO_FORMAT = '242'
YOUTUBE_AUDIO_FORMAT = '171'
# Grayscale intensity above which a pixel counts as "light"; a frame with no
# light pixels is treated as part of a fade-to-black scene boundary.
THRESHOLD = 30
CLIPS_OUTPUT_DIR = os.path.join('html', 'clips')
# Scenes shorter/longer than these bounds (seconds) are not rendered.
MIN_CLIP_LENGTH = 1
MAX_CLIP_LENGTH = 5
LISTINGS_PATH = os.path.join('html', 'listings.json')


def main(argv=None):
    """Entry point: parse arguments and dispatch to the chosen sub-command."""
    args = parse_args(argv=argv)
    configure_logger(args)
    command = args.command
    if command == 'bulk':
        bulk(args)
    elif command == 'download':
        download_trailer(args)
    elif command == 'find':
        find_scenes(args)
    elif command == 'render':
        render_clips(args)
    elif command == 'listing':
        make_listing(args)
    else:
        raise RuntimeError('Invalid command {}'.format(args.command))


def parse_args(argv=None):
    """Build the CLI parser and parse ``argv`` (defaults to ``sys.argv``)."""
    if argv is None:
        argv = sys.argv
    parser = ArgumentParser()
    parser.add_argument('-l', '--log-level', choices=LOG_NAME_TO_LEVEL.keys(),
                        default=LOG_LEVEL_TO_NAMES[logging.INFO])
    subparsers = parser.add_subparsers(dest='command')
    # 'bulk' runs the whole pipeline: download -> find scenes -> render ->
    # listing, with paired -x/-X flags to enable/skip each stage.
    bulk = subparsers.add_parser('bulk')
    bulk.add_argument('-m', '--max-length', default=MAX_CLIP_LENGTH, type=float)
    bulk.add_argument('-n', '--min-length', default=MIN_CLIP_LENGTH, type=float)
    bulk.add_argument('-c', '--trailers_config_path', default='trailers.json')
    bulk.add_argument('-l', '--listings_path', default=LISTINGS_PATH)
    bulk.add_argument('-o', '--trailers_output_dir', default='trailers')
    bulk.add_argument('-s', '--scenes_output_dir', default='scenes')
    bulk.add_argument('-t', '--clips_output_dir', default=CLIPS_OUTPUT_DIR)
    bulk.add_argument('-d', '--download', dest='download', action='store_true')
    bulk.add_argument('-D', '--skip-download', dest='download',
                      action='store_false')
    bulk.set_defaults(download=True)
    bulk.add_argument('-u', '--search-scenes', dest='search_scenes',
                      action='store_true')
    bulk.add_argument('-U', '--skip-search-scenes', dest='search_scenes',
                      action='store_false')
    bulk.set_defaults(search_scenes=True)
    bulk.add_argument('-r', '--render', dest='render', action='store_true')
    bulk.add_argument('-R', '--skip-render', dest='render',
                      action='store_false')
    bulk.set_defaults(render=True)
    # 'download': fetch one video by YouTube id.
    download = subparsers.add_parser('download')
    download.add_argument('youtube_id')
    download.add_argument('output_filename')
    download.add_argument('-v', '--video_format', default=YOUTUBE_VIDEO_FORMAT)
    download.add_argument('-a', '--audio_format', default=YOUTUBE_AUDIO_FORMAT)
    # 'find': detect scene boundaries in an existing video file.
    find = subparsers.add_parser('find')
    find.add_argument('-t', '--threshold', default=THRESHOLD, type=int)
    find.add_argument('video_path')
    find.add_argument('output_dir')
    # 'render': cut clips from a video given a previously saved scenes file.
    render = subparsers.add_parser('render')
    render.add_argument('-m', '--max-length', default=MAX_CLIP_LENGTH,
                        type=float)
    render.add_argument('-n', '--min-length', default=MIN_CLIP_LENGTH,
                        type=float)
    render.add_argument('scenes_path')
    render.add_argument('video_path')
    render.add_argument('output_dir')
    # 'listing': rebuild the clips JSON index only.
    listing = subparsers.add_parser('listing')
    listing.add_argument('clips_dir')
    listing.add_argument('listing_path')
    return parser.parse_args(args=argv[1:])


def configure_logger(args):
    """Configure root logging and bind the module-level ``logger``."""
    global logger
    logging.basicConfig(datefmt='%H:%M:%S',
                        format='[%(levelname).1s %(asctime)s] %(message)s',
                        level=LOG_NAME_TO_LEVEL[args.log_level])
    logger = logging.getLogger(__name__)


def bulk(args):
    """Run the full pipeline for every trailer in the JSON config file."""
    with open(args.trailers_config_path) as trailers_config_file:
        trailers_config = json.load(trailers_config_file)
    trailers_output_dir = args.trailers_output_dir
    ensure_dir(trailers_output_dir)
    scenes_output_dir = args.scenes_output_dir
    ensure_dir(scenes_output_dir)
    clips_output_dir = args.clips_output_dir
    ensure_dir(clips_output_dir)
    # XXX: Only run task so OpenCV doesn't corrupt itself up, had problems when
    # opening another video in the same process, would open the video and
    # immediately close.
    pool = multiprocessing.Pool(maxtasksperchild=1)
    for trailer in trailers_config['trailers']:
        pool.apply_async(create_clips_for_trailer,
                         [trailer, trailers_output_dir, scenes_output_dir,
                          clips_output_dir, args.download, args.search_scenes])
    pool.close()
    pool.join()
    # Rendering is driven from the parent process; _render_clips manages its
    # own worker pool per video.
    for trailer in trailers_config['trailers']:
        video_path = get_video_file_name(trailers_output_dir, trailer['name'])
        scene_file = get_scenes_file_name(video_path, scenes_output_dir)
        if args.render:
            _render_clips(video_path, clips_output_dir, scene_file,
                          min_length=args.min_length,
                          max_length=args.max_length)
    _make_listing(os.path.join(clips_output_dir, '..'))


def get_video_file_name(output_dir, name):
    """Return the path of a downloaded trailer inside ``output_dir``."""
    return os.path.join(output_dir, name)


def create_clips_for_trailer(trailer, trailers_output_dir, scenes_output_dir,
                             clips_output_dir, download=True,
                             search_scenes=True):
    """Pool worker: optionally download a trailer, then detect its scenes.

    Note: ``clips_output_dir`` is accepted but not used here; rendering
    happens later in the parent process (see ``bulk``).
    """
    output_path = get_video_file_name(trailers_output_dir, trailer['name'])
    if download:
        _download_trailer(output_path, trailer['youtube_id'])
    logger.info('Searching %s', output_path)
    if search_scenes:
        _find_scenes(output_path, scenes_output_dir)


def download_trailer(args):
    """CLI wrapper for the ``download`` sub-command."""
    _download_trailer(args.output_filename, args.youtube_id,
                      video_format=args.video_format,
                      audio_format=args.audio_format)


def _download_trailer(
        output_filename, youtube_id,
        video_format=YOUTUBE_VIDEO_FORMAT, audio_format=YOUTUBE_AUDIO_FORMAT):
    """Fetch one YouTube video with youtube-dl and remove stream leftovers.

    Raises ``subprocess.CalledProcessError`` if youtube-dl exits non-zero.
    """
    logger.info('Downloading %s ...', output_filename)
    subprocess.check_call([
        'youtube-dl',
        '-o', '{}'.format(output_filename),
        'https://www.youtube.com/watch?v={}'.format(youtube_id),
        '-f', '{}+{}'.format(video_format, audio_format)
    ])

    # XXX: youtube-dl leaves some artifacts of the audio and video streams it
    # downloaded so we'll delete them.
    def unlink_download_artifacts(output_filename, dl_format):
        # Artifacts are named '<stem>.f<format><ext>' alongside the output.
        extension = os.path.splitext(output_filename)[1]
        output_dir = os.path.dirname(os.path.realpath(output_filename))
        output_basename = os.path.basename(os.path.realpath(output_filename))
        basename = os.path.splitext(output_basename)[0]
        artifact = '{}.f{}{}'.format(basename, dl_format, extension)
        os.unlink(os.path.join(output_dir, artifact))

    unlink_download_artifacts(output_filename, video_format)
    unlink_download_artifacts(output_filename, audio_format)


def find_scenes(args):
    """CLI wrapper for the ``find`` sub-command."""
    _find_scenes(args.video_path, args.output_dir, threshold=args.threshold)


def get_scenes_file_name(video_path, output_dir):
    """Return the path of the scenes JSON file derived from the video name."""
    video_name = os.path.basename(video_path)
    video_stem, video_ext = os.path.splitext(video_name)
    scenes_name = '{stem}.json'.format(stem=video_stem)
    return os.path.join(output_dir, scenes_name)


def _find_scenes(video_path, output_dir, threshold=THRESHOLD):
    """Detect scenes in ``video_path``, dump them as JSON, return that path."""
    ensure_dir(output_dir)
    scenes_path = get_scenes_file_name(video_path, output_dir)
    with video_capture(video_path) as cap:
        scene_splitter = SceneFinder(cap, threshold)
        scenes = scene_splitter.find_scenes()
    if len(scenes) == 0:
        logger.error('No scenes found for %s' % video_path)
    with open(scenes_path, 'w') as scenes_file:
        json.dump(scenes, scenes_file)
    return scenes_path


@contextmanager
def video_capture(vido_path):
    """Context manager yielding a cv2.VideoCapture, released on exit."""
    cap = cv2.VideoCapture(vido_path)
    yield cap
    cap.release()


class SceneFinder(object):
    """Scans a video frame-by-frame and records (start, stop) timestamp
    pairs for spans of footage separated by fully-dark (faded) frames."""

    def __init__(self, cap, threshold):
        # cap: an opened cv2.VideoCapture; threshold: grayscale intensity
        # above which a pixel counts as "light".
        self._cap = cap
        self._threshold = threshold
        self._find_scenes_called = False
        self._in_fade = False
        self._scenes = []
        self._start_index = 0
        self._video_width = self._get_int_prop('FRAME_WIDTH')
        self._video_height = self._get_int_prop('FRAME_HEIGHT')
        self._video_fps = self._get_int_prop('FPS')

    def _get_int_prop(self, prop_name):
        # Reads a capture property through the legacy cv2.cv constant names
        # (e.g. CV_CAP_PROP_FPS), coerced to int.
        name = 'CV_CAP_PROP_{prop_name}'.format(prop_name=prop_name)
        return int(self._cap.get(getattr(cv2.cv, name)))

    def find_scenes(self):
        """Return the scene list; the scan runs only on the first call."""
        if not self._find_scenes_called:
            self._find_scenes_called = True
            for index, frame in enumerate(self._frames()):
                self._check_frame(index, frame)
        return self._scenes

    def _frames(self):
        """Yield grayscale frames until the capture is exhausted.

        NOTE(review): ``raise StopIteration`` inside a generator is the
        Python 2 idiom (this script targets python2.7); under Python 3.7+
        (PEP 479) it would become a RuntimeError — replace with ``return``
        if ever ported.
        """
        while True:
            ret, frame = self._cap.read()
            if not ret:
                logger.info('Stopping on frame %d' % self._start_index)
                if self._start_index == 0:
                    logger.error('Not able to read any frames')
                raise StopIteration
            yield cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    def _check_frame(self, index, frame):
        # A frame with zero light pixels marks a fade: the current scene ends
        # at the first faded frame, and the next scene starts at the first
        # frame where light pixels reappear.
        if self._count_light_pixels(frame) == 0:
            if not self._in_fade:
                self._in_fade = True
                self._add_frame(self._start_index, index)
        elif self._in_fade:
            self._in_fade = False
            self._start_index = index

    def _count_light_pixels(self, frame):
        # Number of grayscale pixels strictly brighter than the threshold.
        return numpy.count_nonzero(frame > self._threshold)

    def _add_frame(self, start_index, stop_index):
        # Convert frame indices to second timestamps via the video FPS
        # (true division is active thanks to the __future__ import).
        def timestamp(index):
            return index / self._video_fps
        scene = (timestamp(start_index), timestamp(stop_index))
        logger.info('Scene: %.1f %.1f', *scene)
        self._scenes.append(scene)


def render_clips(args):
    """CLI wrapper for the ``render`` sub-command."""
    _render_clips(
        args.video_path,
        args.output_dir,
        args.scenes_path,
        min_length=args.min_length,
        max_length=args.max_length
    )


def _render_clips(video_path, output_dir, scenes_path,
                  min_length=MIN_CLIP_LENGTH, max_length=MAX_CLIP_LENGTH):
    """Render every acceptable-length scene of a video into its own clip."""
    video_name = os.path.basename(video_path)
    video_stem, video_ext = os.path.splitext(video_name)
    clips_dir = os.path.join(output_dir, video_stem)
    ensure_dir(output_dir)
    # Start from a clean per-video clip directory on every run.
    if os.path.isdir(clips_dir):
        shutil.rmtree(clips_dir)
    os.mkdir(clips_dir)
    with open(scenes_path) as scenes_file:
        scenes = json.load(scenes_file)

    def min_max_length(scene):
        # Keep only scenes whose duration is strictly between the bounds.
        return min_length < scene[1] - scene[0] < max_length

    scenes = filter(min_max_length, scenes)
    pool = multiprocessing.Pool()
    for index, (start_time, stop_time) in enumerate(scenes):
        clip_name = '{}-{}.{}'.format(video_stem, index, VIDEO_EXTENSION)
        clip_path = os.path.join(clips_dir, clip_name)
        if os.path.exists(clip_path):
            os.remove(clip_path)
        pool.apply_async(render_clip,
                         [video_path, clip_path, start_time, stop_time])
    pool.close()
    pool.join()


def render_clip(video_path, clip_path, start_time, stop_time):
    """Pool worker: cut [start_time, stop_time) out of the video via ffmpeg.

    Raises ``subprocess.CalledProcessError`` if ffmpeg exits non-zero.
    """
    logger.info('Rendering %s ...', clip_path)
    subprocess.check_call([
        '/usr/bin/ffmpeg',
        '-ss', str(start_time),
        '-t', str(stop_time - start_time),
        '-i', video_path,
        '-c:v', 'libvpx',
        '-c:a', 'libvorbis',
        clip_path,
    ])


def ensure_dir(path):
    """Create ``path`` if it does not exist (single level, not recursive)."""
    if not os.path.exists(path):
        os.mkdir(path)


def make_listing(args):
    """CLI wrapper for the ``listing`` sub-command."""
    _make_listing(args.clips_dir, listing_path=args.listing_path)


def _make_listing(clips_dir, listing_path=LISTINGS_PATH):
    """Write a JSON index of every clip file found under ``clips_dir``."""
    listing = {'videos': []}
    for root, dirs, files in os.walk(clips_dir):
        for file_ in files:
            if os.path.splitext(file_)[1] != '.{}'.format(VIDEO_EXTENSION):
                continue
            # Record paths relative to clips_dir so the listing is portable.
            common_prefix = os.path.commonprefix([clips_dir, root])
            path = os.path.join(root[len(common_prefix) + 1:], file_)
            listing['videos'].append(path)
    with open(listing_path, 'w') as listing_file:
        json.dump(listing, listing_file)


if __name__ == '__main__':
    sys.exit(main())
mariosgohan/infinite-trailer
infinite_trailer.py
Python
apache-2.0
12,916
# # Author:: Seth Falcon (<seth@opscode.com>) # Copyright:: Copyright 2010-2011 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require 'yaml' require 'yajl' require 'openssl' require 'base64' require 'digest/sha2' require 'seth/encrypted_data_bag_item' require 'seth/encrypted_data_bag_item/unsupported_encrypted_data_bag_item_format' require 'seth/encrypted_data_bag_item/unacceptable_encrypted_data_bag_item_format' require 'seth/encrypted_data_bag_item/decryption_failure' require 'seth/encrypted_data_bag_item/unsupported_cipher' class Seth::EncryptedDataBagItem #=== Decryptor # For backwards compatibility, Seth implements decryption/deserialization for # older encrypted data bag item formats in addition to the current version. # Each decryption/deserialization strategy is implemented as a class in this # namespace. For convenience the factory method +Decryptor.for()+ can be used # to create an instance of the appropriate strategy for the given encrypted # data bag value. module Decryptor # Detects the encrypted data bag item format version and instantiates a # decryptor object for that version. Call #for_decrypted_item on the # resulting object to decrypt and deserialize it. 
def self.for(encrypted_value, key) format_version = format_version_of(encrypted_value) assert_format_version_acceptable!(format_version) case format_version when 2 Version2Decryptor.new(encrypted_value, key) when 1 Version1Decryptor.new(encrypted_value, key) when 0 Version0Decryptor.new(encrypted_value, key) else raise UnsupportedEncryptedDataBagItemFormat, "This version of seth does not support encrypted data bag item format version '#{format_version}'" end end def self.format_version_of(encrypted_value) if encrypted_value.respond_to?(:key?) encrypted_value["version"] else 0 end end def self.assert_format_version_acceptable!(format_version) unless format_version.kind_of?(Integer) and format_version >= Seth::Config[:data_bag_decrypt_minimum_version] raise UnacceptableEncryptedDataBagItemFormat, "The encrypted data bag item has format version `#{format_version}', " + "but the config setting 'data_bag_decrypt_minimum_version' requires version `#{Seth::Config[:data_bag_decrypt_minimum_version]}'" end end class Version0Decryptor attr_reader :encrypted_data attr_reader :key def initialize(encrypted_data, key) @encrypted_data = encrypted_data @key = key end def for_decrypted_item YAML.load(decrypted_data) end def decrypted_data @decrypted_data ||= begin plaintext = openssl_decryptor.update(encrypted_bytes) plaintext << openssl_decryptor.final rescue OpenSSL::Cipher::CipherError => e raise DecryptionFailure, "Error decrypting data bag value: '#{e.message}'. 
Most likely the provided key is incorrect" end end def encrypted_bytes Base64.decode64(@encrypted_data) end def openssl_decryptor @openssl_decryptor ||= begin d = OpenSSL::Cipher::Cipher.new(ALGORITHM) d.decrypt d.pkcs5_keyivgen(key) d end end end class Version1Decryptor attr_reader :encrypted_data attr_reader :key def initialize(encrypted_data, key) @encrypted_data = encrypted_data @key = key end def for_decrypted_item Yajl::Parser.parse(decrypted_data)["json_wrapper"] rescue Yajl::ParseError # convert to a DecryptionFailure error because the most likely scenario # here is that the decryption step was unsuccessful but returned bad # data rather than raising an error. raise DecryptionFailure, "Error decrypting data bag value. Most likely the provided key is incorrect" end def encrypted_bytes Base64.decode64(@encrypted_data["encrypted_data"]) end def iv Base64.decode64(@encrypted_data["iv"]) end def decrypted_data @decrypted_data ||= begin plaintext = openssl_decryptor.update(encrypted_bytes) plaintext << openssl_decryptor.final rescue OpenSSL::Cipher::CipherError => e raise DecryptionFailure, "Error decrypting data bag value: '#{e.message}'. Most likely the provided key is incorrect" end end def openssl_decryptor @openssl_decryptor ||= begin assert_valid_cipher! d = OpenSSL::Cipher::Cipher.new(ALGORITHM) d.decrypt d.key = Digest::SHA256.digest(key) d.iv = iv d end end def assert_valid_cipher! # In the future, seth may support configurable ciphers. For now, only # aes-256-cbc is supported. requested_cipher = @encrypted_data["cipher"] unless requested_cipher == ALGORITHM raise UnsupportedCipher, "Cipher '#{requested_cipher}' is not supported by this version of Seth. Available ciphers: ['#{ALGORITHM}']" end end end class Version2Decryptor < Version1Decryptor def decrypted_data validate_hmac! unless @decrypted_data super end def validate_hmac! 
digest = OpenSSL::Digest::Digest.new("sha256") raw_hmac = OpenSSL::HMAC.digest(digest, key, @encrypted_data["encrypted_data"]) if candidate_hmac_matches?(raw_hmac) true else raise DecryptionFailure, "Error decrypting data bag value: invalid hmac. Most likely the provided key is incorrect" end end private def candidate_hmac_matches?(expected_hmac) return false unless @encrypted_data["hmac"] expected_bytes = expected_hmac.bytes.to_a candidate_hmac_bytes = Base64.decode64(@encrypted_data["hmac"]).bytes.to_a valid = expected_bytes.size ^ candidate_hmac_bytes.size expected_bytes.zip(candidate_hmac_bytes) { |x, y| valid |= x ^ y.to_i } valid == 0 end end end end
danielsdeleo/seth
lib/seth/encrypted_data_bag_item/decryptor.rb
Ruby
apache-2.0
6,776
/** * Copyright 2012 The Bitcoin Developers * Copyright 2012 Matt Corallo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.spreadcoinj.core; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.spreadcoinj.core.Utils.*; /** * <p>A data structure that contains proofs of block inclusion for one or more transactions, in an efficient manner.</p> * * <p>The encoding works as follows: we traverse the tree in depth-first order, storing a bit for each traversed node, * signifying whether the node is the parent of at least one matched leaf txid (or a matched txid itself). In case we * are at the leaf level, or this bit is 0, its merkle node hash is stored, and its children are not explored further. * Otherwise, no hash is stored, but we recurse into both (or the only) child branch. During decoding, the same * depth-first traversal is performed, consuming bits and hashes as they were written during encoding.</p> * * <p>The serialization is fixed and provides a hard guarantee about the encoded size, * <tt>SIZE <= 10 + ceil(32.25*N)</tt> where N represents the number of leaf nodes of the partial tree. 
N itself * is bounded by:</p> * * <p> * N <= total_transactions<br> * N <= 1 + matched_transactions*tree_height * </p> * * <p><pre>The serialization format: * - uint32 total_transactions (4 bytes) * - varint number of hashes (1-3 bytes) * - uint256[] hashes in depth-first order (<= 32*N bytes) * - varint number of bytes of flag bits (1-3 bytes) * - byte[] flag bits, packed per 8 in a byte, least significant bit first (<= 2*N-1 bits) * The size constraints follow from this.</pre></p> */ public class PartialMerkleTree extends Message { // the total number of transactions in the block private int transactionCount; // node-is-parent-of-matched-txid bits private byte[] matchedChildBits; // txids and internal hashes private List<Sha256Hash> hashes; public PartialMerkleTree(NetworkParameters params, byte[] payloadBytes, int offset) throws ProtocolException { super(params, payloadBytes, offset); } /** * Constructs a new PMT with the given bit set (little endian) and the raw list of hashes including internal hashes, * taking ownership of the list. */ public PartialMerkleTree(NetworkParameters params, byte[] bits, List<Sha256Hash> hashes, int origTxCount) { super(params); this.matchedChildBits = bits; this.hashes = hashes; this.transactionCount = origTxCount; } /** * Calculates a PMT given the list of leaf hashes and which leaves need to be included. The relevant interior hashes * are calculated and a new PMT returned. */ public static PartialMerkleTree buildFromLeaves(NetworkParameters params, byte[] includeBits, List<Sha256Hash> allLeafHashes) { // Calculate height of the tree. 
int height = 0; while (getTreeWidth(allLeafHashes.size(), height) > 1) height++; List<Boolean> bitList = new ArrayList<Boolean>(); List<Sha256Hash> hashes = new ArrayList<Sha256Hash>(); traverseAndBuild(height, 0, allLeafHashes, includeBits, bitList, hashes); byte[] bits = new byte[(int)Math.ceil(bitList.size() / 8.0)]; for (int i = 0; i < bitList.size(); i++) if (bitList.get(i)) Utils.setBitLE(bits, i); return new PartialMerkleTree(params, bits, hashes, allLeafHashes.size()); } @Override public void bitcoinSerializeToStream(OutputStream stream) throws IOException { uint32ToByteStreamLE(transactionCount, stream); stream.write(new VarInt(hashes.size()).encode()); for (Sha256Hash hash : hashes) stream.write(reverseBytes(hash.getBytes())); stream.write(new VarInt(matchedChildBits.length).encode()); stream.write(matchedChildBits); } @Override void parse() throws ProtocolException { transactionCount = (int)readUint32(); int nHashes = (int) readVarInt(); hashes = new ArrayList<Sha256Hash>(nHashes); for (int i = 0; i < nHashes; i++) hashes.add(readHash()); int nFlagBytes = (int) readVarInt(); matchedChildBits = readBytes(nFlagBytes); length = cursor - offset; } // Based on CPartialMerkleTree::TraverseAndBuild in Bitcoin Core. private static void traverseAndBuild(int height, int pos, List<Sha256Hash> allLeafHashes, byte[] includeBits, List<Boolean> matchedChildBits, List<Sha256Hash> resultHashes) { boolean parentOfMatch = false; // Is this node a parent of at least one matched hash? for (int p = pos << height; p < (pos+1) << height && p < allLeafHashes.size(); p++) { if (Utils.checkBitLE(includeBits, p)) { parentOfMatch = true; break; } } // Store as a flag bit. matchedChildBits.add(parentOfMatch); if (height == 0 || !parentOfMatch) { // If at height 0, or nothing interesting below, store hash and stop. resultHashes.add(calcHash(height, pos, allLeafHashes)); } else { // Otherwise descend into the subtrees. 
int h = height - 1; int p = pos * 2; traverseAndBuild(h, p, allLeafHashes, includeBits, matchedChildBits, resultHashes); if (p + 1 < getTreeWidth(allLeafHashes.size(), h)) traverseAndBuild(h, p + 1, allLeafHashes, includeBits, matchedChildBits, resultHashes); } } private static Sha256Hash calcHash(int height, int pos, List<Sha256Hash> hashes) { if (height == 0) { // Hash at height 0 is just the regular tx hash itself. return hashes.get(pos); } int h = height - 1; int p = pos * 2; Sha256Hash left = calcHash(h, p, hashes); // Calculate right hash if not beyond the end of the array - copy left hash otherwise. Sha256Hash right; if (p + 1 < getTreeWidth(hashes.size(), h)) { right = calcHash(h, p + 1, hashes); } else { right = left; } return combineLeftRight(left.getBytes(), right.getBytes()); } @Override protected void parseLite() { } // helper function to efficiently calculate the number of nodes at given height in the merkle tree private static int getTreeWidth(int transactionCount, int height) { return (transactionCount + (1 << height) - 1) >> height; } private static class ValuesUsed { public int bitsUsed = 0, hashesUsed = 0; } // recursive function that traverses tree nodes, consuming the bits and hashes produced by TraverseAndBuild. // it returns the hash of the respective node. 
private Sha256Hash recursiveExtractHashes(int height, int pos, ValuesUsed used, List<Sha256Hash> matchedHashes) throws VerificationException { if (used.bitsUsed >= matchedChildBits.length*8) { // overflowed the bits array - failure throw new VerificationException("CPartialMerkleTree overflowed its bits array"); } boolean parentOfMatch = checkBitLE(matchedChildBits, used.bitsUsed++); if (height == 0 || !parentOfMatch) { // if at height 0, or nothing interesting below, use stored hash and do not descend if (used.hashesUsed >= hashes.size()) { // overflowed the hash array - failure throw new VerificationException("CPartialMerkleTree overflowed its hash array"); } Sha256Hash hash = hashes.get(used.hashesUsed++); if (height == 0 && parentOfMatch) // in case of height 0, we have a matched txid matchedHashes.add(hash); return hash; } else { // otherwise, descend into the subtrees to extract matched txids and hashes byte[] left = recursiveExtractHashes(height - 1, pos * 2, used, matchedHashes).getBytes(), right; if (pos * 2 + 1 < getTreeWidth(transactionCount, height-1)) right = recursiveExtractHashes(height - 1, pos * 2 + 1, used, matchedHashes).getBytes(); else right = left; // and combine them before returning return combineLeftRight(left, right); } } private static Sha256Hash combineLeftRight(byte[] left, byte[] right) { return new Sha256Hash(reverseBytes(doubleDigestTwoBuffers( reverseBytes(left), 0, 32, reverseBytes(right), 0, 32))); } /** * Extracts tx hashes that are in this merkle tree * and returns the merkle root of this tree. * * The returned root should be checked against the * merkle root contained in the block header for security. 
* * @param matchedHashes A list which will contain the matched txn (will be cleared) * Required to be a LinkedHashSet in order to retain order or transactions in the block * @return the merkle root of this merkle tree * @throws ProtocolException if this partial merkle tree is invalid */ public Sha256Hash getTxnHashAndMerkleRoot(List<Sha256Hash> matchedHashes) throws VerificationException { matchedHashes.clear(); // An empty set will not work if (transactionCount == 0) throw new VerificationException("Got a CPartialMerkleTree with 0 transactions"); // check for excessively high numbers of transactions if (transactionCount > Block.MAX_BLOCK_SIZE / 60) // 60 is the lower bound for the size of a serialized CTransaction throw new VerificationException("Got a CPartialMerkleTree with more transactions than is possible"); // there can never be more hashes provided than one for every txid if (hashes.size() > transactionCount) throw new VerificationException("Got a CPartialMerkleTree with more hashes than transactions"); // there must be at least one bit per node in the partial tree, and at least one node per hash if (matchedChildBits.length*8 < hashes.size()) throw new VerificationException("Got a CPartialMerkleTree with fewer matched bits than hashes"); // calculate height of tree int height = 0; while (getTreeWidth(transactionCount, height) > 1) height++; // traverse the partial tree ValuesUsed used = new ValuesUsed(); Sha256Hash merkleRoot = recursiveExtractHashes(height, 0, used, matchedHashes); // verify that all bits were consumed (except for the padding caused by serializing it as a byte sequence) if ((used.bitsUsed+7)/8 != matchedChildBits.length || // verify that all hashes were consumed used.hashesUsed != hashes.size()) throw new VerificationException("Got a CPartialMerkleTree that didn't need all the data it provided"); return merkleRoot; } public int getTransactionCount() { return transactionCount; } @Override public boolean equals(Object o) { if (this == o) 
return true; if (o == null || getClass() != o.getClass()) return false; PartialMerkleTree tree = (PartialMerkleTree) o; if (transactionCount != tree.transactionCount) return false; if (!hashes.equals(tree.hashes)) return false; if (!Arrays.equals(matchedChildBits, tree.matchedChildBits)) return false; return true; } @Override public int hashCode() { int result = transactionCount; result = 31 * result + Arrays.hashCode(matchedChildBits); result = 31 * result + hashes.hashCode(); return result; } @Override public String toString() { return "PartialMerkleTree{" + "transactionCount=" + transactionCount + ", matchedChildBits=" + Arrays.toString(matchedChildBits) + ", hashes=" + hashes + '}'; } }
bitbandi/spreadcoinj
core/src/main/java/org/spreadcoinj/core/PartialMerkleTree.java
Java
apache-2.0
12,811
# Copyright (C) 2016 Ross Wightman. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # ============================================================================== from __future__ import print_function from six import iteritems from cv_bridge import CvBridge, CvBridgeError from collections import defaultdict import os import sys import fnmatch import subprocess import cv2 import yaml import rosbag import datetime SEC_PER_NANOSEC = 1e9 MIN_PER_NANOSEC = 6e10 LEFT_CAMERA_TOPIC = "/left_camera/image_color" CENTER_CAMERA_TOPIC = "/center_camera/image_color" RIGHT_CAMERA_TOPIC = "/right_camera/image_color" LEFT_CAMERA_COMPRESSED_TOPIC = LEFT_CAMERA_TOPIC + "/compressed" CENTER_CAMERA_COMPRESSED_TOPIC = CENTER_CAMERA_TOPIC + "/compressed" RIGHT_CAMERA_COMPRESSED_TOPIC = RIGHT_CAMERA_TOPIC + "/compressed" CAMERA_TOPICS = [LEFT_CAMERA_TOPIC, CENTER_CAMERA_TOPIC, RIGHT_CAMERA_TOPIC, LEFT_CAMERA_COMPRESSED_TOPIC, CENTER_CAMERA_COMPRESSED_TOPIC, RIGHT_CAMERA_COMPRESSED_TOPIC] CENTER_CAMERA_TOPICS = [CENTER_CAMERA_TOPIC, CENTER_CAMERA_COMPRESSED_TOPIC] STEERING_TOPIC = "/vehicle/steering_report" GPS_FIX_TOPIC = "/vehicle/gps/fix" GPS_FIX_NEW_TOPIC = "/fix" WHEEL_SPEED_TOPIC = "/vehicle/wheel_speed_report" THROTTLE_TOPIC = "/vehicle/throttle_report" BRAKE_TOPIC = "/vehicle/brake_report" GEAR_TOPIC = "/vehicle/gear_report" IMU_TOPIC = "/vehicle/imu/data_raw" OTHER_TOPICS = [ WHEEL_SPEED_TOPIC, THROTTLE_TOPIC, BRAKE_TOPIC, GEAR_TOPIC, IMU_TOPIC] CAMERA_REMAP_LCCL = { LEFT_CAMERA_TOPIC: CENTER_CAMERA_TOPIC, LEFT_CAMERA_COMPRESSED_TOPIC: CENTER_CAMERA_COMPRESSED_TOPIC, CENTER_CAMERA_TOPIC: LEFT_CAMERA_TOPIC, CENTER_CAMERA_COMPRESSED_TOPIC: LEFT_CAMERA_COMPRESSED_TOPIC, 'left_camera': 'center_camera', 'center_camera': 'left_camera', } def check_remap_hack(filename): if 
fnmatch.fnmatch(filename, "2016-10-25*.bag"): print(filename, 'matches remap hack.') return CAMERA_REMAP_LCCL else: return {} def get_bag_info(bag_file, nanosec=True): info = yaml.load(subprocess.Popen( ['rosbag', 'info', '--yaml', bag_file], stdout=subprocess.PIPE).communicate()[0]) if nanosec: if 'start' in info: info['start'] = int(info['start']*1e9) if 'end' in info: info['end'] = int(info['end']*1e9) if 'duration' in info: info['duration'] = int(info['duration']*1e9) return info def get_topic_names(bag_info_yaml): topic_names = [] topics = bag_info_yaml['topics'] for t in topics: topic_names.append(t['topic']) return topic_names def ns_to_str(timestamp_ns): secs = timestamp_ns / 1e9 dt = datetime.datetime.fromtimestamp(secs) return dt.strftime('%Y-%m-%dT%H:%M:%S.%f') class BagReader(object): def __init__(self, bagfiles, topics, remap_camera={}): self.bagfiles = bagfiles self.topics = topics self._remap_camera = remap_camera def read_messages(self): for f in self.bagfiles: with rosbag.Bag(f, "r") as bag: for topic, msg, _ in bag.read_messages(topics=self.topics): if self._remap_camera and topic in self._remap_camera: topic = self._remap_camera[topic] msg.header.frame_id = self._remap_camera[msg.header.frame_id] yield topic, msg JOIN_THRESH_NS = 10 * MIN_PER_NANOSEC class BagSet(object): def __init__(self, name, bagfiles, filter_topics, remap_camera={}): self.name = name self.files = sorted(bagfiles) self.infos = [] self.topic_map = defaultdict(list) self.start_time = None self.end_time = None self._remap_camera = remap_camera self._process_infos(filter_topics) def _process_infos(self, filter_topics): for f in self.files: print("Extracting bag info %s" % f) sys.stdout.flush() info = get_bag_info(f) if 'start' not in info or 'end' not in info: print('Ignoring info %s without start/end time' % info['path']) continue if self._remap_camera and check_remap_hack(os.path.basename(f)): info['remap'] = self._remap_camera info_start = info['start'] info_end = info['end'] 
if not self.start_time or not self.end_time: self._extend_range(info_start, info_end) elif (info_start - JOIN_THRESH_NS) <= self.end_time and self.start_time <= (info_end + JOIN_THRESH_NS): self._extend_range(info_start, info_end) else: print('Orphaned bag info time range, are there multiple datasets in same folder?') continue self.infos.append(info) if self._remap_camera: filter_topics = self._filter_topics_remap(filter_topics) filtered = [x['topic'] for x in info['topics'] if not filter_topics or x['topic'] in filter_topics] gps_fix_replace = False if GPS_FIX_NEW_TOPIC in filtered and GPS_FIX_TOPIC in filtered: print("New GPS fix topic %s replacing old %s" % (GPS_FIX_NEW_TOPIC, GPS_FIX_TOPIC)) gps_fix_replace = True for x in filtered: if gps_fix_replace and x == GPS_FIX_TOPIC: # skip old gps topic continue self.topic_map[x].append((info['start'], info['path'])) self.topic_map[x] = sorted(self.topic_map[x]) def _extend_range(self, start_time, end_time): if not self.start_time or start_time < self.start_time: self.start_time = start_time if not self.end_time or end_time > self.end_time: self.end_time = end_time def _filter_topics_remap(self, filters): return [self._remap_camera[x] if x in self._remap_camera else x for x in filters] def write_infos(self, dest): for info in self.infos: info_path = os.path.splitext(os.path.basename(info['path']))[0] write_file = os.path.join(dest, info_path + '.yaml') with open(write_file, 'w') as f: yaml.dump(info, f) def get_message_count(self, topic_filter=[]): count = 0 for info in self.infos: if self._remap_camera: topic_filter = self._filter_topics_remap(topic_filter) filtered = [x['topic'] for x in info['topics'] if not topic_filter or x['topic'] in topic_filter] gps_fix_replace = False if GPS_FIX_NEW_TOPIC in filtered and GPS_FIX_TOPIC in filtered: gps_fix_replace = True for topic in info['topics']: if ((not topic_filter or topic['topic'] in topic_filter) and (not gps_fix_replace or topic['topic'] != GPS_FIX_TOPIC)): count += 
topic['messages'] return count def get_readers(self): readers = [] for topic, timestamp_files in iteritems(self.topic_map): starts, files = zip(*timestamp_files) merged = False for r in readers: if r.bagfiles == files: r.topics.append(topic) merged = True if not merged: readers.append(BagReader(bagfiles=files, topics=[topic], remap_camera=self._remap_camera)) return readers def __repr__(self): return "start: %s, end: %s, topic_map: %s" % (self.start_time, self.end_time, str(self.topic_map)) def find_bagsets(directory, filter_topics=[], pattern="*.bag"): sets = [] for root, dirs, files in os.walk(directory): matched_files = [] remap_camera = {} for basename in files: if fnmatch.fnmatch(basename, pattern): if not remap_camera: remap_camera = check_remap_hack(basename) filename = os.path.join(root, basename) matched_files.append(filename) if matched_files: set_name = os.path.relpath(root, directory) bag_set = BagSet(set_name, matched_files, filter_topics, remap_camera) sets.append(bag_set) return sets class BagCursor(object): def __init__(self, reader): self.latest_timestamp = None self.read_count = 0 self.done = False self.vals = [] self.reader = reader self._iter = reader.read_messages() def __bool__(self): return not self.done __nonzero__ = __bool__ # Advance cursor by one element, store element vals list def advance(self, n=1): if self.done: return False try: while n > 0: topic, msg = next(self._iter) self.read_count += 1 timestamp = msg.header.stamp.to_nsec() if not self.latest_timestamp or timestamp > self.latest_timestamp: self.latest_timestamp = timestamp self.vals.append((timestamp, topic, msg)) n -= 1 except StopIteration: self.done = True return not self.done # Advance cursor by relative time duration in nanoseconds def advance_by(self, duration_ns): if not self.latest_timestamp and not self.advance(): return False start_time_ns = self.latest_timestamp while self.advance(): elapsed = self.latest_timestamp - start_time_ns if elapsed >= duration_ns: break 
return not self.done # Advance cursor until specified absolute time in nanoseconds def advance_until(self, end_time_ns): while self.advance(): if self.latest_timestamp >= end_time_ns: break return not self.done def collect_vals(self, dest): dest.extend(self.vals) self.vals = [] def clear_vals(self): self.vals = [] def __repr__(self): return "Cursor for bags: %s, topics: %s" % (str(self.reader.bagfiles), str(self.reader.topics)) class CursorGroup(object): def __init__(self, readers=[], cursors=[]): # a group can be created from readers or existing cursors, if readers: assert not cursors self.cursors = [BagCursor(r) for r in readers] elif cursors: self.cursors = cursors def __bool__(self): for c in self.cursors: if c: return True return False __nonzero__ = __bool__ def advance(self, n=1): all_done = True for c in self.cursors: if c and c.advance(n): all_done = False return not all_done # Advance all cursors by specified duration # Risk of cursors drifting over time from each other def advance_by(self, duration_ns=1*SEC_PER_NANOSEC): all_done = True for c in self.cursors: if c and c.advance_by(duration_ns): all_done = False return not all_done # Advance all cursors up to same end time def advance_until(self, end_time_ns): all_done = True for c in self.cursors: if c and c.advance_until(end_time_ns): all_done = False return not all_done # Advance the first ready cursor in group by specified amount and bring the reset # up to same resulting end time. # Risk of pulling in large amounts of data if leading stream has a large gap. def advance_by_until(self, duration_ns=1*SEC_PER_NANOSEC): all_done = True end_time_ns = None for c in self.cursors: ready = False if c: if not end_time_ns: ready = c.advance_by(duration_ns) end_time_ns = c.latest_timestamp else: ready = c.advance_until(end_time_ns) if ready: all_done = False return not all_done def collect_vals(self, dest): for c in self.cursors: c.collect_vals(dest)
rwightman/udacity-driving-reader
script/bagutils.py
Python
apache-2.0
12,425
# Copyright (c) 2012 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Cells Scheduler """ import random import time from oslo.config import cfg from nova import compute from nova.compute import vm_states from nova.db import base from nova import exception from nova.openstack.common import log as logging from nova.scheduler import rpcapi as scheduler_rpcapi cell_scheduler_opts = [ cfg.IntOpt('scheduler_retries', default=10, help='How many retries when no cells are available.'), cfg.IntOpt('scheduler_retry_delay', default=2, help='How often to retry in seconds when no cells are ' 'available.') ] LOG = logging.getLogger(__name__) CONF = cfg.CONF CONF.register_opts(cell_scheduler_opts, group='cells') class CellsScheduler(base.Base): """The cells scheduler.""" def __init__(self, msg_runner): super(CellsScheduler, self).__init__() self.msg_runner = msg_runner self.state_manager = msg_runner.state_manager self.compute_api = compute.API() self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI() def _create_instances_here(self, ctxt, request_spec): instance_values = request_spec['instance_properties'] num_instances = len(request_spec['instance_uuids']) for i, instance_uuid in enumerate(request_spec['instance_uuids']): instance_values['uuid'] = instance_uuid instance = self.compute_api.create_db_entry_for_new_instance( ctxt, request_spec['instance_type'], request_spec['image'], instance_values, request_spec['security_group'], 
request_spec['block_device_mapping'], num_instances, i) self.msg_runner.instance_update_at_top(ctxt, instance) def _get_possible_cells(self): cells = set(self.state_manager.get_child_cells()) our_cell = self.state_manager.get_my_state() # Include our cell in the list, if we have any capacity info if not cells or our_cell.capacities: cells.add(our_cell) return cells def _run_instance(self, message, host_sched_kwargs): """Attempt to schedule instance(s). If we have no cells to try, raise exception.NoCellsAvailable """ ctxt = message.ctxt request_spec = host_sched_kwargs['request_spec'] # The message we might forward to a child cell cells = self._get_possible_cells() if not cells: raise exception.NoCellsAvailable() cells = list(cells) # Random selection for now random.shuffle(cells) target_cell = cells[0] LOG.debug(_("Scheduling with routing_path=%(routing_path)s"), locals()) if target_cell.is_me: # Need to create instance DB entries as the host scheduler # expects that the instance(s) already exists. self._create_instances_here(ctxt, request_spec) self.scheduler_rpcapi.run_instance(ctxt, **host_sched_kwargs) return self.msg_runner.schedule_run_instance(ctxt, target_cell, host_sched_kwargs) def run_instance(self, message, host_sched_kwargs): """Pick a cell where we should create a new instance.""" try: for i in xrange(max(0, CONF.cells.scheduler_retries) + 1): try: return self._run_instance(message, host_sched_kwargs) except exception.NoCellsAvailable: if i == max(0, CONF.cells.scheduler_retries): raise sleep_time = max(1, CONF.cells.scheduler_retry_delay) LOG.info(_("No cells available when scheduling. 
Will " "retry in %(sleep_time)s second(s)"), locals()) time.sleep(sleep_time) continue except Exception: request_spec = host_sched_kwargs['request_spec'] instance_uuids = request_spec['instance_uuids'] LOG.exception(_("Error scheduling instances %(instance_uuids)s"), locals()) ctxt = message.ctxt for instance_uuid in instance_uuids: self.msg_runner.instance_update_at_top(ctxt, {'uuid': instance_uuid, 'vm_state': vm_states.ERROR}) try: self.db.instance_update(ctxt, instance_uuid, {'vm_state': vm_states.ERROR}) except Exception: pass
sridevikoushik31/openstack
nova/cells/scheduler.py
Python
apache-2.0
5,442
/******************************************************************************* * Copyright 2018 * Language Technology Lab * University of Duisburg-Essen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.unidue.ltl.evaluation.measures.categorial; import java.util.HashSet; import java.util.Map; import java.util.Set; import de.unidue.ltl.evaluation.core.EvaluationData; import de.unidue.ltl.evaluation.core.EvaluationEntry; public abstract class CategoricalMeasure<T> { protected EvaluationData<T> data; public CategoricalMeasure(EvaluationData<T> data) { this.data = data; } protected Category getCategoryBaseValues(T category) { int tp = 0; int fp = 0; int fn = 0; int tn = 0; for (EvaluationEntry<T> entry : data) { T gold = entry.getGold(); T pred = entry.getPredicted(); if (gold.equals(category)) { if (gold.equals(pred)) { tp++; } else { fn++; } } else { if (pred.equals(category)) { fp++; } else { tn++; } } } return new Category(tp, fp, fn, tn); } protected Set<T> getDistinctLabels(EvaluationData<T> data) { Set<T> labels = new HashSet<T>(); for (EvaluationEntry<T> entry : data) { labels.add(entry.getGold()); labels.add(entry.getPredicted()); } return labels; } protected void verifyLabelKnown(T label, Map<T, Double> m) { if (!m.keySet().contains(label)) { throw new IllegalArgumentException("The label [" + label + "] is unknown"); } } }
Horsmann/ltlab-evaluation
ltlab-evaluation-measures/src/main/java/de/unidue/ltl/evaluation/measures/categorial/CategoricalMeasure.java
Java
apache-2.0
2,523
package org.cucina.engine.repository.jpa;

import org.apache.commons.lang3.ArrayUtils;
import org.cucina.core.InstanceFactory;
import org.cucina.core.model.PersistableEntity;
import org.cucina.engine.model.HistoryRecord;
import org.cucina.engine.model.ProcessToken;
import org.cucina.engine.repository.TokenRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.BeanWrapperImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Persistable;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Tuple;
import javax.persistence.criteria.*;

import java.io.Serializable;
import java.util.Collection;
import java.util.List;

/**
 * JPA-backed implementation of {@link TokenRepository}. Persists
 * {@link ProcessToken}s together with the domain objects they track, and
 * resolves tokens via JPA Criteria queries on the token's
 * {@code domainObjectType}/{@code domainObjectId} reference columns.
 *
 * @author $Author: $
 * @version $Revision: $
 */
public class TokenRepositoryImpl
    extends WorkflowRepositorySupport
    implements TokenRepository {
    private static final Logger LOG = LoggerFactory.getLogger(TokenRepositoryImpl.class);
    @PersistenceContext
    private EntityManager entityManager;
    private InstanceFactory instanceFactory;

    /**
     * Creates a new TokenRepositoryImpl object.
     *
     * @param instanceFactory used to resolve an application type name to its
     *        persistent entity class (see {@link #resolveClass}).
     */
    @Autowired
    public TokenRepositoryImpl(InstanceFactory instanceFactory) {
        this.instanceFactory = instanceFactory;
    }

    /**
     * Overrides the container-injected {@link EntityManager}; mainly useful
     * for tests.
     *
     * @param entityManager the entity manager to use.
     */
    public void setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
    }

    /**
     * Removes the token only; the associated domain object is left untouched.
     *
     * @param token the token to delete.
     */
    @Override
    public void delete(ProcessToken token) {
        entityManager.remove(token);
    }

    /**
     * Removes the token and its associated domain object.
     *
     * @param token the token to delete together with its domain object.
     */
    @Override
    public void deleteDeep(ProcessToken token) {
        Object domain = token.getDomainObject();

        entityManager.remove(token);
        entityManager.remove(domain);
    }

    /**
     * Loads the tokens for the given domain object ids of one application
     * type. Tokens and their domain objects are fetched in a single tuple
     * query joining on {@code domainObjectId}.
     *
     * @param applicationType the domain object type name; must not be null.
     * @param ids the domain object ids to look up.
     * @return the populated tokens, or {@code null} when {@code ids} is empty.
     * @throws IllegalArgumentException if no tokens exist for the given ids.
     */
    @Override
    @Transactional
    public Collection<ProcessToken> findByApplicationTypeAndIds(String applicationType,
        Serializable... ids) {
        Assert.notNull(applicationType, "type cannot be null");

        if (ArrayUtils.isEmpty(ids)) {
            LOG.debug("Array ids is empty, returning null");

            return null;
        }

        CriteriaBuilder cb = entityManager.getCriteriaBuilder();
        CriteriaQuery<Tuple> tcq = cb.createTupleQuery();
        Root<ProcessToken> rt = tcq.from(ProcessToken.class);
        Path<Serializable> pid = rt.get("domainObjectId");
        // Match tokens of the requested type whose domain object id is in ids.
        Predicate tokp = cb.and(cb.equal(rt.get("domainObjectType"), applicationType),
                pid.in(ids));
        Class<? extends PersistableEntity> clazz = resolveClass(applicationType);
        Root<? extends PersistableEntity> rc = tcq.from(clazz);
        // Cross-join the entity table and correlate it with the token's id column.
        Predicate clap = cb.equal(pid, rc.get("id"));

        tcq.multiselect(rt, rc).where(cb.and(clap, tokp));

        Collection<Tuple> results = entityManager.createQuery(tcq).getResultList();

        if ((results == null) || (results.size() == 0)) {
            LOG.warn("Failed to find workflow instances for the objects:" + applicationType +
                ":" + ids);
            throw new IllegalArgumentException("Failed to find workflow instances for the objects:" +
                applicationType + ":" + ids);
        }

        // populate(...) is inherited from WorkflowRepositorySupport; presumably it
        // wires each token to its domain object tuple -- confirm in the superclass.
        return populate(results);
    }

    /**
     * Finds the token tracking the given domain object, matching on the
     * object's simple class name and its {@code id} property.
     *
     * @param domain the domain object; must not be null and must expose an
     *        {@code id} property.
     * @return the token with its domain object set, or {@code null} if none
     *         was found (any lookup exception is swallowed and logged at
     *         debug level).
     */
    @Override
    @Transactional
    public ProcessToken findByDomain(Persistable<?> domain) {
        Assert.notNull(domain, "Domain is null");

        BeanWrapper bw = new BeanWrapperImpl(domain);

        Assert.isTrue(bw.isReadableProperty("id"),
            "No 'id' property on object of type '" + domain.getClass() + "'");

        CriteriaBuilder cb = entityManager.getCriteriaBuilder();
        CriteriaQuery<ProcessToken> cq = cb.createQuery(ProcessToken.class);
        Root<ProcessToken> token = cq.from(ProcessToken.class);

        cq.where(cb.and(cb.equal(token.get("domainObjectType"),
                    domain.getClass().getSimpleName()),
                cb.equal(token.get("domainObjectId"), bw.getPropertyValue("id"))));

        try {
            ProcessToken wt = entityManager.createQuery(cq).getSingleResult();

            // Re-attach the in-memory domain object so callers get the instance
            // they passed in rather than a freshly loaded one.
            wt.setDomainObject(domain);

            return wt;
        } catch (Exception e) {
            // NoResultException (and anything else) is treated as "not found".
            if (LOG.isDebugEnabled()) {
                LOG.debug("Ignoring this", e);
            }

            return null;
        }
    }

    /**
     * Lists the ids of all domain objects of one type currently sitting in a
     * given place of a given workflow.
     *
     * @param workflowId the workflow definition id.
     * @param placeId the place (state) id within the workflow.
     * @param applicationType the domain object type name.
     * @return the matching domain object ids.
     */
    @Override
    @Transactional
    public Collection<Long> findDomainIdsByWorkflowIdPlaceIdApplicationType(
        final String workflowId, final String placeId, final String applicationType) {
        CriteriaBuilder cb = entityManager.getCriteriaBuilder();
        CriteriaQuery<Long> cq = cb.createQuery(Long.class);
        Root<ProcessToken> token = cq.from(ProcessToken.class);
        Predicate wi = cb.equal(token.get("workflowDefinitionId"), workflowId);
        Predicate pi = cb.equal(token.get("placeId"), placeId);
        Predicate at = cb.equal(token.get("domainObjectType"), applicationType);
        Predicate and = cb.and(wi, pi, at);

        cq.select(token.<Long>get("domainObjectId"));

        return entityManager.createQuery(cq.where(and)).getResultList();
    }

    /**
     * Returns the history records of the token tracking the given domain
     * object. Note: results are currently unordered (see the commented-out
     * orderBy below).
     */
    @Override
    public List<HistoryRecord> findHistoryRecordsByDomainObjectIdAndDomainObjectType(
        Serializable id, String applicationType) {
        CriteriaBuilder cb = entityManager.getCriteriaBuilder();
        CriteriaQuery<HistoryRecord> cq = cb.createQuery(HistoryRecord.class);
        Root<ProcessToken> token = cq.from(ProcessToken.class);
        Predicate pi = cb.equal(token.get("domainObjectId"), id);
        Predicate at = cb.equal(token.get("domainObjectType"), applicationType);
        Predicate and = cb.and(pi, at);

        cq.select(token.<HistoryRecord>get("histories"));
        //	cq.orderBy(cb.desc(token.get("histories").get("modifiedDate")));
        return entityManager.createQuery(cq.where(and)).getResultList();
    }

    /**
     * Creates or updates depending on whether the token is new.
     */
    @Override
    public void save(ProcessToken token) {
        if (token.isNew()) {
            create(token);
        } else {
            update(token);
        }
    }

    /**
     * Resolves an application type name to its persistent entity class via
     * the {@link InstanceFactory}.
     *
     * @param applicationType the domain object type name.
     * @return the entity class registered for that name.
     */
    protected Class<? extends PersistableEntity> resolveClass(String applicationType) {
        return instanceFactory.getClassType(applicationType);
    }

    // Persists a brand-new token. A transient domain object is persisted first
    // so its generated id can be copied onto the token's reference columns.
    private void create(ProcessToken token) {
        Assert.notNull(token, "token cannot be null");
        Assert.isNull(token.getId(), "This must be a new token");
        Assert.notNull(token.getDomainObject(), "token must have a domainObject");

        if (token.getDomainObject().getId() == null) {
            Persistable<? extends Serializable> domain = token.getDomainObject();

            entityManager.persist(domain);
            Assert.notNull(domain.getId(), "id must now be set on domainObject");
            token.setDomainObject(domain);
        } else {
            entityManager.merge(token.getDomainObject());
        }

        // Now set id and type of domainObject on Token
        token.setDomainObjectId(token.getDomainObject().getId());

        // The type name is read from the domain object's own 'applicationType'
        // property -- presumably declared on PersistableEntity; confirm there.
        BeanWrapper beanWrapper = new BeanWrapperImpl(token.getDomainObject());

        token.setDomainObjectType((String) beanWrapper.getPropertyValue("applicationType"));
        entityManager.persist(token);
    }

    // Merges an existing token after verifying the token/domain id linkage is
    // still consistent.
    private void update(ProcessToken token) {
        Assert.notNull(token, "token cannot be null");
        Assert.notNull(token.getId(), "This must not be a new token");
        Assert.notNull(token.getDomainObject(), "token must have a domainObject");
        Assert.notNull(token.getDomainObject().getId(), "domainObject must have an id");
        Assert.isTrue(token.getDomainObject().getId().equals(token.getDomainObjectId()),
            "domainObject's id must match the reference on token");
        entityManager.merge(token.getDomainObject());
        entityManager.merge(token);
    }
}
cucina/opencucina
engine/src/main/java/org/cucina/engine/repository/jpa/TokenRepositoryImpl.java
Java
apache-2.0
7,978
// Path from this test file back to the jsPsych distribution root.
const root = '../../';

jest.useFakeTimers();

describe('call-function plugin', () => {

  // Load a fresh jsPsych core and plugin before every test so state
  // does not leak between tests.
  beforeEach(() => {
    require(root + 'jspsych.js');
    require(root + 'plugins/jspsych-call-function.js');
  });

  test('loads correctly', () => {
    const pluginType = typeof window.jsPsych.plugins['call-function'];
    expect(pluginType).not.toBe('undefined');
  });

  // SKIP FOR NOW
  test.skip('calls function', () => {
    const callback = () => 1;
    const trial = { type: 'call-function', func: callback };
    jsPsych.init({ timeline: [trial] });
    expect(jsPsych.getDisplayElement().innerHTML).toBe("");
  });
});
JATOS/JATOS_examples
study_assets_root/clock_drawing/jsPsych/tests/plugins/plugin-call-function.test.js
JavaScript
apache-2.0
612
/*
 * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hazelcast.simulator.protocol.connector;

import com.hazelcast.simulator.protocol.core.ClientConnectorManager;
import com.hazelcast.simulator.protocol.core.Response;
import com.hazelcast.simulator.protocol.core.ResponseFuture;
import com.hazelcast.simulator.protocol.core.ResponseType;
import com.hazelcast.simulator.protocol.core.SimulatorAddress;
import com.hazelcast.simulator.protocol.core.SimulatorMessage;
import com.hazelcast.simulator.protocol.core.SimulatorProtocolException;
import com.hazelcast.simulator.protocol.operation.OperationTypeCounter;
import com.hazelcast.simulator.protocol.operation.SimulatorOperation;
import com.hazelcast.simulator.utils.ThreadSpawner;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import org.apache.log4j.Logger;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

import static com.hazelcast.simulator.protocol.core.ResponseFuture.createFutureKey;
import static com.hazelcast.simulator.protocol.core.ResponseFuture.createInstance;
import static com.hazelcast.simulator.protocol.core.ResponseType.EXCEPTION_DURING_OPERATION_EXECUTION;
import static com.hazelcast.simulator.protocol.core.ResponseType.FAILURE_AGENT_NOT_FOUND;
import static com.hazelcast.simulator.protocol.core.ResponseType.SUCCESS;
import static com.hazelcast.simulator.protocol.core.SimulatorAddress.COORDINATOR;
import static com.hazelcast.simulator.protocol.operation.OperationCodec.toJson;
import static com.hazelcast.simulator.protocol.operation.OperationType.getOperationType;
import static com.hazelcast.simulator.utils.CommonUtils.awaitTermination;
import static com.hazelcast.simulator.utils.CommonUtils.joinThread;
import static com.hazelcast.simulator.utils.CommonUtils.sleepMillis;
import static com.hazelcast.simulator.utils.ExecutorFactory.createScheduledThreadPool;
import static java.lang.Math.max;
import static java.lang.Runtime.getRuntime;
import static java.lang.String.format;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Abstract {@link ServerConnector} class for Simulator Agent and Worker.
 *
 * Owns the Netty server bootstrap, a background thread draining an internal
 * message queue (for {@link #submit}), and the ResponseFuture bookkeeping that
 * pairs outgoing messages with their eventual responses.
 */
abstract class AbstractServerConnector implements ServerConnector {

    private static final int MIN_THREAD_POOL_SIZE = 10;
    private static final int DEFAULT_THREAD_POOL_SIZE = max(MIN_THREAD_POOL_SIZE,
            getRuntime().availableProcessors() * 2);

    private static final Logger LOGGER = Logger.getLogger(AbstractServerConnector.class);

    // Sentinel message that tells the MessageQueueThread to stop.
    private static final SimulatorMessage POISON_PILL = new SimulatorMessage(null, null, 0, null, null);

    // Futures for messages written to parents; keyed by createFutureKey(...).
    protected final ConcurrentMap<String, ResponseFuture> futureMap = new ConcurrentHashMap<String, ResponseFuture>();
    protected final SimulatorAddress localAddress;

    private final String className = getClass().getSimpleName();

    // Guards against double start/shutdown.
    private final AtomicBoolean isStarted = new AtomicBoolean();

    private final ClientConnectorManager clientConnectorManager = new ClientConnectorManager();

    // Monotonically increasing id for outgoing messages.
    private final AtomicLong messageIds = new AtomicLong();

    // Futures for messages that went through the internal queue (submit()).
    private final ConcurrentMap<String, ResponseFuture> messageQueueFutures
            = new ConcurrentHashMap<String, ResponseFuture>();
    private final BlockingQueue<SimulatorMessage> messageQueue = new LinkedBlockingQueue<SimulatorMessage>();
    private final MessageQueueThread messageQueueThread = new MessageQueueThread();

    private final int addressIndex;
    private final int port;
    private final EventLoopGroup group;
    private final ScheduledExecutorService executorService;

    private Channel channel;

    AbstractServerConnector(SimulatorAddress localAddress, int port, int threadPoolSize) {
        this(localAddress, port, threadPoolSize,
                createScheduledThreadPool(threadPoolSize, "AbstractServerConnector"));
    }

    AbstractServerConnector(SimulatorAddress localAddress, int port, int threadPoolSize,
                            ScheduledExecutorService executorService) {
        this.localAddress = localAddress;
        // The coordinator has no address index of its own; use 0 for it.
        this.addressIndex = COORDINATOR.equals(localAddress) ? 0 : localAddress.getAddressIndex();
        this.port = port;
        this.group = new NioEventLoopGroup(threadPoolSize);
        this.executorService = executorService;
    }

    /**
     * Lets the concrete connector install its codec/handler chain on each
     * accepted channel.
     */
    abstract void configureServerPipeline(ChannelPipeline pipeline, ServerConnector serverConnector);

    /**
     * Channels of the connected parent(s); used when writing "upwards".
     */
    abstract ChannelGroup getChannelGroup();

    /**
     * Starts the message queue thread and, when a port is configured
     * (port &gt; 0), binds the Netty server socket.
     *
     * @throws SimulatorProtocolException on a second start or start after shutdown.
     */
    @Override
    public void start() {
        if (!isStarted.compareAndSet(false, true)) {
            throw new SimulatorProtocolException(
                    format("%s cannot be started twice or after shutdown!", className));
        }

        messageQueueThread.start();

        if (port > 0) {
            ServerBootstrap bootstrap = getServerBootstrap();
            ChannelFuture future = bootstrap.bind().syncUninterruptibly();
            channel = future.channel();

            LOGGER.info(format("%s %s listens on %s", className, localAddress, channel.localAddress()));
        }
    }

    private ServerBootstrap getServerBootstrap() {
        return new ServerBootstrap()
                .group(group)
                .channel(NioServerSocketChannel.class)
                .localAddress(new InetSocketAddress(port))
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    public void initChannel(SocketChannel channel) {
                        channel.config().setReuseAddress(true);
                        configureServerPipeline(channel.pipeline(), AbstractServerConnector.this);
                    }
                });
    }

    /**
     * Shuts the connector down: client connectors in parallel, then the
     * message queue thread (draining first), the server channel, the Netty
     * event loop and finally the scheduled executor.
     *
     * @throws SimulatorProtocolException on a second shutdown or shutdown
     *         without a prior start.
     */
    @Override
    public void close() {
        LOGGER.info(format("Shutdown of %s...", className));
        if (!isStarted.compareAndSet(true, false)) {
            throw new SimulatorProtocolException(
                    format("%s cannot be shutdown twice or if not been started!", className));
        }

        // Shut down all client connectors concurrently and wait for completion.
        ThreadSpawner spawner = new ThreadSpawner("shutdownClientConnectors", true);
        for (final ClientConnector client : clientConnectorManager.getClientConnectors()) {
            spawner.spawn(new Runnable() {
                @Override
                public void run() {
                    client.shutdown();
                }
            });
        }
        spawner.awaitCompletion();

        messageQueueThread.shutdown();

        if (channel != null) {
            channel.close().syncUninterruptibly();
        }
        group.shutdownGracefully(DEFAULT_SHUTDOWN_QUIET_PERIOD, DEFAULT_SHUTDOWN_TIMEOUT, SECONDS)
                .syncUninterruptibly();

        executorService.shutdown();
        awaitTermination(executorService, 1, TimeUnit.MINUTES);
    }

    @Override
    public SimulatorAddress getAddress() {
        return localAddress;
    }

    @Override
    public int getPort() {
        return port;
    }

    @Override
    public ConcurrentMap<String, ResponseFuture> getFutureMap() {
        return futureMap;
    }

    @Override
    public ResponseFuture submit(SimulatorAddress destination, SimulatorOperation op) {
        checkNoWildcardAllowed(destination);
        return submit(localAddress, destination, op);
    }

    @Override
    public Response invoke(SimulatorAddress destination, SimulatorOperation op) {
        return invoke(localAddress, destination, op);
    }

    /**
     * Sends the operation and blocks until all addressed targets responded;
     * the per-target responses are merged into one {@link Response}.
     */
    @Override
    public Response invoke(SimulatorAddress source, SimulatorAddress destination, SimulatorOperation op) {
        SimulatorMessage message = createSimulatorMessage(source, destination, op);
        Response response = new Response(message);

        List<ResponseFuture> futureList = invokeAsync(message);
        try {
            for (ResponseFuture future : futureList) {
                response.addAllParts(future.get());
            }
        } catch (InterruptedException e) {
            throw new SimulatorProtocolException("ResponseFuture.get() got interrupted!", e);
        }

        return response;
    }

    @Override
    public ResponseFuture invokeAsync(SimulatorAddress destination, SimulatorOperation op) {
        return invokeAsync(localAddress, destination, op);
    }

    @Override
    public ResponseFuture invokeAsync(SimulatorAddress source, SimulatorAddress destination,
                                      SimulatorOperation op) {
        checkNoWildcardAllowed(destination);

        SimulatorMessage message = createSimulatorMessage(source, destination, op);
        return invokeAsync(message).get(0);
    }

    static int getDefaultThreadPoolSize() {
        return DEFAULT_THREAD_POOL_SIZE;
    }

    ClientConnectorManager getClientConnectorManager() {
        return clientConnectorManager;
    }

    EventLoopGroup getEventLoopGroup() {
        return group;
    }

    ScheduledExecutorService getScheduledExecutor() {
        return executorService;
    }

    // Exposed for tests/monitoring: number of messages waiting in the queue.
    int getMessageQueueSizeInternal() {
        return messageQueue.size();
    }

    /**
     * Enqueues the operation for asynchronous sending by the
     * {@link MessageQueueThread} and returns a future that completes when the
     * response arrives.
     */
    @Override
    public ResponseFuture submit(SimulatorAddress source, SimulatorAddress destination,
                                 SimulatorOperation op) {
        SimulatorMessage message = createSimulatorMessage(source, destination, op);
        String futureKey = createFutureKey(source, message.getMessageId(), 0);
        ResponseFuture responseFuture = createInstance(messageQueueFutures, futureKey);
        messageQueue.add(message);
        return responseFuture;
    }

    private SimulatorMessage createSimulatorMessage(SimulatorAddress src, SimulatorAddress dst,
                                                    SimulatorOperation op) {
        return new SimulatorMessage(dst, src, messageIds.incrementAndGet(), getOperationType(op), toJson(op));
    }

    // Routes the message either down to child connectors or up to parents,
    // depending on the relation between our address level and the target's.
    private List<ResponseFuture> invokeAsync(SimulatorMessage message) {
        if (localAddress.getAddressLevel().isParentAddressLevel(message.getDestination().getAddressLevel())) {
            // we have to send the message to the connected children
            return writeAsyncToChildren(message, message.getDestination().getAgentIndex());
        } else {
            // we have to send the message to the connected parents
            return singletonList(writeAsyncToParents(message));
        }
    }

    // agentAddressIndex == 0 means "all agents" (wildcard fan-out).
    private List<ResponseFuture> writeAsyncToChildren(SimulatorMessage message, int agentAddressIndex) {
        List<ResponseFuture> futureList = new ArrayList<ResponseFuture>();
        if (agentAddressIndex == 0) {
            for (ClientConnector agent : getClientConnectorManager().getClientConnectors()) {
                futureList.add(agent.writeAsync(message));
            }
        } else {
            ClientConnector agent = getClientConnectorManager().get(agentAddressIndex);
            if (agent == null) {
                // No such agent: complete the future immediately with a failure.
                futureList.add(createResponseFuture(message, FAILURE_AGENT_NOT_FOUND));
            } else {
                futureList.add(agent.writeAsync(message));
            }
        }
        return futureList;
    }

    private ResponseFuture writeAsyncToParents(SimulatorMessage message) {
        long messageId = message.getMessageId();
        String futureKey = createFutureKey(message.getSource(), messageId, addressIndex);
        ResponseFuture future = createInstance(futureMap, futureKey);
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace(format("[%d] %s created ResponseFuture %s", messageId, localAddress, futureKey));
        }
        OperationTypeCounter.sent(message.getOperationType());
        getChannelGroup().writeAndFlush(message);

        return future;
    }

    // Builds a future that is already completed with the given response type,
    // used to report local failures without a network round trip.
    private ResponseFuture createResponseFuture(SimulatorMessage message, ResponseType responseType) {
        long messageId = message.getMessageId();
        SimulatorAddress destination = message.getDestination();

        String futureKey = createFutureKey(message.getSource(), messageId, destination.getAddressIndex());
        ResponseFuture future = createInstance(futureMap, futureKey);

        future.set(new Response(messageId, destination, message.getSource(), responseType));
        return future;
    }

    private void checkNoWildcardAllowed(SimulatorAddress destination) {
        if (destination.containsWildcard()) {
            throw new IllegalArgumentException("This method is not allowed for a wildcard destination!");
        }
    }

    /**
     * Background thread draining {@link #messageQueue}: sends each queued
     * message, waits for its response and completes the matching future from
     * {@link #messageQueueFutures}. Stops when it dequeues the POISON_PILL.
     */
    private final class MessageQueueThread extends Thread {

        private static final int WAIT_FOR_EMPTY_QUEUE_MILLIS = 100;

        private MessageQueueThread() {
            super("MessageQueueThread");
        }

        @Override
        public void run() {
            while (true) {
                SimulatorMessage message = null;
                ResponseFuture responseFuture = null;
                Response response = null;
                try {
                    message = messageQueue.take();
                    if (POISON_PILL.equals(message)) {
                        LOGGER.info("MessageQueueThread received POISON_PILL and will stop...");
                        break;
                    }

                    String futureKey = createFutureKey(message.getSource(), message.getMessageId(), 0);
                    responseFuture = messageQueueFutures.get(futureKey);

                    // Blocking send: wait for the single response of this message.
                    response = invokeAsync(message).get(0).get();
                } catch (Exception e) {
                    LOGGER.error("Error while sending message from messageQueue", e);
                    if (message != null) {
                        response = new Response(message, EXCEPTION_DURING_OPERATION_EXECUTION);
                    }
                }
                if (response != null) {
                    if (responseFuture != null) {
                        responseFuture.set(response);
                    }
                    ResponseType responseType = response.getFirstErrorResponseType();
                    if (!responseType.equals(SUCCESS)) {
                        LOGGER.error("Got response type " + responseType + " for " + message);
                    }
                }
            }
        }

        /**
         * Enqueues the poison pill, then waits until the queue has drained
         * (polling) before joining the thread.
         */
        public void shutdown() {
            messageQueue.add(POISON_PILL);

            SimulatorMessage message = messageQueue.peek();
            while (message != null) {
                if (!POISON_PILL.equals(message)) {
                    int queueSize = messageQueue.size();
                    LOGGER.debug(format("%d messages pending on messageQueue, first message: %s",
                            queueSize, message));
                }
                sleepMillis(WAIT_FOR_EMPTY_QUEUE_MILLIS);
                message = messageQueue.peek();
            }

            joinThread(messageQueueThread);
        }
    }
}
Donnerbart/hazelcast-simulator
simulator/src/main/java/com/hazelcast/simulator/protocol/connector/AbstractServerConnector.java
Java
apache-2.0
15,847
// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// The CloudStack provider fetches configurations from the userdata available in
// the config-drive.
// NOTE: This provider is still EXPERIMENTAL.

package cloudstack

import (
	"bufio"
	"context"
	"fmt"
	"io/ioutil"
	"net"
	"net/url"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"time"

	"github.com/coreos/ignition/v2/config/v3_4_experimental/types"
	"github.com/coreos/ignition/v2/internal/distro"
	"github.com/coreos/ignition/v2/internal/log"
	"github.com/coreos/ignition/v2/internal/providers/util"
	"github.com/coreos/ignition/v2/internal/resource"
	ut "github.com/coreos/ignition/v2/internal/util"

	"github.com/coreos/vcontext/report"
)

const (
	// Location of the userdata file inside a mounted config drive.
	configDriveUserdataPath = "/cloudstack/userdata/user_data.txt"
	// How long to sleep between polls for a DHCP lease file.
	LeaseRetryInterval = 500 * time.Millisecond
)

// FetchConfig races three sources for the userdata -- the config drive under
// its two possible labels and the DHCP-derived metadata service -- and returns
// the first result. The remaining fetchers are cancelled via the shared
// context once one succeeds.
func FetchConfig(f *resource.Fetcher) (types.Config, report.Report, error) {
	// The fetch-offline approach doesn't work well here because of the "split
	// personality" of this provider. See:
	// https://github.com/coreos/ignition/issues/1081
	if f.Offline {
		return types.Config{}, report.Report{}, resource.ErrNeedNet
	}

	var data []byte
	errChan := make(chan error)
	ctx, cancel := context.WithCancel(context.Background())

	dispatchCount := 0

	// dispatch starts fn in a goroutine; the first fn to succeed stores its
	// result in `data` and cancels the context, failures are reported on
	// errChan so the select loop can count them.
	dispatch := func(name string, fn func() ([]byte, error)) {
		dispatchCount++
		go func() {
			raw, err := fn()
			if err != nil {
				switch err {
				case context.Canceled:
				default:
					f.Logger.Err("failed to fetch config from %s: %v", name, err)
				}
				errChan <- err
				return
			}

			data = raw
			cancel()
		}()
	}

	dispatch("config drive (config)", func() ([]byte, error) {
		return fetchConfigFromDevice(f.Logger, ctx, "config-2")
	})

	dispatch("config drive (CONFIG)", func() ([]byte, error) {
		return fetchConfigFromDevice(f.Logger, ctx, "CONFIG-2")
	})

	dispatch("metadata service", func() ([]byte, error) {
		return fetchConfigFromMetadataService(f)
	})

	// Wait until either a fetcher succeeded (ctx cancelled) or every fetcher
	// has failed.
Loop:
	for {
		select {
		case <-ctx.Done():
			break Loop
		case <-errChan:
			dispatchCount--
			if dispatchCount == 0 {
				f.Logger.Info("couldn't fetch config")
				break Loop
			}
		}
	}

	return util.ParseConfig(f.Logger, data)
}

// fileExists reports whether a stat of path succeeds.
func fileExists(path string) bool {
	_, err := os.Stat(path)
	return (err == nil)
}

// labelExists reports whether a disk with the given filesystem label is
// present under the by-label directory.
func labelExists(label string) bool {
	_, err := getPath(label)
	return (err == nil)
}

// getPath resolves a filesystem label to its /dev/disk/by-label path.
func getPath(label string) (string, error) {
	path := filepath.Join(distro.DiskByLabelDir(), label)

	if fileExists(path) {
		return path, nil
	}

	return "", fmt.Errorf("label not found: %s", label)
}

// findLease polls the systemd-networkd lease files of all interfaces until one
// exists and returns it opened. NOTE: this blocks indefinitely if no lease
// ever appears; cancellation is expected to come from the process lifecycle.
func findLease() (*os.File, error) {
	ifaces, err := net.Interfaces()
	if err != nil {
		return nil, fmt.Errorf("could not list interfaces: %v", err)
	}

	for {
		for _, iface := range ifaces {
			lease, err := os.Open(fmt.Sprintf("/run/systemd/netif/leases/%d", iface.Index))
			if os.IsNotExist(err) {
				continue
			} else if err != nil {
				return nil, err
			} else {
				return lease, nil
			}
		}

		fmt.Printf("No leases found. Waiting...")
		time.Sleep(LeaseRetryInterval)
	}
}

// getDHCPServerAddress extracts the SERVER_ADDRESS field from the first
// available networkd lease file. On CloudStack the DHCP server doubles as the
// metadata service host -- presumably; confirm against the deployment docs.
func getDHCPServerAddress() (string, error) {
	lease, err := findLease()
	if err != nil {
		return "", err
	}
	defer lease.Close()

	var address string
	line := bufio.NewScanner(lease)
	for line.Scan() {
		parts := strings.Split(line.Text(), "=")
		if parts[0] == "SERVER_ADDRESS" && len(parts) == 2 {
			address = parts[1]
			break
		}
	}

	if len(address) == 0 {
		return "", fmt.Errorf("dhcp server address not found in leases")
	}

	return address, nil
}

// fetchConfigFromDevice waits for a config drive with the given label, mounts
// it read-only on a temporary mount point and reads the userdata file from it.
// Returns (nil, nil) when the drive exists but carries no userdata.
func fetchConfigFromDevice(logger *log.Logger, ctx context.Context, label string) ([]byte, error) {
	// Poll for the device, giving up when the shared context is cancelled
	// (i.e. another fetcher already succeeded).
	for !labelExists(label) {
		logger.Debug("config drive (%q) not found. Waiting...", label)
		select {
		case <-time.After(time.Second):
		case <-ctx.Done():
			return nil, ctx.Err()
		}
	}

	path, err := getPath(label)
	if err != nil {
		return nil, err
	}

	logger.Debug("creating temporary mount point")
	mnt, err := ioutil.TempDir("", "ignition-configdrive")
	if err != nil {
		return nil, fmt.Errorf("failed to create temp directory: %v", err)
	}
	defer os.Remove(mnt)

	cmd := exec.Command(distro.MountCmd(), "-o", "ro", "-t", "auto", path, mnt)
	if _, err := logger.LogCmd(cmd, "mounting config drive"); err != nil {
		return nil, err
	}
	defer func() {
		_ = logger.LogOp(
			func() error {
				return ut.UmountPath(mnt)
			},
			"unmounting %q at %q", path, mnt,
		)
	}()

	if !fileExists(filepath.Join(mnt, configDriveUserdataPath)) {
		return nil, nil
	}

	return ioutil.ReadFile(filepath.Join(mnt, configDriveUserdataPath))
}

// fetchConfigFromMetadataService asks the DHCP server host for
// /latest/user-data over plain HTTP. A 404 is treated as "no config".
func fetchConfigFromMetadataService(f *resource.Fetcher) ([]byte, error) {
	addr, err := getDHCPServerAddress()
	if err != nil {
		return nil, err
	}

	metadataServiceUrl := url.URL{
		Scheme: "http",
		Host:   addr,
		Path:   "/latest/user-data",
	}

	res, err := f.FetchToBuffer(metadataServiceUrl, resource.FetchOptions{})

	// the metadata server exists but doesn't contain any actual metadata,
	// assume that there is no config specified
	if err == resource.ErrNotFound {
		return nil, nil
	}

	return res, err
}
coreos/ignition
internal/providers/cloudstack/cloudstack.go
GO
apache-2.0
5,617
/**
 * Copyright 2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the 'License');
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an 'AS IS' BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import 'style/exit.scss'
import {GA} from 'utils/GA'
import {isMobile, is360} from 'utils/Helpers'

/**
 * Wires up the exit button: clicking it (or leaving VR) reloads the page,
 * and the button becomes visible whenever a 360/VR session starts.
 */
export class ExitButton {
	constructor(){
		this.element = document.querySelector('#exitButton')

		const onClick = () => {
			// Report the interaction, then restart the experience.
			GA('ui', 'click', 'exit')
			window.location.reload()
		}
		this.element.addEventListener('click', onClick)

		const sceneEl = document.querySelector('a-scene')
		sceneEl.addEventListener('enter-360', () => this.show())
		sceneEl.addEventListener('enter-vr', () => this.show())
		sceneEl.addEventListener('exit-vr', () => this.hide())
	}
	show(){
		// Defer so the visibility class lands after the mode switch settles.
		setTimeout(() => {
			const desktop = !isMobile()
			if (desktop || (isMobile() && is360())){
				this.element.classList.add('visible')
			}
		}, 10)
	}
	hide(){
		window.location.reload()
		this.element.classList.remove('visible')
	}
}
googlecreativelab/inside-music
src/interface/ExitButton.js
JavaScript
apache-2.0
1,360
/*
 * © Copyright IBM Corp. 2012
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.ibm.domino.osgi.debug.launch;

/**
 * Launch configuration for the plain Domino OSGi framework (no profiles).
 *
 * @author dtaieb
 */
public class DominoOSGILaunchConfiguration extends AbstractDominoOSGILaunchConfiguration {

	/** Relative path of the Domino workspace under the data directory. */
	private static final String WORKSPACE_RELATIVE_PATH = "domino/workspace";

	/** Name of the OSGi directory inside the Domino installation. */
	private static final String OSGI_DIRECTORY_NAME = "osgi";

	/** File name of the shared-library system fragment bundle. */
	private static final String SYSTEM_FRAGMENT_FILE_NAME = "com.ibm.domino.osgi.sharedlib_1.0.0.jar";

	/** Display name of this launch configuration. */
	private static final String DISPLAY_NAME = "Domino OSGi Framework";

	public DominoOSGILaunchConfiguration() {
		// No initialization needed.
	}

	@Override
	protected String getWorkspaceRelativePath() {
		return WORKSPACE_RELATIVE_PATH;
	}

	@Override
	protected String getOSGIDirectoryName() {
		return OSGI_DIRECTORY_NAME;
	}

	@Override
	protected String getSystemFragmentFileName() {
		return SYSTEM_FRAGMENT_FILE_NAME;
	}

	@Override
	public String[] getProfiles() {
		// This configuration does not support profiles.
		return null;
	}

	@Override
	public void setProfile(String selectedProfile) {
		// Intentionally a no-op: profiles are not supported here.
	}

	/* (non-Javadoc)
	 * @see com.ibm.domino.osgi.debug.launch.AbstractDominoOSGILaunchConfiguration#getName()
	 */
	@Override
	public String getName() {
		return DISPLAY_NAME;
	}
}
camac/DominoDebugPlugin
sources/src/com/ibm/domino/osgi/debug/launch/DominoOSGILaunchConfiguration.java
Java
apache-2.0
1,459
/* * Copyright 2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.integtests.fixtures.executer; import java.util.List; import java.util.Set; public interface ExecutionResult { String getOutput(); String getError(); ExecutionResult assertOutputEquals(String expectedOutput, boolean ignoreExtraLines, boolean ignoreLineOrder); ExecutionResult assertOutputContains(String expectedOutput); /** * Returns the tasks have been executed in order (includes tasks that were skipped). Note: ignores buildSrc tasks. */ List<String> getExecutedTasks(); /** * Asserts that exactly the given set of tasks have been executed in the given order. Note: ignores buildSrc tasks. */ ExecutionResult assertTasksExecuted(String... taskPaths); /** * Returns the tasks that were skipped, in an undefined order. Note: ignores buildSrc tasks. */ Set<String> getSkippedTasks(); /** * Asserts that exactly the given set of tasks have been skipped. Note: ignores buildSrc tasks. */ ExecutionResult assertTasksSkipped(String... taskPaths); /** * Asserts the given task has been skipped. Note: ignores buildSrc tasks. */ ExecutionResult assertTaskSkipped(String taskPath); /** * Asserts that exactly the given set of tasks have not been skipped. Note: ignores buildSrc tasks. */ ExecutionResult assertTasksNotSkipped(String... taskPaths); /** * Asserts that the given task has not been skipped. Note: ignores buildSrc tasks. 
*/ ExecutionResult assertTaskNotSkipped(String taskPath); }
FinishX/coolweather
gradle/gradle-2.8/src/internal-integ-testing/org/gradle/integtests/fixtures/executer/ExecutionResult.java
Java
apache-2.0
2,176
/* * Copyright 2012 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit.service; import static org.eclipse.jgit.treewalk.filter.TreeFilter.ANY_DIFF; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.text.MessageFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.DateTools; import org.apache.lucene.document.DateTools.Resolution; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.QueryParser; import org.apache.lucene.search.BooleanClause.Occur; import 
org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.search.highlight.Fragmenter; import org.apache.lucene.search.highlight.Highlighter; import org.apache.lucene.search.highlight.InvalidTokenOffsetsException; import org.apache.lucene.search.highlight.QueryScorer; import org.apache.lucene.search.highlight.SimpleHTMLFormatter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.Version; import org.eclipse.jgit.diff.DiffEntry.ChangeType; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.FileMode; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.RepositoryCache.FileKey; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevTree; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.storage.file.FileBasedConfig; import org.eclipse.jgit.treewalk.EmptyTreeIterator; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.util.FS; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.gitblit.Constants.SearchObjectType; import com.gitblit.IStoredSettings; import com.gitblit.Keys; import com.gitblit.manager.IRepositoryManager; import com.gitblit.models.PathModel.PathChangeModel; import com.gitblit.models.RefModel; import com.gitblit.models.RepositoryModel; import com.gitblit.models.SearchResult; import com.gitblit.utils.ArrayUtils; import com.gitblit.utils.JGitUtils; import com.gitblit.utils.StringUtils; /** * The Lucene service handles indexing and searching repositories. 
*
* @author James Moger
*
*/
public class LuceneService implements Runnable {

    // Bump this to force a full rebuild of all on-disk indexes
    // (see shouldReindex, which compares it to the stored value).
    private static final int INDEX_VERSION = 5;

    // Lucene document field names shared by indexing and searching.
    private static final String FIELD_OBJECT_TYPE = "type";
    private static final String FIELD_PATH = "path";
    private static final String FIELD_COMMIT = "commit";
    private static final String FIELD_BRANCH = "branch";
    private static final String FIELD_SUMMARY = "summary";
    private static final String FIELD_CONTENT = "content";
    private static final String FIELD_AUTHOR = "author";
    private static final String FIELD_COMMITTER = "committer";
    private static final String FIELD_DATE = "date";
    private static final String FIELD_TAG = "tag";

    // Per-repository index bookkeeping file and folder names.
    private static final String CONF_FILE = "lucene.conf";
    private static final String LUCENE_DIR = "lucene";
    private static final String CONF_INDEX = "index";
    private static final String CONF_VERSION = "version";
    private static final String CONF_ALIAS = "aliases";
    private static final String CONF_BRANCH = "branches";

    private static final Version LUCENE_VERSION = Version.LUCENE_46;

    private final Logger logger = LoggerFactory.getLogger(LuceneService.class);

    private final IStoredSettings storedSettings;
    private final IRepositoryManager repositoryManager;
    private final File repositoriesFolder;

    // Cached per-repository searchers and writers; both maps are concurrent,
    // but lifecycle transitions are guarded by the synchronized methods below.
    private final Map<String, IndexSearcher> searchers = new ConcurrentHashMap<String, IndexSearcher>();
    private final Map<String, IndexWriter> writers = new ConcurrentHashMap<String, IndexWriter>();

    // Default blacklist of (binary) file extensions that are never content-indexed.
    private final String luceneIgnoreExtensions = "7z arc arj bin bmp dll doc docx exe gif gz jar jpg lib lzh odg odf odt pdf ppt png so swf xcf xls xlsx zip";
    private Set<String> excludedExtensions;

    /**
     * Creates the Lucene service.
     *
     * @param settings          stored settings; may be null, in which case the built-in
     *                          extension blacklist is used
     * @param repositoryManager provides repository models and open repositories
     */
    public LuceneService(
            IStoredSettings settings,
            IRepositoryManager repositoryManager) {

        this.storedSettings = settings;
        this.repositoryManager = repositoryManager;
        this.repositoriesFolder = repositoryManager.getRepositoriesFolder();
        String exts = luceneIgnoreExtensions;
        if (settings != null) {
            exts = settings.getString(Keys.web.luceneIgnoreExtensions, exts);
        }
        excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(exts));
    }

    /**
     * Run is executed by the Gitblit executor service. Because this is called
     * by an executor service, calls will queue - i.e. there can never be
     * concurrent execution of repository index updates.
     */
    @Override
    public void run() {
        if (!storedSettings.getBoolean(Keys.web.allowLuceneIndexing, true)) {
            // Lucene indexing is disabled
            return;
        }
        // reload the excluded extensions so setting changes take effect
        // without a restart
        String exts = storedSettings.getString(Keys.web.luceneIgnoreExtensions, luceneIgnoreExtensions);
        excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(exts));

        if (repositoryManager.isCollectingGarbage()) {
            // busy collecting garbage, try again later
            return;
        }

        for (String repositoryName: repositoryManager.getRepositoryList()) {
            RepositoryModel model = repositoryManager.getRepositoryModel(repositoryName);
            // only index repositories that have commits and at least one
            // branch selected for indexing
            if (model.hasCommits && !ArrayUtils.isEmpty(model.indexedBranches)) {
                Repository repository = repositoryManager.getRepository(model.name);
                if (repository == null) {
                    if (repositoryManager.isCollectingGarbage(model.name)) {
                        logger.info(MessageFormat.format("Skipping Lucene index of {0}, busy garbage collecting", repositoryName));
                    }
                    continue;
                }
                index(model, repository);
                repository.close();
                // NOTE(review): explicit System.gc() after each repository is
                // unusual; presumably to release mmap'd pack files — confirm.
                System.gc();
            }
        }
    }

    /**
     * Synchronously indexes a repository. This may build a complete index of a
     * repository or it may update an existing index.
*
* @param model
*            the repository model of the repository to index
* @param repository
*            the open repository object
*/
private void index(RepositoryModel model, Repository repository) {
    try {
        if (shouldReindex(repository)) {
            // (re)build the entire index
            IndexResult result = reindex(model, repository);
            if (result.success) {
                if (result.commitCount > 0) {
                    String msg = "Built {0} Lucene index from {1} commits and {2} files across {3} branches in {4} secs";
                    logger.info(MessageFormat.format(msg, model.name, result.commitCount, result.blobCount, result.branchCount, result.duration()));
                }
            } else {
                String msg = "Could not build {0} Lucene index!";
                logger.error(MessageFormat.format(msg, model.name));
            }
        } else {
            // update the index with latest commits
            IndexResult result = updateIndex(model, repository);
            if (result.success) {
                if (result.commitCount > 0) {
                    String msg = "Updated {0} Lucene index with {1} commits and {2} files across {3} branches in {4} secs";
                    logger.info(MessageFormat.format(msg, model.name, result.commitCount, result.blobCount, result.branchCount, result.duration()));
                }
            } else {
                String msg = "Could not update {0} Lucene index!";
                logger.error(MessageFormat.format(msg, model.name));
            }
        }
    } catch (Throwable t) {
        // catch everything so one broken repository cannot abort the
        // executor's indexing cycle
        logger.error(MessageFormat.format("Lucene indexing failure for {0}", model.name), t);
    }
}

/**
 * Close the writer/searcher objects for a repository.
 *
 * @param repositoryName the repository whose cached searcher/writer should be released
 */
public synchronized void close(String repositoryName) {
    try {
        IndexSearcher searcher = searchers.remove(repositoryName);
        if (searcher != null) {
            // the searcher does not own the reader; close the reader directly
            searcher.getIndexReader().close();
        }
    } catch (Exception e) {
        logger.error("Failed to close index searcher for " + repositoryName, e);
    }

    try {
        IndexWriter writer = writers.remove(repositoryName);
        if (writer != null) {
            writer.close();
        }
    } catch (Exception e) {
        logger.error("Failed to close index writer for " + repositoryName, e);
    }
}

/**
 * Close all Lucene indexers.
*
*/
public synchronized void close() {
    // close all writers; close(true) waits for merges to finish
    for (String writer : writers.keySet()) {
        try {
            writers.get(writer).close(true);
        } catch (Throwable t) {
            logger.error("Failed to close Lucene writer for " + writer, t);
        }
    }
    writers.clear();

    // close all searchers (the underlying readers)
    for (String searcher : searchers.keySet()) {
        try {
            searchers.get(searcher).getIndexReader().close();
        } catch (Throwable t) {
            logger.error("Failed to close Lucene searcher for " + searcher, t);
        }
    }
    searchers.clear();
}

/**
 * Deletes the Lucene index for the specified repository.
 *
 * @param repositoryName the repository whose on-disk index and config are removed
 * @return true, if successful
 */
public boolean deleteIndex(String repositoryName) {
    try {
        // close any open writer/searcher
        close(repositoryName);

        // delete the index folder
        File repositoryFolder = FileKey.resolve(new File(repositoriesFolder, repositoryName), FS.DETECTED);
        File luceneIndex = new File(repositoryFolder, LUCENE_DIR);
        if (luceneIndex.exists()) {
            org.eclipse.jgit.util.FileUtils.delete(luceneIndex,
                    org.eclipse.jgit.util.FileUtils.RECURSIVE);
        }
        // delete the config file
        File luceneConfig = new File(repositoryFolder, CONF_FILE);
        if (luceneConfig.exists()) {
            luceneConfig.delete();
        }
        return true;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

/**
 * Returns the author for the commit, if this information is available.
 *
 * @param commit the commit to inspect
 * @return an author or unknown
 */
private String getAuthor(RevCommit commit) {
    String name = "unknown";
    try {
        name = commit.getAuthorIdent().getName();
        if (StringUtils.isEmpty(name)) {
            name = commit.getAuthorIdent().getEmailAddress();
        }
    } catch (NullPointerException n) {
        // deliberately swallowed: a commit with a malformed/missing ident
        // falls back to "unknown" rather than aborting the indexing pass
    }
    return name;
}

/**
 * Returns the committer for the commit, if this information is available.
*
* @param commit the commit to inspect
* @return a committer or unknown
*/
private String getCommitter(RevCommit commit) {
    String name = "unknown";
    try {
        name = commit.getCommitterIdent().getName();
        if (StringUtils.isEmpty(name)) {
            name = commit.getCommitterIdent().getEmailAddress();
        }
    } catch (NullPointerException n) {
        // deliberately swallowed: fall back to "unknown" for malformed idents
    }
    return name;
}

/**
 * Get the tree associated with the given commit.
 *
 * @param walk   the walk used to parse commit headers if the tree is not yet loaded
 * @param commit the commit whose tree is wanted
 * @return tree
 * @throws IOException
 */
private RevTree getTree(final RevWalk walk, final RevCommit commit)
        throws IOException {
    final RevTree tree = commit.getTree();
    if (tree != null) {
        return tree;
    }
    // commit came from a walk that did not parse bodies; parse now
    walk.parseHeaders(commit);
    return commit.getTree();
}

/**
 * Construct a keyname from the branch.
 *
 * @param branchName the branch name (may contain characters illegal in a config key)
 * @return a keyname appropriate for the Git config file format
 */
private String getBranchKey(String branchName) {
    // SHA-1 of the name yields a stable, config-safe key
    return StringUtils.getSHA1(branchName);
}

/**
 * Returns the Lucene configuration for the specified repository.
 *
 * @param repository the repository whose lucene.conf is wanted
 * @return a config object
 */
private FileBasedConfig getConfig(Repository repository) {
    File file = new File(repository.getDirectory(), CONF_FILE);
    FileBasedConfig config = new FileBasedConfig(file, FS.detect());
    return config;
}

/**
 * Reads the Lucene config file for the repository to check the index
 * version. If the index version is different, then rebuild the repository
 * index.
 *
 * @param repository the repository to check
 * @return true if the on-disk index format is different than INDEX_VERSION
 */
private boolean shouldReindex(Repository repository) {
    try {
        FileBasedConfig config = getConfig(repository);
        config.load();
        int indexVersion = config.getInt(CONF_INDEX, CONF_VERSION, 0);
        // reindex if versions do not match
        return indexVersion != INDEX_VERSION;
    } catch (Throwable t) {
        // missing or unreadable config => treat as "needs full reindex"
    }
    return true;
}

/**
 * This completely indexes the repository and will destroy any existing
 * index.
* * @param repositoryName * @param repository * @return IndexResult */ public IndexResult reindex(RepositoryModel model, Repository repository) { IndexResult result = new IndexResult(); if (!deleteIndex(model.name)) { return result; } try { String [] encodings = storedSettings.getStrings(Keys.web.blobEncodings).toArray(new String[0]); FileBasedConfig config = getConfig(repository); Set<String> indexedCommits = new TreeSet<String>(); IndexWriter writer = getIndexWriter(model.name); // build a quick lookup of tags Map<String, List<String>> tags = new HashMap<String, List<String>>(); for (RefModel tag : JGitUtils.getTags(repository, false, -1)) { if (!tag.isAnnotatedTag()) { // skip non-annotated tags continue; } if (!tags.containsKey(tag.getObjectId())) { tags.put(tag.getReferencedObjectId().getName(), new ArrayList<String>()); } tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName); } ObjectReader reader = repository.newObjectReader(); // get the local branches List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1); // sort them by most recently updated Collections.sort(branches, new Comparator<RefModel>() { @Override public int compare(RefModel ref1, RefModel ref2) { return ref2.getDate().compareTo(ref1.getDate()); } }); // reorder default branch to first position RefModel defaultBranch = null; ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository); for (RefModel branch : branches) { if (branch.getObjectId().equals(defaultBranchId)) { defaultBranch = branch; break; } } branches.remove(defaultBranch); branches.add(0, defaultBranch); // walk through each branch for (RefModel branch : branches) { boolean indexBranch = false; if (model.indexedBranches.contains(com.gitblit.Constants.DEFAULT_BRANCH) && branch.equals(defaultBranch)) { // indexing "default" branch indexBranch = true; } else if (branch.getName().startsWith(com.gitblit.Constants.R_GITBLIT)) { // skip Gitblit internal branches indexBranch = false; } else { // 
normal explicit branch check indexBranch = model.indexedBranches.contains(branch.getName()); } // if this branch is not specifically indexed then skip if (!indexBranch) { continue; } String branchName = branch.getName(); RevWalk revWalk = new RevWalk(reader); RevCommit tip = revWalk.parseCommit(branch.getObjectId()); String tipId = tip.getId().getName(); String keyName = getBranchKey(branchName); config.setString(CONF_ALIAS, null, keyName, branchName); config.setString(CONF_BRANCH, null, keyName, tipId); // index the blob contents of the tree TreeWalk treeWalk = new TreeWalk(repository); treeWalk.addTree(tip.getTree()); treeWalk.setRecursive(true); Map<String, ObjectId> paths = new TreeMap<String, ObjectId>(); while (treeWalk.next()) { // ensure path is not in a submodule if (treeWalk.getFileMode(0) != FileMode.GITLINK) { paths.put(treeWalk.getPathString(), treeWalk.getObjectId(0)); } } ByteArrayOutputStream os = new ByteArrayOutputStream(); byte[] tmp = new byte[32767]; RevWalk commitWalk = new RevWalk(reader); commitWalk.markStart(tip); RevCommit commit; while ((paths.size() > 0) && (commit = commitWalk.next()) != null) { TreeWalk diffWalk = new TreeWalk(reader); int parentCount = commit.getParentCount(); switch (parentCount) { case 0: diffWalk.addTree(new EmptyTreeIterator()); break; case 1: diffWalk.addTree(getTree(commitWalk, commit.getParent(0))); break; default: // skip merge commits continue; } diffWalk.addTree(getTree(commitWalk, commit)); diffWalk.setFilter(ANY_DIFF); diffWalk.setRecursive(true); while ((paths.size() > 0) && diffWalk.next()) { String path = diffWalk.getPathString(); if (!paths.containsKey(path)) { continue; } // remove path from set ObjectId blobId = paths.remove(path); result.blobCount++; // index the blob metadata String blobAuthor = getAuthor(commit); String blobCommitter = getCommitter(commit); String blobDate = DateTools.timeToString(commit.getCommitTime() * 1000L, Resolution.MINUTE); Document doc = new Document(); doc.add(new 
Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), StringField.TYPE_STORED)); doc.add(new Field(FIELD_BRANCH, branchName, TextField.TYPE_STORED)); doc.add(new Field(FIELD_COMMIT, commit.getName(), TextField.TYPE_STORED)); doc.add(new Field(FIELD_PATH, path, TextField.TYPE_STORED)); doc.add(new Field(FIELD_DATE, blobDate, StringField.TYPE_STORED)); doc.add(new Field(FIELD_AUTHOR, blobAuthor, TextField.TYPE_STORED)); doc.add(new Field(FIELD_COMMITTER, blobCommitter, TextField.TYPE_STORED)); // determine extension to compare to the extension // blacklist String ext = null; String name = path.toLowerCase(); if (name.indexOf('.') > -1) { ext = name.substring(name.lastIndexOf('.') + 1); } // index the blob content if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) { ObjectLoader ldr = repository.open(blobId, Constants.OBJ_BLOB); InputStream in = ldr.openStream(); int n; while ((n = in.read(tmp)) > 0) { os.write(tmp, 0, n); } in.close(); byte[] content = os.toByteArray(); String str = StringUtils.decodeString(content, encodings); doc.add(new Field(FIELD_CONTENT, str, TextField.TYPE_STORED)); os.reset(); } // add the blob to the index writer.addDocument(doc); } } os.close(); // index the tip commit object if (indexedCommits.add(tipId)) { Document doc = createDocument(tip, tags.get(tipId)); doc.add(new Field(FIELD_BRANCH, branchName, TextField.TYPE_STORED)); writer.addDocument(doc); result.commitCount += 1; result.branchCount += 1; } // traverse the log and index the previous commit objects RevWalk historyWalk = new RevWalk(reader); historyWalk.markStart(historyWalk.parseCommit(tip.getId())); RevCommit rev; while ((rev = historyWalk.next()) != null) { String hash = rev.getId().getName(); if (indexedCommits.add(hash)) { Document doc = createDocument(rev, tags.get(hash)); doc.add(new Field(FIELD_BRANCH, branchName, TextField.TYPE_STORED)); writer.addDocument(doc); result.commitCount += 1; } } } // finished reader.release(); // commit all changes and reset 
the searcher config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION); config.save(); writer.commit(); resetIndexSearcher(model.name); result.success(); } catch (Exception e) { logger.error("Exception while reindexing " + model.name, e); } return result; } /** * Incrementally update the index with the specified commit for the * repository. * * @param repositoryName * @param repository * @param branch * the fully qualified branch name (e.g. refs/heads/master) * @param commit * @return true, if successful */ private IndexResult index(String repositoryName, Repository repository, String branch, RevCommit commit) { IndexResult result = new IndexResult(); try { String [] encodings = storedSettings.getStrings(Keys.web.blobEncodings).toArray(new String[0]); List<PathChangeModel> changedPaths = JGitUtils.getFilesInCommit(repository, commit); String revDate = DateTools.timeToString(commit.getCommitTime() * 1000L, Resolution.MINUTE); IndexWriter writer = getIndexWriter(repositoryName); for (PathChangeModel path : changedPaths) { if (path.isSubmodule()) { continue; } // delete the indexed blob deleteBlob(repositoryName, branch, path.name); // re-index the blob if (!ChangeType.DELETE.equals(path.changeType)) { result.blobCount++; Document doc = new Document(); doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), StringField.TYPE_STORED)); doc.add(new Field(FIELD_BRANCH, branch, TextField.TYPE_STORED)); doc.add(new Field(FIELD_COMMIT, commit.getName(), TextField.TYPE_STORED)); doc.add(new Field(FIELD_PATH, path.path, TextField.TYPE_STORED)); doc.add(new Field(FIELD_DATE, revDate, StringField.TYPE_STORED)); doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), TextField.TYPE_STORED)); doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), TextField.TYPE_STORED)); // determine extension to compare to the extension // blacklist String ext = null; String name = path.name.toLowerCase(); if (name.indexOf('.') > -1) { ext = name.substring(name.lastIndexOf('.') + 1); } 
if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) { // read the blob content String str = JGitUtils.getStringContent(repository, commit.getTree(), path.path, encodings); if (str != null) { doc.add(new Field(FIELD_CONTENT, str, TextField.TYPE_STORED)); writer.addDocument(doc); } } } } writer.commit(); // get any annotated commit tags List<String> commitTags = new ArrayList<String>(); for (RefModel ref : JGitUtils.getTags(repository, false, -1)) { if (ref.isAnnotatedTag() && ref.getReferencedObjectId().equals(commit.getId())) { commitTags.add(ref.displayName); } } // create and write the Lucene document Document doc = createDocument(commit, commitTags); doc.add(new Field(FIELD_BRANCH, branch, TextField.TYPE_STORED)); result.commitCount++; result.success = index(repositoryName, doc); } catch (Exception e) { logger.error(MessageFormat.format("Exception while indexing commit {0} in {1}", commit.getId().getName(), repositoryName), e); } return result; } /** * Delete a blob from the specified branch of the repository index. 
*
* @param repositoryName
* @param branch
* @param path
* @throws Exception
* @return true, if deleted, false if no record was deleted
*/
public boolean deleteBlob(String repositoryName, String branch, String path)
        throws Exception {
    // Two-stage MessageFormat: the first pass substitutes the field NAMES
    // and uses '{' quoting so that {0},{1},{2} survive as literal
    // placeholders; the second pass substitutes the field VALUES, yielding
    // e.g.  type:blob AND branch:"..." AND path:"..."
    String pattern = MessageFormat.format("{0}:'{'0} AND {1}:\"'{'1'}'\" AND {2}:\"'{'2'}'\"", FIELD_OBJECT_TYPE, FIELD_BRANCH, FIELD_PATH);
    String q = MessageFormat.format(pattern, SearchObjectType.blob.name(), branch, path);

    BooleanQuery query = new BooleanQuery();
    StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
    QueryParser qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
    query.add(qp.parse(q), Occur.MUST);

    IndexWriter writer = getIndexWriter(repositoryName);
    // compare doc counts before/after to report whether anything matched
    int numDocsBefore = writer.numDocs();
    writer.deleteDocuments(query);
    writer.commit();
    int numDocsAfter = writer.numDocs();
    if (numDocsBefore == numDocsAfter) {
        logger.debug(MessageFormat.format("no records found to delete {0}", query.toString()));
        return false;
    } else {
        logger.debug(MessageFormat.format("deleted {0} records with {1}", numDocsBefore - numDocsAfter, query.toString()));
        return true;
    }
}

/**
 * Updates a repository index incrementally from the last indexed commits.
* * @param model * @param repository * @return IndexResult */ private IndexResult updateIndex(RepositoryModel model, Repository repository) { IndexResult result = new IndexResult(); try { FileBasedConfig config = getConfig(repository); config.load(); // build a quick lookup of annotated tags Map<String, List<String>> tags = new HashMap<String, List<String>>(); for (RefModel tag : JGitUtils.getTags(repository, false, -1)) { if (!tag.isAnnotatedTag()) { // skip non-annotated tags continue; } if (!tags.containsKey(tag.getObjectId())) { tags.put(tag.getReferencedObjectId().getName(), new ArrayList<String>()); } tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName); } // detect branch deletion // first assume all branches are deleted and then remove each // existing branch from deletedBranches during indexing Set<String> deletedBranches = new TreeSet<String>(); for (String alias : config.getNames(CONF_ALIAS)) { String branch = config.getString(CONF_ALIAS, null, alias); deletedBranches.add(branch); } // get the local branches List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1); // sort them by most recently updated Collections.sort(branches, new Comparator<RefModel>() { @Override public int compare(RefModel ref1, RefModel ref2) { return ref2.getDate().compareTo(ref1.getDate()); } }); // reorder default branch to first position RefModel defaultBranch = null; ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository); for (RefModel branch : branches) { if (branch.getObjectId().equals(defaultBranchId)) { defaultBranch = branch; break; } } branches.remove(defaultBranch); branches.add(0, defaultBranch); // walk through each branches for (RefModel branch : branches) { String branchName = branch.getName(); boolean indexBranch = false; if (model.indexedBranches.contains(com.gitblit.Constants.DEFAULT_BRANCH) && branch.equals(defaultBranch)) { // indexing "default" branch indexBranch = true; } else if 
(branch.getName().startsWith(com.gitblit.Constants.R_GITBLIT)) { // ignore internal Gitblit branches indexBranch = false; } else { // normal explicit branch check indexBranch = model.indexedBranches.contains(branch.getName()); } // if this branch is not specifically indexed then skip if (!indexBranch) { continue; } // remove this branch from the deletedBranches set deletedBranches.remove(branchName); // determine last commit String keyName = getBranchKey(branchName); String lastCommit = config.getString(CONF_BRANCH, null, keyName); List<RevCommit> revs; if (StringUtils.isEmpty(lastCommit)) { // new branch/unindexed branch, get all commits on branch revs = JGitUtils.getRevLog(repository, branchName, 0, -1); } else { // pre-existing branch, get changes since last commit revs = JGitUtils.getRevLog(repository, lastCommit, branchName); } if (revs.size() > 0) { result.branchCount += 1; } // reverse the list of commits so we start with the first commit Collections.reverse(revs); for (RevCommit commit : revs) { // index a commit result.add(index(model.name, repository, branchName, commit)); } // update the config config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION); config.setString(CONF_ALIAS, null, keyName, branchName); config.setString(CONF_BRANCH, null, keyName, branch.getObjectId().getName()); config.save(); } // the deletedBranches set will normally be empty by this point // unless a branch really was deleted and no longer exists if (deletedBranches.size() > 0) { for (String branch : deletedBranches) { IndexWriter writer = getIndexWriter(model.name); writer.deleteDocuments(new Term(FIELD_BRANCH, branch)); writer.commit(); } } result.success = true; } catch (Throwable t) { logger.error(MessageFormat.format("Exception while updating {0} Lucene index", model.name), t); } return result; } /** * Creates a Lucene document for a commit * * @param commit * @param tags * @return a Lucene document */ private Document createDocument(RevCommit commit, List<String> tags) 
{ Document doc = new Document(); doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.commit.name(), StringField.TYPE_STORED)); doc.add(new Field(FIELD_COMMIT, commit.getName(), TextField.TYPE_STORED)); doc.add(new Field(FIELD_DATE, DateTools.timeToString(commit.getCommitTime() * 1000L, Resolution.MINUTE), StringField.TYPE_STORED)); doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), TextField.TYPE_STORED)); doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), TextField.TYPE_STORED)); doc.add(new Field(FIELD_SUMMARY, commit.getShortMessage(), TextField.TYPE_STORED)); doc.add(new Field(FIELD_CONTENT, commit.getFullMessage(), TextField.TYPE_STORED)); if (!ArrayUtils.isEmpty(tags)) { doc.add(new Field(FIELD_TAG, StringUtils.flattenStrings(tags), TextField.TYPE_STORED)); } return doc; } /** * Incrementally index an object for the repository. * * @param repositoryName * @param doc * @return true, if successful */ private boolean index(String repositoryName, Document doc) { try { IndexWriter writer = getIndexWriter(repositoryName); writer.addDocument(doc); writer.commit(); resetIndexSearcher(repositoryName); return true; } catch (Exception e) { logger.error(MessageFormat.format("Exception while incrementally updating {0} Lucene index", repositoryName), e); } return false; } private SearchResult createSearchResult(Document doc, float score, int hitId, int totalHits) throws ParseException { SearchResult result = new SearchResult(); result.hitId = hitId; result.totalHits = totalHits; result.score = score; result.date = DateTools.stringToDate(doc.get(FIELD_DATE)); result.summary = doc.get(FIELD_SUMMARY); result.author = doc.get(FIELD_AUTHOR); result.committer = doc.get(FIELD_COMMITTER); result.type = SearchObjectType.fromName(doc.get(FIELD_OBJECT_TYPE)); result.branch = doc.get(FIELD_BRANCH); result.commitId = doc.get(FIELD_COMMIT); result.path = doc.get(FIELD_PATH); if (doc.get(FIELD_TAG) != null) { result.tags = StringUtils.getStringsFromValue(doc.get(FIELD_TAG)); } 
return result; } private synchronized void resetIndexSearcher(String repository) throws IOException { IndexSearcher searcher = searchers.remove(repository); if (searcher != null) { searcher.getIndexReader().close(); } } /** * Gets an index searcher for the repository. * * @param repository * @return * @throws IOException */ private IndexSearcher getIndexSearcher(String repository) throws IOException { IndexSearcher searcher = searchers.get(repository); if (searcher == null) { IndexWriter writer = getIndexWriter(repository); searcher = new IndexSearcher(DirectoryReader.open(writer, true)); searchers.put(repository, searcher); } return searcher; } /** * Gets an index writer for the repository. The index will be created if it * does not already exist or if forceCreate is specified. * * @param repository * @return an IndexWriter * @throws IOException */ private IndexWriter getIndexWriter(String repository) throws IOException { IndexWriter indexWriter = writers.get(repository); File repositoryFolder = FileKey.resolve(new File(repositoriesFolder, repository), FS.DETECTED); File indexFolder = new File(repositoryFolder, LUCENE_DIR); Directory directory = FSDirectory.open(indexFolder); if (indexWriter == null) { if (!indexFolder.exists()) { indexFolder.mkdirs(); } StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION); IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, analyzer); config.setOpenMode(OpenMode.CREATE_OR_APPEND); indexWriter = new IndexWriter(directory, config); writers.put(repository, indexWriter); } return indexWriter; } /** * Searches the specified repositories for the given text or query * * @param text * if the text is null or empty, null is returned * @param page * the page number to retrieve. page is 1-indexed. * @param pageSize * the number of elements to return for this page * @param repositories * a list of repositories to search. if no repositories are * specified null is returned. 
* @return a list of SearchResults in order from highest to the lowest score
*
*/
public List<SearchResult> search(String text, int page, int pageSize, List<String> repositories) {
    if (ArrayUtils.isEmpty(repositories)) {
        return null;
    }
    // delegate to the varargs overload
    return search(text, page, pageSize, repositories.toArray(new String[0]));
}

/**
 * Searches the specified repositories for the given text or query
 *
 * @param text
 *            if the text is null or empty, null is returned
 * @param page
 *            the page number to retrieve. page is 1-indexed.
 * @param pageSize
 *            the number of elements to return for this page
 * @param repositories
 *            a list of repositories to search. if no repositories are
 *            specified null is returned.
 * @return a list of SearchResults in order from highest to the lowest score
 *
 */
public List<SearchResult> search(String text, int page, int pageSize, String... repositories) {
    if (StringUtils.isEmpty(text)) {
        return null;
    }
    if (ArrayUtils.isEmpty(repositories)) {
        return null;
    }
    Set<SearchResult> results = new LinkedHashSet<SearchResult>();
    StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
    try {
        // default search checks summary and content
        BooleanQuery query = new BooleanQuery();
        QueryParser qp;

        qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
        qp.setAllowLeadingWildcard(true);
        query.add(qp.parse(text), Occur.SHOULD);

        qp = new QueryParser(LUCENE_VERSION, FIELD_CONTENT, analyzer);
        qp.setAllowLeadingWildcard(true);
        query.add(qp.parse(text), Occur.SHOULD);

        IndexSearcher searcher;
        if (repositories.length == 1) {
            // single repository search
            searcher = getIndexSearcher(repositories[0]);
        } else {
            // multiple repository search: compose the per-repository readers
            // into one MultiSourceReader so hits can be mapped back
            List<IndexReader> readers = new ArrayList<IndexReader>();
            for (String repository : repositories) {
                IndexSearcher repositoryIndex = getIndexSearcher(repository);
                readers.add(repositoryIndex.getIndexReader());
            }
            IndexReader[] rdrs = readers.toArray(new IndexReader[readers.size()]);
            MultiSourceReader reader = new MultiSourceReader(rdrs);
            searcher = new IndexSearcher(reader);
        }

        Query rewrittenQuery = searcher.rewrite(query);
        logger.debug(rewrittenQuery.toString());

        TopScoreDocCollector collector = TopScoreDocCollector.create(5000, true);
        searcher.search(rewrittenQuery, collector);
        // page is 1-indexed; clamp to 0 for defensive callers
        int offset = Math.max(0, (page - 1) * pageSize);
        ScoreDoc[] hits = collector.topDocs(offset, pageSize).scoreDocs;
        int totalHits = collector.getTotalHits();
        for (int i = 0; i < hits.length; i++) {
            int docId = hits[i].doc;
            Document doc = searcher.doc(docId);
            SearchResult result = createSearchResult(doc, hits[i].score, offset + i + 1, totalHits);
            if (repositories.length == 1) {
                // single repository search
                result.repository = repositories[0];
            } else {
                // multi-repository search: map the doc id back to its source
                MultiSourceReader reader = (MultiSourceReader) searcher.getIndexReader();
                int index = reader.getSourceIndex(docId);
                result.repository = repositories[index];
            }
            String content = doc.get(FIELD_CONTENT);
            result.fragment = getHighlightedFragment(analyzer, query, content, result);
            results.add(result);
        }
    } catch (Exception e) {
        logger.error(MessageFormat.format("Exception while searching for {0}", text), e);
    }
    return new ArrayList<SearchResult>(results);
}

/**
 * Builds a highlighted HTML fragment for a search hit.
 *
 * @param analyzer the analyzer used for the query
 * @param query    the executed query
 * @param content  the stored content of the hit; may be null
 * @param result   the hit being decorated
 * @return an HTML snippet
 * @throws IOException
 * @throws InvalidTokenOffsetsException
 */
private String getHighlightedFragment(Analyzer analyzer, Query query,
        String content, SearchResult result) throws IOException, InvalidTokenOffsetsException {
    if (content == null) {
        content = "";
    }
    // commits get a longer excerpt than blobs
    int fragmentLength = SearchObjectType.commit == result.type ?
512 : 150; QueryScorer scorer = new QueryScorer(query, "content"); Fragmenter fragmenter = new SimpleSpanFragmenter(scorer, fragmentLength); // use an artificial delimiter for the token String termTag = "!!--["; String termTagEnd = "]--!!"; SimpleHTMLFormatter formatter = new SimpleHTMLFormatter(termTag, termTagEnd); Highlighter highlighter = new Highlighter(formatter, scorer); highlighter.setTextFragmenter(fragmenter); String [] fragments = highlighter.getBestFragments(analyzer, "content", content, 3); if (ArrayUtils.isEmpty(fragments)) { if (SearchObjectType.blob == result.type) { return ""; } // clip commit message String fragment = content; if (fragment.length() > fragmentLength) { fragment = fragment.substring(0, fragmentLength) + "..."; } return "<pre class=\"text\">" + StringUtils.escapeForHtml(fragment, true) + "</pre>"; } // make sure we have unique fragments Set<String> uniqueFragments = new LinkedHashSet<String>(); for (String fragment : fragments) { uniqueFragments.add(fragment); } fragments = uniqueFragments.toArray(new String[uniqueFragments.size()]); StringBuilder sb = new StringBuilder(); for (int i = 0, len = fragments.length; i < len; i++) { String fragment = fragments[i]; String tag = "<pre class=\"text\">"; // resurrect the raw fragment from removing the artificial delimiters String raw = fragment.replace(termTag, "").replace(termTagEnd, ""); // determine position of the raw fragment in the content int pos = content.indexOf(raw); // restore complete first line of fragment int c = pos; while (c > 0) { c--; if (content.charAt(c) == '\n') { break; } } if (c > 0) { // inject leading chunk of first fragment line fragment = content.substring(c + 1, pos) + fragment; } if (SearchObjectType.blob == result.type) { // count lines as offset into the content for this fragment int line = Math.max(1, StringUtils.countLines(content.substring(0, pos))); // create fragment tag with line number and language String lang = ""; String ext = 
StringUtils.getFileExtension(result.path).toLowerCase(); if (!StringUtils.isEmpty(ext)) { // maintain leading space! lang = " lang-" + ext; } tag = MessageFormat.format("<pre class=\"prettyprint linenums:{0,number,0}{1}\">", line, lang); } sb.append(tag); // replace the artificial delimiter with html tags String html = StringUtils.escapeForHtml(fragment, false); html = html.replace(termTag, "<span class=\"highlight\">").replace(termTagEnd, "</span>"); sb.append(html); sb.append("</pre>"); if (i < len - 1) { sb.append("<span class=\"ellipses\">...</span><br/>"); } } return sb.toString(); } /** * Simple class to track the results of an index update. */ private class IndexResult { long startTime = System.currentTimeMillis(); long endTime = startTime; boolean success; int branchCount; int commitCount; int blobCount; void add(IndexResult result) { this.branchCount += result.branchCount; this.commitCount += result.commitCount; this.blobCount += result.blobCount; } void success() { success = true; endTime = System.currentTimeMillis(); } float duration() { return (endTime - startTime)/1000f; } } /** * Custom subclass of MultiReader to identify the source index for a given * doc id. This would not be necessary of there was a public method to * obtain this information. * */ private class MultiSourceReader extends MultiReader { MultiSourceReader(IndexReader [] readers) { super(readers, false); } int getSourceIndex(int docId) { int index = -1; try { index = super.readerIndex(docId); } catch (Exception e) { logger.error("Error getting source index", e); } return index; } } }
culmat/gitblit
src/main/java/com/gitblit/service/LuceneService.java
Java
apache-2.0
42,731
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package node import ( "context" "fmt" "net" "strings" "time" "github.com/onsi/gomega" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/rand" "k8s.io/apimachinery/pkg/util/sets" clientset "k8s.io/client-go/kubernetes" nodectlr "k8s.io/kubernetes/pkg/controller/nodelifecycle" schedulernodeinfo "k8s.io/kubernetes/pkg/scheduler/nodeinfo" e2elog "k8s.io/kubernetes/test/e2e/framework/log" "k8s.io/kubernetes/test/e2e/system" ) const ( // poll is how often to Poll pods, nodes and claims. poll = 2 * time.Second // singleCallTimeout is how long to try single API calls (like 'get' or 'list'). Used to prevent // transient failures from failing tests. singleCallTimeout = 5 * time.Minute // ssh port sshPort = "22" ) // PodNode is a pod-node pair indicating which node a given pod is running on type PodNode struct { // Pod represents pod name Pod string // Node represents node name Node string } // FirstAddress returns the first address of the given type of each node. func FirstAddress(nodelist *v1.NodeList, addrType v1.NodeAddressType) string { for _, n := range nodelist.Items { for _, addr := range n.Status.Addresses { if addr.Type == addrType && addr.Address != "" { return addr.Address } } } return "" } func isNodeConditionSetAsExpected(node *v1.Node, conditionType v1.NodeConditionType, wantTrue, silent bool) bool { // Check the node readiness condition (logging all). 
for _, cond := range node.Status.Conditions { // Ensure that the condition type and the status matches as desired. if cond.Type == conditionType { // For NodeReady condition we need to check Taints as well if cond.Type == v1.NodeReady { hasNodeControllerTaints := false // For NodeReady we need to check if Taints are gone as well taints := node.Spec.Taints for _, taint := range taints { if taint.MatchTaint(nodectlr.UnreachableTaintTemplate) || taint.MatchTaint(nodectlr.NotReadyTaintTemplate) { hasNodeControllerTaints = true break } } if wantTrue { if (cond.Status == v1.ConditionTrue) && !hasNodeControllerTaints { return true } msg := "" if !hasNodeControllerTaints { msg = fmt.Sprintf("Condition %s of node %s is %v instead of %t. Reason: %v, message: %v", conditionType, node.Name, cond.Status == v1.ConditionTrue, wantTrue, cond.Reason, cond.Message) } else { msg = fmt.Sprintf("Condition %s of node %s is %v, but Node is tainted by NodeController with %v. Failure", conditionType, node.Name, cond.Status == v1.ConditionTrue, taints) } if !silent { e2elog.Logf(msg) } return false } // TODO: check if the Node is tainted once we enable NC notReady/unreachable taints by default if cond.Status != v1.ConditionTrue { return true } if !silent { e2elog.Logf("Condition %s of node %s is %v instead of %t. Reason: %v, message: %v", conditionType, node.Name, cond.Status == v1.ConditionTrue, wantTrue, cond.Reason, cond.Message) } return false } if (wantTrue && (cond.Status == v1.ConditionTrue)) || (!wantTrue && (cond.Status != v1.ConditionTrue)) { return true } if !silent { e2elog.Logf("Condition %s of node %s is %v instead of %t. 
Reason: %v, message: %v", conditionType, node.Name, cond.Status == v1.ConditionTrue, wantTrue, cond.Reason, cond.Message) } return false } } if !silent { e2elog.Logf("Couldn't find condition %v on node %v", conditionType, node.Name) } return false } // IsConditionSetAsExpected returns a wantTrue value if the node has a match to the conditionType, otherwise returns an opposite value of the wantTrue with detailed logging. func IsConditionSetAsExpected(node *v1.Node, conditionType v1.NodeConditionType, wantTrue bool) bool { return isNodeConditionSetAsExpected(node, conditionType, wantTrue, false) } // IsConditionSetAsExpectedSilent returns a wantTrue value if the node has a match to the conditionType, otherwise returns an opposite value of the wantTrue. func IsConditionSetAsExpectedSilent(node *v1.Node, conditionType v1.NodeConditionType, wantTrue bool) bool { return isNodeConditionSetAsExpected(node, conditionType, wantTrue, true) } // isConditionUnset returns true if conditions of the given node do not have a match to the given conditionType, otherwise false. func isConditionUnset(node *v1.Node, conditionType v1.NodeConditionType) bool { for _, cond := range node.Status.Conditions { if cond.Type == conditionType { return false } } return true } // Filter filters nodes in NodeList in place, removing nodes that do not // satisfy the given condition func Filter(nodeList *v1.NodeList, fn func(node v1.Node) bool) { var l []v1.Node for _, node := range nodeList.Items { if fn(node) { l = append(l, node) } } nodeList.Items = l } // TotalRegistered returns number of registered Nodes excluding Master Node. func TotalRegistered(c clientset.Interface) (int, error) { nodes, err := waitListSchedulableNodes(c) if err != nil { e2elog.Logf("Failed to list nodes: %v", err) return 0, err } return len(nodes.Items), nil } // TotalReady returns number of ready Nodes excluding Master Node. 
func TotalReady(c clientset.Interface) (int, error) { nodes, err := waitListSchedulableNodes(c) if err != nil { e2elog.Logf("Failed to list nodes: %v", err) return 0, err } // Filter out not-ready nodes. Filter(nodes, func(node v1.Node) bool { return IsConditionSetAsExpected(&node, v1.NodeReady, true) }) return len(nodes.Items), nil } // GetExternalIP returns node external IP concatenated with port 22 for ssh // e.g. 1.2.3.4:22 func GetExternalIP(node *v1.Node) (string, error) { e2elog.Logf("Getting external IP address for %s", node.Name) host := "" for _, a := range node.Status.Addresses { if a.Type == v1.NodeExternalIP && a.Address != "" { host = net.JoinHostPort(a.Address, sshPort) break } } if host == "" { return "", fmt.Errorf("Couldn't get the external IP of host %s with addresses %v", node.Name, node.Status.Addresses) } return host, nil } // GetInternalIP returns node internal IP func GetInternalIP(node *v1.Node) (string, error) { host := "" for _, address := range node.Status.Addresses { if address.Type == v1.NodeInternalIP && address.Address != "" { host = net.JoinHostPort(address.Address, sshPort) break } } if host == "" { return "", fmt.Errorf("Couldn't get the internal IP of host %s with addresses %v", node.Name, node.Status.Addresses) } return host, nil } // GetAddresses returns a list of addresses of the given addressType for the given node func GetAddresses(node *v1.Node, addressType v1.NodeAddressType) (ips []string) { for j := range node.Status.Addresses { nodeAddress := &node.Status.Addresses[j] if nodeAddress.Type == addressType && nodeAddress.Address != "" { ips = append(ips, nodeAddress.Address) } } return } // CollectAddresses returns a list of addresses of the given addressType for the given list of nodes func CollectAddresses(nodes *v1.NodeList, addressType v1.NodeAddressType) []string { ips := []string{} for i := range nodes.Items { ips = append(ips, GetAddresses(&nodes.Items[i], addressType)...) 
} return ips } // PickIP picks one public node IP func PickIP(c clientset.Interface) (string, error) { publicIps, err := GetPublicIps(c) if err != nil { return "", fmt.Errorf("get node public IPs error: %s", err) } if len(publicIps) == 0 { return "", fmt.Errorf("got unexpected number (%d) of public IPs", len(publicIps)) } ip := publicIps[0] return ip, nil } // GetPublicIps returns a public IP list of nodes. func GetPublicIps(c clientset.Interface) ([]string, error) { nodes, err := GetReadySchedulableNodes(c) if err != nil { return nil, fmt.Errorf("get schedulable and ready nodes error: %s", err) } ips := CollectAddresses(nodes, v1.NodeExternalIP) if len(ips) == 0 { // If ExternalIP isn't set, assume the test programs can reach the InternalIP ips = CollectAddresses(nodes, v1.NodeInternalIP) } return ips, nil } // GetReadySchedulableNodes addresses the common use case of getting nodes you can do work on. // 1) Needs to be schedulable. // 2) Needs to be ready. // If EITHER 1 or 2 is not true, most tests will want to ignore the node entirely. // If there are no nodes that are both ready and schedulable, this will return an error. func GetReadySchedulableNodes(c clientset.Interface) (nodes *v1.NodeList, err error) { nodes, err = checkWaitListSchedulableNodes(c) if err != nil { return nil, fmt.Errorf("listing schedulable nodes error: %s", err) } Filter(nodes, func(node v1.Node) bool { return IsNodeSchedulable(&node) && isNodeUntainted(&node) }) if len(nodes.Items) == 0 { return nil, fmt.Errorf("there are currently no ready, schedulable nodes in the cluster") } return nodes, nil } // GetBoundedReadySchedulableNodes is like GetReadySchedulableNodes except that it returns // at most maxNodes nodes. Use this to keep your test case from blowing up when run on a // large cluster. 
func GetBoundedReadySchedulableNodes(c clientset.Interface, maxNodes int) (nodes *v1.NodeList, err error) { nodes, err = GetReadySchedulableNodes(c) if err != nil { return nil, err } if len(nodes.Items) > maxNodes { shuffled := make([]v1.Node, maxNodes) perm := rand.Perm(len(nodes.Items)) for i, j := range perm { if j < len(shuffled) { shuffled[j] = nodes.Items[i] } } nodes.Items = shuffled } return nodes, nil } // GetRandomReadySchedulableNode gets a single randomly-selected node which is available for // running pods on. If there are no available nodes it will return an error. func GetRandomReadySchedulableNode(c clientset.Interface) (*v1.Node, error) { nodes, err := GetReadySchedulableNodes(c) if err != nil { return nil, err } return &nodes.Items[rand.Intn(len(nodes.Items))], nil } // GetReadyNodesIncludingTainted returns all ready nodes, even those which are tainted. // There are cases when we care about tainted nodes // E.g. in tests related to nodes with gpu we care about nodes despite // presence of nvidia.com/gpu=present:NoSchedule taint func GetReadyNodesIncludingTainted(c clientset.Interface) (nodes *v1.NodeList, err error) { nodes, err = checkWaitListSchedulableNodes(c) if err != nil { return nil, fmt.Errorf("listing schedulable nodes error: %s", err) } Filter(nodes, func(node v1.Node) bool { return IsNodeSchedulable(&node) }) return nodes, nil } // GetMasterAndWorkerNodes will return a list masters and schedulable worker nodes func GetMasterAndWorkerNodes(c clientset.Interface) (sets.String, *v1.NodeList, error) { nodes := &v1.NodeList{} masters := sets.NewString() all, err := c.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{}) if err != nil { return nil, nil, fmt.Errorf("get nodes error: %s", err) } for _, n := range all.Items { if system.DeprecatedMightBeMasterNode(n.Name) { masters.Insert(n.Name) } else if IsNodeSchedulable(&n) && isNodeUntainted(&n) { nodes.Items = append(nodes.Items, n) } } return masters, nodes, nil } // isNodeUntainted 
tests whether a fake pod can be scheduled on "node", given its current taints. // TODO: need to discuss wether to return bool and error type func isNodeUntainted(node *v1.Node) bool { return isNodeUntaintedWithNonblocking(node, "") } // isNodeUntaintedWithNonblocking tests whether a fake pod can be scheduled on "node" // but allows for taints in the list of non-blocking taints. func isNodeUntaintedWithNonblocking(node *v1.Node, nonblockingTaints string) bool { fakePod := &v1.Pod{ TypeMeta: metav1.TypeMeta{ Kind: "Pod", APIVersion: "v1", }, ObjectMeta: metav1.ObjectMeta{ Name: "fake-not-scheduled", Namespace: "fake-not-scheduled", }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "fake-not-scheduled", Image: "fake-not-scheduled", }, }, }, } nodeInfo := schedulernodeinfo.NewNodeInfo() // Simple lookup for nonblocking taints based on comma-delimited list. nonblockingTaintsMap := map[string]struct{}{} for _, t := range strings.Split(nonblockingTaints, ",") { if strings.TrimSpace(t) != "" { nonblockingTaintsMap[strings.TrimSpace(t)] = struct{}{} } } if len(nonblockingTaintsMap) > 0 { nodeCopy := node.DeepCopy() nodeCopy.Spec.Taints = []v1.Taint{} for _, v := range node.Spec.Taints { if _, isNonblockingTaint := nonblockingTaintsMap[v.Key]; !isNonblockingTaint { nodeCopy.Spec.Taints = append(nodeCopy.Spec.Taints, v) } } nodeInfo.SetNode(nodeCopy) } else { nodeInfo.SetNode(node) } taints, err := nodeInfo.Taints() if err != nil { e2elog.Failf("Can't test predicates for node %s: %v", node.Name, err) return false } return toleratesTaintsWithNoScheduleNoExecuteEffects(taints, fakePod.Spec.Tolerations) } func toleratesTaintsWithNoScheduleNoExecuteEffects(taints []v1.Taint, tolerations []v1.Toleration) bool { filteredTaints := []v1.Taint{} for _, taint := range taints { if taint.Effect == v1.TaintEffectNoExecute || taint.Effect == v1.TaintEffectNoSchedule { filteredTaints = append(filteredTaints, taint) } } toleratesTaint := func(taint v1.Taint) bool { for _, toleration 
:= range tolerations { if toleration.ToleratesTaint(&taint) { return true } } return false } for _, taint := range filteredTaints { if !toleratesTaint(taint) { return false } } return true } // IsNodeSchedulable returns true if: // 1) doesn't have "unschedulable" field set // 2) it also returns true from IsNodeReady func IsNodeSchedulable(node *v1.Node) bool { if node == nil { return false } return !node.Spec.Unschedulable && IsNodeReady(node) } // IsNodeReady returns true if: // 1) it's Ready condition is set to true // 2) doesn't have NetworkUnavailable condition set to true func IsNodeReady(node *v1.Node) bool { nodeReady := IsConditionSetAsExpected(node, v1.NodeReady, true) networkReady := isConditionUnset(node, v1.NodeNetworkUnavailable) || IsConditionSetAsExpectedSilent(node, v1.NodeNetworkUnavailable, false) return nodeReady && networkReady } // hasNonblockingTaint returns true if the node contains at least // one taint with a key matching the regexp. func hasNonblockingTaint(node *v1.Node, nonblockingTaints string) bool { if node == nil { return false } // Simple lookup for nonblocking taints based on comma-delimited list. nonblockingTaintsMap := map[string]struct{}{} for _, t := range strings.Split(nonblockingTaints, ",") { if strings.TrimSpace(t) != "" { nonblockingTaintsMap[strings.TrimSpace(t)] = struct{}{} } } for _, taint := range node.Spec.Taints { if _, hasNonblockingTaint := nonblockingTaintsMap[taint.Key]; hasNonblockingTaint { return true } } return false } // PodNodePairs return podNode pairs for all pods in a namespace func PodNodePairs(c clientset.Interface, ns string) ([]PodNode, error) { var result []PodNode podList, err := c.CoreV1().Pods(ns).List(context.TODO(), metav1.ListOptions{}) if err != nil { return result, err } for _, pod := range podList.Items { result = append(result, PodNode{ Pod: pod.Name, Node: pod.Spec.NodeName, }) } return result, nil } // GetClusterZones returns the values of zone label collected from all nodes. 
func GetClusterZones(c clientset.Interface) (sets.String, error) { nodes, err := c.CoreV1().Nodes().List(context.TODO(), metav1.ListOptions{}) if err != nil { return nil, fmt.Errorf("Error getting nodes while attempting to list cluster zones: %v", err) } // collect values of zone label from all nodes zones := sets.NewString() for _, node := range nodes.Items { if zone, found := node.Labels[v1.LabelZoneFailureDomain]; found { zones.Insert(zone) } if zone, found := node.Labels[v1.LabelZoneFailureDomainStable]; found { zones.Insert(zone) } } return zones, nil } // CreatePodsPerNodeForSimpleApp creates pods w/ labels. Useful for tests which make a bunch of pods w/o any networking. func CreatePodsPerNodeForSimpleApp(c clientset.Interface, namespace, appName string, podSpec func(n v1.Node) v1.PodSpec, maxCount int) map[string]string { nodes, err := GetBoundedReadySchedulableNodes(c, maxCount) // TODO use wrapper methods in expect.go after removing core e2e dependency on node gomega.ExpectWithOffset(2, err).NotTo(gomega.HaveOccurred()) podLabels := map[string]string{ "app": appName + "-pod", } for i, node := range nodes.Items { e2elog.Logf("%v/%v : Creating container with label app=%v-pod", i, maxCount, appName) _, err := c.CoreV1().Pods(namespace).Create(context.TODO(), &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: fmt.Sprintf(appName+"-pod-%v", i), Labels: podLabels, }, Spec: podSpec(node), }, metav1.CreateOptions{}) // TODO use wrapper methods in expect.go after removing core e2e dependency on node gomega.ExpectWithOffset(2, err).NotTo(gomega.HaveOccurred()) } return podLabels }
mboersma/kubernetes
test/e2e/framework/node/resource.go
GO
apache-2.0
17,669
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) 'use strict'; angular.module('${artifactId}App').factory('Applications', function (Restangular) { return Restangular.service('applications'); });
meruvian/yama-archetypes
starter/src/main/resources/archetype-resources/webapp/app/backend/admin/application/application.service.js
JavaScript
apache-2.0
234
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.streams.util.api.requests.backoff; /** * @see org.apache.streams.util.api.requests.backoff.BackOffStrategy */ public abstract class AbstractBackOffStrategy implements BackOffStrategy { private long baseSleepTime; private long lastSleepTime; private int maxAttempts; private int attemptsCount; /** * A BackOffStrategy that can effectively be used endlessly. * @param baseBackOffTime amount of time back of in seconds */ public AbstractBackOffStrategy(long baseBackOffTime) { this(baseBackOffTime, -1); } /** * A BackOffStrategy that has a limited number of uses before it throws a {@link org.apache.streams.util.api.requests.backoff.BackOffException} * @param baseBackOffTime time to back off in milliseconds, must be greater than 0. * @param maximumNumberOfBackOffAttempts maximum number of attempts, must be grater than 0 or -1. -1 indicates there is no maximum number of attempts. 
*/ public AbstractBackOffStrategy(long baseBackOffTime, int maximumNumberOfBackOffAttempts) { if(baseBackOffTime <= 0) { throw new IllegalArgumentException("backOffTimeInMilliSeconds is not greater than 0 : "+baseBackOffTime); } if(maximumNumberOfBackOffAttempts<=0 && maximumNumberOfBackOffAttempts != -1) { throw new IllegalArgumentException("maximumNumberOfBackOffAttempts is not greater than 0 : "+maximumNumberOfBackOffAttempts); } this.baseSleepTime = baseBackOffTime; this.maxAttempts = maximumNumberOfBackOffAttempts; this.attemptsCount = 0; } @Override public void backOff() throws BackOffException { if(this.attemptsCount++ >= this.maxAttempts && this.maxAttempts != -1) { throw new BackOffException(this.attemptsCount-1, this.lastSleepTime); } else { try { Thread.sleep(this.lastSleepTime = calculateBackOffTime(this.attemptsCount, this.baseSleepTime)); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } } @Override public void reset() { this.attemptsCount = 0; } /** * Calculate the amount of time in milliseconds that the strategy should back off for * @param attemptCount the number of attempts the strategy has backed off. i.e. 1 -> this is the first attempt, 2 -> this is the second attempt, etc. * @param baseSleepTime the minimum amount of time it should back off for in milliseconds * @return the amount of time it should back off in milliseconds */ protected abstract long calculateBackOffTime(int attemptCount, long baseSleepTime); }
robdouglas/incubator_streams_apache
streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java
Java
apache-2.0
3,424
/* * $Header: /var/chroot/cvs/cvs/factsheetDesigner/extern/jakarta-slide-server-src-2.1-iPlus Edit/src/stores/org/apache/slide/store/impl/rdbms/expression/RDBMSNotIsCollectionExpression.java,v 1.2 2006-01-22 22:49:06 peter-cvs Exp $ * $Revision: 1.2 $ * $Date: 2006-01-22 22:49:06 $ * * ==================================================================== * * Copyright 1999-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.slide.store.impl.rdbms.expression; import org.jdom.a.Element; /** */ public class RDBMSNotIsCollectionExpression extends RDBMSCompareExpression { public RDBMSNotIsCollectionExpression(Element element, RDBMSQueryContext context) { super(element, context); } protected String compile() { return "(p" + _tableIndex + ".PROPERTY_NAME = 'resourcetype' AND " + "p" + _tableIndex + ".PROPERTY_NAMESPACE = 'DAV:' AND " + "p" + _tableIndex + ".PROPERTY_VALUE NOT LIKE '%<collection/>%')"; } protected String getPropertyName() { return "resourcetype"; } protected String getPropertyNamespace() { return "DAV:"; } }
integrated/jakarta-slide-server
src/stores/org/apache/slide/store/impl/rdbms/expression/RDBMSNotIsCollectionExpression.java
Java
apache-2.0
1,709
<?php /** * Give up write lock on relation * * @phpstub * * @param resource $relation * * @return int */ function dbplus_unlockrel($relation) { }
schmittjoh/php-stubs
res/php/dbplus/functions/dbplus-unlockrel.php
PHP
apache-2.0
155
/** * Copyright 2021 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import * as Preact from '../../../src/preact'; import {Option, Selector} from './component'; import {PreactBaseElement} from '../../../src/preact/base-element'; import { closestAncestorElementBySelector, createElementWithAttributes, toggleAttribute, tryFocus, } from '../../../src/dom'; import {devAssert} from '../../../src/log'; import {dict} from '../../../src/core/types/object'; import {toArray} from '../../../src/core/types/array'; import {useCallback, useLayoutEffect, useRef} from '../../../src/preact'; export class BaseElement extends PreactBaseElement { /** @override */ init() { const {element} = this; this.optionState = []; // Listen for mutations const mu = new MutationObserver(() => { if (this.isExpectedMutation) { this.isExpectedMutation = false; return; } const {children, options} = getOptions(element, mu); this.optionState = options; this.mutateProps({children, options}); }); mu.observe(element, { attributeFilter: ['option', 'selected', 'disabled'], childList: true, subtree: true, }); // TODO(wg-bento): This hack is in place to prevent doubly rendering. // See https://github.com/ampproject/amp-react-prototype/issues/40. 
const onChangeHandler = (event) => { const {option, value} = event; this.triggerEvent( this.element, 'select', dict({ 'targetOption': option, 'selectedOptions': value, }) ); this.isExpectedMutation = true; this.mutateProps(dict({'value': value})); }; // Return props const {children, value, options} = getOptions(element, mu); this.optionState = options; return dict({ 'as': SelectorShim, 'shimDomElement': element, 'children': children, 'value': value, 'options': options, 'onChange': onChangeHandler, }); } } /** * @param {!Element} element * @param {MutationObserver} mu * @return {!JsonObject} */ function getOptions(element, mu) { const children = []; const options = []; const value = []; const optionChildren = toArray(element.querySelectorAll('[option]')); optionChildren // Skip options that are themselves within an option .filter( (el) => !closestAncestorElementBySelector( devAssert(el.parentElement?.nodeType == 1, 'Expected an element'), '[option]' ) ) .forEach((child, index) => { const option = child.getAttribute('option') || index.toString(); const selected = child.hasAttribute('selected'); const disabled = child.hasAttribute('disabled'); const tabIndex = child.getAttribute('tabindex'); const props = { as: OptionShim, option, disabled, index, onFocus: () => tryFocus(child), role: child.getAttribute('role') || 'option', shimDomElement: child, // TODO(wg-bento): This implementation causes infinite loops on DOM mutation. // See https://github.com/ampproject/amp-react-prototype/issues/40. postRender: () => { // Skip mutations to avoid cycles. 
mu.takeRecords(); }, selected, tabIndex, }; if (selected) { value.push(option); } const optionChild = <Option {...props} />; options.push(option); children.push(optionChild); }); return {value, children, options}; } /** * @param {!SelectorDef.OptionProps} props * @return {PreactDef.Renderable} */ export function OptionShim({ shimDomElement, onClick, onFocus, onKeyDown, selected, disabled, role = 'option', tabIndex, }) { const syncEvent = useCallback( (type, handler) => { if (!handler) { return; } shimDomElement.addEventListener(type, handler); return () => shimDomElement.removeEventListener(type, devAssert(handler)); }, [shimDomElement] ); useLayoutEffect(() => syncEvent('click', onClick), [onClick, syncEvent]); useLayoutEffect(() => syncEvent('focus', onFocus), [onFocus, syncEvent]); useLayoutEffect(() => syncEvent('keydown', onKeyDown), [ onKeyDown, syncEvent, ]); useLayoutEffect(() => { toggleAttribute(shimDomElement, 'selected', !!selected); }, [shimDomElement, selected]); useLayoutEffect(() => { toggleAttribute(shimDomElement, 'disabled', !!disabled); shimDomElement.setAttribute('aria-disabled', !!disabled); }, [shimDomElement, disabled]); useLayoutEffect(() => { shimDomElement.setAttribute('role', role); }, [shimDomElement, role]); useLayoutEffect(() => { if (tabIndex != undefined) { shimDomElement.tabIndex = tabIndex; } }, [shimDomElement, tabIndex]); return <div></div>; } /** * @param {!SelectorDef.Props} props * @return {PreactDef.Renderable} */ function SelectorShim({ shimDomElement, children, form, multiple, name, disabled, onKeyDown, role = 'listbox', tabIndex, value, }) { const input = useRef(null); if (!input.current) { input.current = createElementWithAttributes( shimDomElement.ownerDocument, 'input', { 'hidden': '', } ); } useLayoutEffect(() => { const el = input.current; shimDomElement.insertBefore(el, shimDomElement.firstChild); return () => shimDomElement.removeChild(el); }, [shimDomElement]); const syncAttr = useCallback((attr, value) => { if 
(value) { input.current.setAttribute(attr, value); } else { input.current.removeAttribute(attr); } }, []); useLayoutEffect(() => syncAttr('form', form), [form, syncAttr]); useLayoutEffect(() => syncAttr('name', name), [name, syncAttr]); useLayoutEffect(() => syncAttr('value', value), [value, syncAttr]); useLayoutEffect(() => { if (!onKeyDown) { return; } shimDomElement.addEventListener('keydown', onKeyDown); return () => shimDomElement.removeEventListener('keydown', devAssert(onKeyDown)); }, [shimDomElement, onKeyDown]); useLayoutEffect(() => { toggleAttribute(shimDomElement, 'multiple', !!multiple); shimDomElement.setAttribute('aria-multiselectable', !!multiple); }, [shimDomElement, multiple]); useLayoutEffect(() => { toggleAttribute(shimDomElement, 'disabled', !!disabled); shimDomElement.setAttribute('aria-disabled', !!disabled); }, [shimDomElement, disabled]); useLayoutEffect(() => { shimDomElement.setAttribute('role', role); }, [shimDomElement, role]); useLayoutEffect(() => { if (tabIndex != undefined) { shimDomElement.tabIndex = tabIndex; } }, [shimDomElement, tabIndex]); return <div children={children} />; } /** @override */ BaseElement['Component'] = Selector; /** @override */ BaseElement['detached'] = true; /** @override */ BaseElement['props'] = { 'disabled': {attr: 'disabled', type: 'boolean'}, 'form': {attr: 'form'}, 'multiple': {attr: 'multiple', type: 'boolean'}, 'name': {attr: 'name'}, 'role': {attr: 'role'}, 'tabIndex': {attr: 'tabindex'}, 'keyboardSelectMode': {attr: 'keyboard-select-mode', media: true}, };
nexxtv/amphtml
extensions/amp-selector/1.0/base-element.js
JavaScript
apache-2.0
7,802
package com.zmq.shopmall.fragmen; import android.annotation.SuppressLint; import android.os.Bundle; import android.support.annotation.Nullable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.webkit.WebSettings; import android.webkit.WebView; import android.webkit.WebViewClient; import com.zmq.shopmall.R; import com.zmq.shopmall.R2; import com.zmq.shopmall.base.BaseFragment; import com.zmq.shopmall.widget.ItemWebView; import butterknife.BindView; /** * Created by Administrator on 2017/6/9. */ @SuppressLint("ValidFragment") public class GoodsDetailWebFragment extends BaseFragment { @BindView(R2.id.iwv_goods_detail) ItemWebView iwvGoodsDetail; @BindView(R2.id.wv_goods_detail) WebView wvGoodsDetail; private WebSettings webSettings; private int id; @SuppressLint("ValidFragment") public GoodsDetailWebFragment(int id){ this.id = id; } @Override protected View initContentView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { view = inflater.inflate(R.layout.fragment_goods_detail_web_library, container, false); return view; } @Override protected void initView(View view) { String url = "http://m.okhqb.com/item/description/1000334264.html?fromApp=true"; if (id == 1){ iwvGoodsDetail.setVisibility(View.VISIBLE); wvGoodsDetail.setVisibility(View.GONE); iwvGoodsDetail.setFocusable(false); iwvGoodsDetail.loadUrl(url); webSettings = iwvGoodsDetail.getSettings();//获取webview控制器 iwvGoodsDetail.setWebViewClient(new GoodsDetailWebViewClient()); }else { iwvGoodsDetail.setVisibility(View.GONE); wvGoodsDetail.setVisibility(View.VISIBLE); wvGoodsDetail.setFocusable(false); wvGoodsDetail.loadUrl(url); webSettings = wvGoodsDetail.getSettings();//获取webview控制器 wvGoodsDetail.setWebViewClient(new GoodsDetailWebViewClient()); } webSettings.setLoadWithOverviewMode(true);//是否允许WebView度超出以概览的方式载入页面,默认false。 webSettings.setBuiltInZoomControls(true);//是否使用内置的缩放机制 
webSettings.setLoadsImagesAutomatically(true);//WebView是否下载图片资源,默认为true。 webSettings.setBlockNetworkImage(true);//是否禁止从网络(通过http和https URI schemes访问的资源)下载图片资源,默认值为false。 webSettings.setUseWideViewPort(true);//WebView是否支持HTML的“viewport”标签或者使用wide viewport。 webSettings.setCacheMode(WebSettings.LOAD_NO_CACHE);//重写使用缓存的方式,默认值LOAD_DEFAULT。 } private class GoodsDetailWebViewClient extends WebViewClient { @Override public void onPageFinished(WebView view, String url) { super.onPageFinished(view, url); webSettings.setBlockNetworkImage(false); } } }
dsh923713/Stock
library/src/main/java/com/zmq/shopmall/fragmen/GoodsDetailWebFragment.java
Java
apache-2.0
3,107
# Copyright 2015 Vinicius Chiele. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Message Handler is used to process a message received. """ from .errors import SimpleBusError from .pipeline import PipelineStep class InvokeHandlerStep(PipelineStep): id = 'InvokeHandler' def __init__(self, handlers): self.__handlers = handlers def execute(self, context, next_step): handler = self.__handlers.get(context.message_def.message_cls) if not handler: raise SimpleBusError('No handler found to the message \'%s\'.' % str(type(context.message_def.message_cls))) handler(context.body) next_step()
viniciuschiele/simplebus
simplebus/handlers.py
Python
apache-2.0
1,217
package com.structurizr.analysis; import com.structurizr.model.CodeElement; import com.structurizr.model.Component; import com.structurizr.model.Container; import com.structurizr.util.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.lang.annotation.Annotation; import java.lang.reflect.Modifier; import java.util.*; /** * This is the superclass for a number of component finder strategies. */ public abstract class AbstractComponentFinderStrategy implements ComponentFinderStrategy { private static final Log log = LogFactory.getLog(AbstractComponentFinderStrategy.class); private Set<Component> componentsFound = new HashSet<>(); protected ComponentFinder componentFinder; protected List<SupportingTypesStrategy> supportingTypesStrategies = new ArrayList<>(); private DuplicateComponentStrategy duplicateComponentStrategy = new ThrowExceptionDuplicateComponentStrategy(); protected AbstractComponentFinderStrategy(SupportingTypesStrategy... strategies) { Arrays.stream(strategies).forEach(this::addSupportingTypesStrategy); } protected ComponentFinder getComponentFinder() { return componentFinder; } /** * Sets a reference to the parent component finder. * * @param componentFinder a ComponentFinder instance */ public void setComponentFinder(ComponentFinder componentFinder) { this.componentFinder = componentFinder; } protected TypeRepository getTypeRepository() { return componentFinder.getTypeRepository(); } @Override public void beforeFindComponents() { supportingTypesStrategies.forEach(sts -> sts.setTypeRepository(getTypeRepository())); } @Override public Set<Component> findComponents() { componentsFound.addAll(doFindComponents()); return componentsFound; } /** * A template method into which subclasses can put their component finding code. 
* * @return the Set of Components found, or an empty set if no components were found */ protected abstract Set<Component> doFindComponents(); @Override public void afterFindComponents() { findSupportingTypes(componentsFound); findDependencies(); } private void findSupportingTypes(Set<Component> components) { for (Component component : components) { for (CodeElement codeElement : component.getCode()) { TypeVisibility visibility = TypeUtils.getVisibility(getTypeRepository(), codeElement.getType()); if (visibility != null) { codeElement.setVisibility(visibility.getName()); } TypeCategory category = TypeUtils.getCategory(getTypeRepository(), codeElement.getType()); if (category != null) { codeElement.setCategory(category.getName()); } } for (SupportingTypesStrategy strategy : supportingTypesStrategies) { for (Class<?> type : strategy.findSupportingTypes(component)) { if (!isNestedClass(type) && componentFinder.getContainer().getComponentOfType(type.getCanonicalName()) == null) { CodeElement codeElement = component.addSupportingType(type.getCanonicalName()); TypeVisibility visibility = TypeUtils.getVisibility(getTypeRepository(), codeElement.getType()); if (visibility != null) { codeElement.setVisibility(visibility.getName()); } TypeCategory category = TypeUtils.getCategory(getTypeRepository(), codeElement.getType()); if (category != null) { codeElement.setCategory(category.getName()); } } } } } } private boolean isNestedClass(Class<?> type) { return type != null && type.getName().indexOf('$') > -1; } private void findDependencies() { for (Component component : componentFinder.getContainer().getComponents()) { for (CodeElement codeElement : component.getCode()) { addEfferentDependencies(component, codeElement.getType(), new HashSet<>()); } } } private void addEfferentDependencies(Component component, String type, Set<String> typesVisited) { typesVisited.add(type); for (Class<?> referencedType : getTypeRepository().findReferencedTypes(type)) { try { if 
(!isNestedClass(referencedType)) { String referencedTypeName = referencedType.getCanonicalName(); if (!StringUtils.isNullOrEmpty(referencedTypeName)) { Component destinationComponent = componentFinder.getContainer().getComponentOfType(referencedTypeName); if (destinationComponent != null) { if (component != destinationComponent) { component.uses(destinationComponent, ""); } } else if (!typesVisited.contains(referencedTypeName)) { addEfferentDependencies(component, referencedTypeName, typesVisited); } } } } catch (Throwable t) { log.warn(t); } } } /** * Adds a supporting type strategy to this component finder strategy. * * @param supportingTypesStrategy a SupportingTypesStrategy instance */ public void addSupportingTypesStrategy(SupportingTypesStrategy supportingTypesStrategy) { if (supportingTypesStrategy == null) { throw new IllegalArgumentException("A supporting types strategy must be provided."); } supportingTypesStrategies.add(supportingTypesStrategy); } protected Set<Class<?>> findTypesAnnotatedWith(Class<? extends Annotation> annotation) { return TypeUtils.findTypesAnnotatedWith(annotation, getTypeRepository().getAllTypes()); } protected Set<Component> findClassesWithAnnotation(Class<? extends Annotation> type, String technology) { return findClassesWithAnnotation(type, technology, false); } protected Set<Component> findClassesWithAnnotation(Class<? 
extends Annotation> type, String technology, boolean includePublicTypesOnly) { Set<Component> components = new HashSet<>(); Set<Class<?>> componentTypes = findTypesAnnotatedWith(type); for (Class<?> componentType : componentTypes) { if (!includePublicTypesOnly || Modifier.isPublic(componentType.getModifiers())) { final Container container = getComponentFinder().getContainer(); Component newComponent = addComponent( container, componentType.getSimpleName(), componentType.getCanonicalName(), "", technology); if (newComponent != null) { components.add(newComponent); } } } return components; } public DuplicateComponentStrategy getDuplicateComponentStrategy() { return duplicateComponentStrategy; } public void setDuplicateComponentStrategy(DuplicateComponentStrategy duplicateComponentStrategy) { if (duplicateComponentStrategy != null) { this.duplicateComponentStrategy = duplicateComponentStrategy; } else { this.duplicateComponentStrategy = new ThrowExceptionDuplicateComponentStrategy(); } } protected Component addComponent(Container container, String name, String type, String description, String technology) { if (container.getComponentWithName(name) == null) { return container.addComponent(name, type, description, technology); } else { return duplicateComponentStrategy.duplicateComponentFound(container.getComponentWithName(name), name, type, description, technology); } } }
klu2/structurizr-java
structurizr-analysis/src/com/structurizr/analysis/AbstractComponentFinderStrategy.java
Java
apache-2.0
8,296
#!/usr/bin/python2.7 import nltk f = open("../corpus/data.en","r") f2 = open("../corpus/tagged_data","w") lines = f.readlines() for line in lines: tokens = nltk.word_tokenize(line) tag_list = nltk.pos_tag(tokens) print>> f2, tag_list # f2.write(tag_list)
shyamjvs/cs626_project
python_code/pos_tag_corpus.py
Python
apache-2.0
274
#include "extensions/filters/network/dubbo_proxy/dubbo_protocol_impl.h" #include "envoy/registry/registry.h" #include "common/common/assert.h" namespace Envoy { namespace Extensions { namespace NetworkFilters { namespace DubboProxy { namespace { constexpr uint16_t MagicNumber = 0xdabb; constexpr uint8_t MessageTypeMask = 0x80; constexpr uint8_t EventMask = 0x20; constexpr uint8_t TwoWayMask = 0x40; constexpr uint8_t SerializationTypeMask = 0x1f; constexpr uint64_t FlagOffset = 2; constexpr uint64_t StatusOffset = 3; constexpr uint64_t RequestIDOffset = 4; constexpr uint64_t BodySizeOffset = 12; } // namespace // Consistent with the SerializationType bool isValidSerializationType(SerializationType type) { switch (type) { case SerializationType::Hessian: case SerializationType::Json: break; default: return false; } return true; } // Consistent with the ResponseStatus bool isValidResponseStatus(ResponseStatus status) { switch (status) { case ResponseStatus::Ok: case ResponseStatus::ClientTimeout: case ResponseStatus::ServerTimeout: case ResponseStatus::BadRequest: case ResponseStatus::BadResponse: case ResponseStatus::ServiceNotFound: case ResponseStatus::ServiceError: case ResponseStatus::ClientError: case ResponseStatus::ServerThreadpoolExhaustedError: break; default: return false; } return true; } void parseRequestInfoFromBuffer(Buffer::Instance& data, MessageMetadataSharedPtr metadata) { ASSERT(data.length() >= DubboProtocolImpl::MessageSize); uint8_t flag = data.peekInt<uint8_t>(FlagOffset); bool is_two_way = (flag & TwoWayMask) == TwoWayMask ? 
true : false; SerializationType type = static_cast<SerializationType>(flag & SerializationTypeMask); if (!isValidSerializationType(type)) { throw EnvoyException( fmt::format("invalid dubbo message serialization type {}", static_cast<std::underlying_type<SerializationType>::type>(type))); } if (!is_two_way) { metadata->setMessageType(MessageType::Oneway); } metadata->setSerializationType(type); } void parseResponseInfoFromBuffer(Buffer::Instance& buffer, MessageMetadataSharedPtr metadata) { ASSERT(buffer.length() >= DubboProtocolImpl::MessageSize); ResponseStatus status = static_cast<ResponseStatus>(buffer.peekInt<uint8_t>(StatusOffset)); if (!isValidResponseStatus(status)) { throw EnvoyException( fmt::format("invalid dubbo message response status {}", static_cast<std::underlying_type<ResponseStatus>::type>(status))); } metadata->setResponseStatus(status); } bool DubboProtocolImpl::decode(Buffer::Instance& buffer, Protocol::Context* context, MessageMetadataSharedPtr metadata) { if (!metadata) { throw EnvoyException("invalid metadata parameter"); } if (buffer.length() < DubboProtocolImpl::MessageSize) { return false; } uint16_t magic_number = buffer.peekBEInt<uint16_t>(); if (magic_number != MagicNumber) { throw EnvoyException(fmt::format("invalid dubbo message magic number {}", magic_number)); } uint8_t flag = buffer.peekInt<uint8_t>(FlagOffset); MessageType type = (flag & MessageTypeMask) == MessageTypeMask ? MessageType::Request : MessageType::Response; bool is_event = (flag & EventMask) == EventMask ? true : false; int64_t request_id = buffer.peekBEInt<int64_t>(RequestIDOffset); int32_t body_size = buffer.peekBEInt<int32_t>(BodySizeOffset); // The body size of the heartbeat message is zero. 
if (body_size > MaxBodySize || body_size < 0) { throw EnvoyException(fmt::format("invalid dubbo message size {}", body_size)); } metadata->setMessageType(type); metadata->setRequestId(request_id); if (type == MessageType::Request) { parseRequestInfoFromBuffer(buffer, metadata); } else { parseResponseInfoFromBuffer(buffer, metadata); } context->header_size_ = DubboProtocolImpl::MessageSize; context->body_size_ = body_size; context->is_heartbeat_ = is_event; return true; } bool DubboProtocolImpl::encode(Buffer::Instance& buffer, int32_t body_size, const MessageMetadata& metadata) { switch (metadata.message_type()) { case MessageType::Response: { ASSERT(metadata.response_status().has_value()); buffer.writeBEInt<uint16_t>(MagicNumber); uint8_t flag = static_cast<uint8_t>(metadata.serialization_type()); if (metadata.is_event()) { ASSERT(0 == body_size); flag = flag ^ EventMask; } buffer.writeByte(flag); buffer.writeByte(static_cast<uint8_t>(metadata.response_status().value())); buffer.writeBEInt<uint64_t>(metadata.request_id()); buffer.writeBEInt<uint32_t>(body_size); return true; } case MessageType::Request: { NOT_IMPLEMENTED_GCOVR_EXCL_LINE; } default: NOT_REACHED_GCOVR_EXCL_LINE; } } class DubboProtocolConfigFactory : public ProtocolFactoryBase<DubboProtocolImpl> { public: DubboProtocolConfigFactory() : ProtocolFactoryBase(ProtocolType::Dubbo) {} }; /** * Static registration for the Dubbo protocol. @see RegisterFactory. */ REGISTER_FACTORY(DubboProtocolConfigFactory, NamedProtocolConfigFactory); } // namespace DubboProxy } // namespace NetworkFilters } // namespace Extensions } // namespace Envoy
dnoe/envoy
source/extensions/filters/network/dubbo_proxy/dubbo_protocol_impl.cc
C++
apache-2.0
5,310
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.roots.impl; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.impl.scopes.LibraryRuntimeClasspathScope; import com.intellij.openapi.module.impl.scopes.SdkScope; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModuleExtensionWithSdkOrderEntry; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.ConcurrencyUtil; import com.intellij.util.containers.ContainerUtil; import javax.annotation.Nonnull; import jakarta.inject.Inject; import jakarta.inject.Singleton; import java.util.List; import java.util.concurrent.ConcurrentMap; /** * @author yole */ @Singleton public class LibraryScopeCache { public static LibraryScopeCache getInstance(Project project) { return ServiceManager.getService(project, LibraryScopeCache.class); } private final Project myProject; private final ConcurrentMap<List<Module>, GlobalSearchScope> myLibraryScopes = ContainerUtil.newConcurrentMap(); private final ConcurrentMap<String, GlobalSearchScope> mySdkScopes = ContainerUtil.newConcurrentMap(); private final LibrariesOnlyScope myLibrariesOnlyScope; @Inject public LibraryScopeCache(Project project) { myProject = 
project; myLibrariesOnlyScope = new LibrariesOnlyScope(GlobalSearchScope.allScope(myProject), myProject); } public void clear() { myLibraryScopes.clear(); mySdkScopes.clear(); } @Nonnull public GlobalSearchScope getLibrariesOnlyScope() { return myLibrariesOnlyScope; } @Nonnull public GlobalSearchScope getScopeForLibraryUsedIn(@Nonnull List<Module> modulesLibraryIsUsedIn) { GlobalSearchScope scope = myLibraryScopes.get(modulesLibraryIsUsedIn); if (scope != null) { return scope; } GlobalSearchScope newScope = modulesLibraryIsUsedIn.isEmpty() ? myLibrariesOnlyScope : new LibraryRuntimeClasspathScope(myProject, modulesLibraryIsUsedIn); return ConcurrencyUtil.cacheOrGet(myLibraryScopes, modulesLibraryIsUsedIn, newScope); } @Nonnull public GlobalSearchScope getScopeForSdk(@Nonnull final ModuleExtensionWithSdkOrderEntry sdkOrderEntry) { final String jdkName = sdkOrderEntry.getSdkName(); if (jdkName == null) return GlobalSearchScope.allScope(myProject); GlobalSearchScope scope = mySdkScopes.get(jdkName); if (scope == null) { scope = new SdkScope(myProject, sdkOrderEntry); return ConcurrencyUtil.cacheOrGet(mySdkScopes, jdkName, scope); } return scope; } private static class LibrariesOnlyScope extends GlobalSearchScope { private final GlobalSearchScope myOriginal; private final ProjectFileIndex myIndex; private LibrariesOnlyScope(@Nonnull GlobalSearchScope original, @Nonnull Project project) { super(project); myIndex = ProjectRootManager.getInstance(project).getFileIndex(); myOriginal = original; } @Override public boolean contains(@Nonnull VirtualFile file) { return myOriginal.contains(file) && (myIndex.isInLibraryClasses(file) || myIndex.isInLibrarySource(file)); } @Override public int compare(@Nonnull VirtualFile file1, @Nonnull VirtualFile file2) { return myOriginal.compare(file1, file2); } @Override public boolean isSearchInModuleContent(@Nonnull Module aModule) { return false; } @Override public boolean isSearchOutsideRootModel() { return 
myOriginal.isSearchOutsideRootModel(); } @Override public boolean isSearchInLibraries() { return true; } } }
consulo/consulo
modules/base/indexing-impl/src/main/java/com/intellij/openapi/roots/impl/LibraryScopeCache.java
Java
apache-2.0
4,427
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package clientes; import conexao.ConexaoBanco; import java.sql.PreparedStatement; import java.sql.SQLException; import javax.swing.JOptionPane; /** * * @author carlos */ public class NCliente extends javax.swing.JFrame { ConexaoBanco conec = new ConexaoBanco(); /** * Creates new form NCliente */ public NCliente() { initComponents(); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. */ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); jLabel3 = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); nome = new javax.swing.JTextField(); tel = new javax.swing.JTextField(); end = new javax.swing.JTextField(); email = new javax.swing.JTextField(); jLabel5 = new javax.swing.JLabel(); jLabel6 = new javax.swing.JLabel(); jPanel1 = new javax.swing.JPanel(); Voltar = new javax.swing.JButton(); Salvar = new javax.swing.JButton(); Cancelar = new javax.swing.JButton(); jLabel7 = new javax.swing.JLabel(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setTitle("Cadastro de Cliente"); setResizable(false); jLabel1.setLabelFor(nome); jLabel1.setText("Nome:"); jLabel2.setLabelFor(tel); jLabel2.setText("Telefone:"); jLabel3.setLabelFor(end); jLabel3.setText("Endereço:"); jLabel4.setLabelFor(email); jLabel4.setText("Email:"); jLabel5.setFont(new java.awt.Font("Tahoma", 0, 8)); // NOI18N jLabel5.setText("Shopware v1.0"); jLabel6.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel6.setText("Cadastro de novo cliente"); Voltar.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/Images/1433560560_reply.png"))); // NOI18N Voltar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { VoltarActionPerformed(evt); } }); Voltar.setToolTipText("Voltar"); Salvar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/Floppy.png"))); // NOI18N Salvar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { SalvarActionPerformed(evt); } }); Salvar.setToolTipText("Salvar"); Cancelar.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/Blocked.png"))); // NOI18N Cancelar.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { CancelarActionPerformed(evt); } }); Cancelar.setToolTipText("Cancelar"); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(Voltar, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(Salvar, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(Cancelar, javax.swing.GroupLayout.PREFERRED_SIZE, 63, javax.swing.GroupLayout.PREFERRED_SIZE)) ); jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {Cancelar, Salvar, Voltar}); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() 
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(Cancelar, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(Salvar, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(Voltar, javax.swing.GroupLayout.PREFERRED_SIZE, 53, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jLabel7.setIcon(new javax.swing.ImageIcon(getClass().getResource("/Images/logoShalonSmall2.png"))); // NOI18N javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 56, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(27, 27, 27) .addComponent(jLabel6)) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel1) .addComponent(jLabel2) .addComponent(jLabel3) .addComponent(jLabel4)) .addGap(8, 8, 8) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(tel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 201, Short.MAX_VALUE) .addComponent(end, javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(nome) .addComponent(email, javax.swing.GroupLayout.Alignment.TRAILING))) .addComponent(jPanel1, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addComponent(jLabel5, javax.swing.GroupLayout.Alignment.TRAILING))) .addContainerGap(54, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(34, 34, 34) .addComponent(jLabel6) .addGap(37, 37, 37)) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 61, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(18, 18, 18))) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(nome, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel2) .addComponent(tel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel3) .addComponent(end, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(email, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGap(29, 29, 29) .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, 
javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(26, 26, 26) .addComponent(jLabel5) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); pack(); }// </editor-fold>//GEN-END:initComponents private void SalvarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_SalvarActionPerformed try{ PreparedStatement pst = conec.conn.prepareStatement("insert into estados(nome,tel,end,email)values(?,?,?,?)");//passagem do sql para inserção pst.setString(1,nome.getText());//passagem dos parametros pst.setString(2,tel.getText()); pst.setString(3,end.getText()); pst.setString(4,email.getText()); pst.executeUpdate();//executa a inserçaõ JOptionPane.showMessageDialog(rootPane, "Salvo com sucesso"); } catch (SQLException ex) { JOptionPane.showMessageDialog(rootPane, "Erro na inserção \n Erro:" + ex); } }//GEN-LAST:event_SalvarActionPerformed private void CancelarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_CancelarActionPerformed // TODO add your handling code here: }//GEN-LAST:event_CancelarActionPerformed private void VoltarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_VoltarActionPerformed // TODO add your handling code here: }//GEN-LAST:event_VoltarActionPerformed /** * @param args the command line arguments */ public static void main(String args[]) { /* Set the Nimbus look and feel */ //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) "> /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. 
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html */ try { for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) { if ("Nimbus".equals(info.getName())) { javax.swing.UIManager.setLookAndFeel(info.getClassName()); break; } } } catch (ClassNotFoundException ex) { java.util.logging.Logger.getLogger(NCliente.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (InstantiationException ex) { java.util.logging.Logger.getLogger(NCliente.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (IllegalAccessException ex) { java.util.logging.Logger.getLogger(NCliente.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } catch (javax.swing.UnsupportedLookAndFeelException ex) { java.util.logging.Logger.getLogger(NCliente.class.getName()).log(java.util.logging.Level.SEVERE, null, ex); } //</editor-fold> /* Create and display the form */ java.awt.EventQueue.invokeLater(new Runnable() { public void run() { new NCliente().setVisible(true); } }); } // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton Cancelar; private javax.swing.JButton Salvar; private javax.swing.JButton Voltar; private javax.swing.JTextField email; private javax.swing.JTextField end; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel5; private javax.swing.JLabel jLabel6; private javax.swing.JLabel jLabel7; private javax.swing.JPanel jPanel1; private javax.swing.JTextField nome; private javax.swing.JTextField tel; // End of variables declaration//GEN-END:variables }
Rafiski/Shalon
src/src/clientes/NCliente.java
Java
apache-2.0
13,884
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.appcostal.struts; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; /** * * @author Jesus */ public class EliminarIgualaActionForm extends org.apache.struts.action.ActionForm { private String idiguala; public String getIdiguala() { return idiguala; } public void setIdiguala(String idiguala) { this.idiguala = idiguala; } public EliminarIgualaActionForm() { super(); // TODO Auto-generated constructor stub } /** * This is the action called from the Struts framework. * * @param mapping The ActionMapping used to select this instance. * @param request The HTTP Request we are processing. * @return */ public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { ActionErrors errors = new ActionErrors(); return errors; } }
MAMISHO/AppCostal-Beta
appCostal-beta/src/java/com/appcostal/struts/EliminarIgualaActionForm.java
Java
apache-2.0
1,283
//*****************************************************************************
// Copyright 2017-2020 Intel Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//*****************************************************************************

// Backend-agnostic unit tests for the v0 ScaleShift fused op
// (out = data * scale + shift).  This file is a build-time template:
// the ${BACKEND_NAME} and ${MANIFEST} placeholders are substituted
// once per backend before compilation, so the same tests run against
// every registered backend.

#include <algorithm>
#include <cinttypes>
#include <cmath>
#include <cstdlib>
#include <iterator>
#include <limits>
#include <random>
#include <string>

// clang-format off
// Per-backend float comparison tolerance; only defined when the backend's
// build configuration provides one (picked up by the test utilities).
#ifdef ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#define DEFAULT_FLOAT_TOLERANCE_BITS ${BACKEND_NAME}_FLOAT_TOLERANCE_BITS
#endif
// clang-format on

#include "gtest/gtest.h"
#include "ngraph/check.hpp"
#include "ngraph/ngraph.hpp"
#include "ngraph/op/util/attr_types.hpp"
#include "util/all_close.hpp"
#include "util/all_close_f.hpp"
#include "util/ndarray.hpp"
#include "util/random.hpp"
#include "util/test_case.hpp"
#include "util/test_control.hpp"
#include "util/test_tools.hpp"

using namespace std;
using namespace ngraph;

// Manifest of tests the backend opts out of; substituted at build time.
static string s_manifest = "${MANIFEST}";

// All three inputs share the same {3, 6} shape, so the op applies
// elementwise with no broadcasting: 2 * 2 + 2 == 6 for every element.
NGRAPH_TEST(${BACKEND_NAME}, scale_shift_no_broadcast)
{
    auto data = make_shared<op::v0::Parameter>(element::f32, Shape{3, 6});
    auto scale = make_shared<op::v0::Parameter>(element::f32, Shape{3, 6});
    auto shift = make_shared<op::v0::Parameter>(element::f32, Shape{3, 6});

    auto scale_shift_func = make_shared<op::v0::ScaleShift>(data, scale, shift);
    auto function = make_shared<Function>(OutputVector{scale_shift_func},
                                          ParameterVector{data, scale, shift});
    auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
    // Data
    test_case.add_input<float>(vector<float>(18, 2));
    // Scale
    test_case.add_input<float>(vector<float>(18, 2));
    // Shift
    test_case.add_input<float>(vector<float>(18, 2));
    // output: every element is data * scale + shift = 2 * 2 + 2 = 6
    test_case.add_expected_output<float>(Shape{3, 6}, vector<float>(18, 6));
    test_case.run();
}

// Same computation, but shift is a scalar (Shape{}) and must be
// broadcast across the {3, 6} data/scale tensors by the backend.
NGRAPH_TEST(${BACKEND_NAME}, scale_shift)
{
    auto data = make_shared<op::v0::Parameter>(element::f32, Shape{3, 6});
    auto scale = make_shared<op::v0::Parameter>(element::f32, Shape{3, 6});
    auto shift = make_shared<op::v0::Parameter>(element::f32, Shape{});

    auto scale_shift_func = make_shared<op::v0::ScaleShift>(data, scale, shift);
    auto function = make_shared<Function>(OutputVector{scale_shift_func},
                                          ParameterVector{data, scale, shift});
    auto test_case = test::NgraphTestCase(function, "${BACKEND_NAME}");
    // Data
    test_case.add_input<float>(vector<float>(18, 2));
    // Scale
    test_case.add_input<float>(vector<float>(18, 2));
    // Shift: single scalar value broadcast to all 18 elements
    test_case.add_input<float>(vector<float>{2});
    // output: 2 * 2 + 2 = 6 everywhere
    test_case.add_expected_output<float>(Shape{3, 6}, vector<float>(18, 6));
    test_case.run();
}
NervanaSystems/ngraph
test/backend/scale.in.cpp
C++
apache-2.0
3,226
package jp.go.affrc.naro.wgs.entity.common;

import javax.annotation.Generated;

import org.seasar.extension.jdbc.JdbcManager;
import org.seasar.extension.unit.S2TestCase;

import static jp.go.affrc.naro.wgs.entity.common.names.TAvailableLibNames.*;

/**
 * Test class for {@link TAvailableLib}.
 *
 * Generated by S2JDBC-Gen; exercises a basic lookup by identifier and the
 * outer-join association declared on the entity.
 */
@Generated(value = {"S2JDBC-Gen 2.4.47", "org.seasar.extension.jdbc.gen.internal.model.EntityTestModelFactoryImpl"}, date = "2014/02/26 11:09:29")
public class TAvailableLibTest extends S2TestCase {

    // Injected by the Seasar2 container configured in setUp().
    private JdbcManager jdbcManager;

    /**
     * Performs test set-up: includes the S2JDBC dicon so that
     * {@code jdbcManager} is available to each test.
     *
     * @throws Exception
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        include("s2jdbc.dicon");
    }

    /**
     * Tests retrieval of a {@link TAvailableLib} entity by its identifier.
     *
     * @throws Exception
     */
    public void testFindById() throws Exception {
        jdbcManager.from(TAvailableLib.class).id("aaa").getSingleResult();
    }

    /**
     * Tests the left outer join with the TAvailableGeneratorList association.
     *
     * @throws Exception
     */
    public void testLeftOuterJoin_TAvailableGeneratorList() throws Exception {
        jdbcManager.from(TAvailableLib.class).leftOuterJoin(TAvailableGeneratorList()).id("aaa").getSingleResult();
    }
}
TKiura/WeDGenS
backend/src/test/java/jp/go/affrc/naro/wgs/entity/common/TAvailableLibTest.java
Java
apache-2.0
1,368
"""Tests for aiohttp/protocol.py""" import asyncio import unittest import zlib from unittest import mock import pytest from multidict import CIMultiDict from yarl import URL import aiohttp from aiohttp import http_exceptions, streams from aiohttp.http_parser import (DeflateBuffer, HttpPayloadParser, HttpRequestParserPy, HttpResponseParserPy) try: import brotli except ImportError: brotli = None REQUEST_PARSERS = [HttpRequestParserPy] RESPONSE_PARSERS = [HttpResponseParserPy] try: from aiohttp import _http_parser REQUEST_PARSERS.append(_http_parser.HttpRequestParserC) RESPONSE_PARSERS.append(_http_parser.HttpResponseParserC) except ImportError: # pragma: no cover pass @pytest.fixture def protocol(): return mock.Mock() @pytest.fixture(params=REQUEST_PARSERS) def parser(loop, protocol, request): """Parser implementations""" return request.param(protocol, loop, 8190, 32768, 8190) @pytest.fixture(params=REQUEST_PARSERS) def request_cls(request): """Request Parser class""" return request.param @pytest.fixture(params=RESPONSE_PARSERS) def response(loop, protocol, request): """Parser implementations""" return request.param(protocol, loop, 8190, 32768, 8190) @pytest.fixture(params=RESPONSE_PARSERS) def response_cls(request): """Parser implementations""" return request.param def test_parse_headers(parser): text = b'''GET /test HTTP/1.1\r test: line\r continue\r test2: data\r \r ''' messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 msg = messages[0][0] assert list(msg.headers.items()) == [('test', 'line continue'), ('test2', 'data')] assert msg.raw_headers == ((b'test', b'line continue'), (b'test2', b'data')) assert not msg.should_close assert msg.compression is None assert not msg.upgrade def test_parse(parser): text = b'GET /test HTTP/1.1\r\n\r\n' messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 msg, _ = messages[0] assert msg.compression is None assert not msg.upgrade assert msg.method == 'GET' assert msg.path == '/test' 
assert msg.version == (1, 1) async def test_parse_body(parser): text = b'GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody' messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 _, payload = messages[0] body = await payload.read(4) assert body == b'body' async def test_parse_body_with_CRLF(parser): text = b'\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody' messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 _, payload = messages[0] body = await payload.read(4) assert body == b'body' def test_parse_delayed(parser): text = b'GET /test HTTP/1.1\r\n' messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 0 assert not upgrade messages, upgrade, tail = parser.feed_data(b'\r\n') assert len(messages) == 1 msg = messages[0][0] assert msg.method == 'GET' def test_headers_multi_feed(parser): text1 = b'GET /test HTTP/1.1\r\n' text2 = b'test: line\r' text3 = b'\n continue\r\n\r\n' messages, upgrade, tail = parser.feed_data(text1) assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(text2) assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(text3) assert len(messages) == 1 msg = messages[0][0] assert list(msg.headers.items()) == [('test', 'line continue')] assert msg.raw_headers == ((b'test', b'line continue'),) assert not msg.should_close assert msg.compression is None assert not msg.upgrade def test_headers_split_field(parser): text1 = b'GET /test HTTP/1.1\r\n' text2 = b't' text3 = b'es' text4 = b't: value\r\n\r\n' messages, upgrade, tail = parser.feed_data(text1) messages, upgrade, tail = parser.feed_data(text2) messages, upgrade, tail = parser.feed_data(text3) assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(text4) assert len(messages) == 1 msg = messages[0][0] assert list(msg.headers.items()) == [('test', 'value')] assert msg.raw_headers == ((b'test', b'value'),) assert not msg.should_close assert msg.compression is None assert not msg.upgrade def 
test_parse_headers_multi(parser): text = (b'GET /test HTTP/1.1\r\n' b'Set-Cookie: c1=cookie1\r\n' b'Set-Cookie: c2=cookie2\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 msg = messages[0][0] assert list(msg.headers.items()) == [('Set-Cookie', 'c1=cookie1'), ('Set-Cookie', 'c2=cookie2')] assert msg.raw_headers == ((b'Set-Cookie', b'c1=cookie1'), (b'Set-Cookie', b'c2=cookie2')) assert not msg.should_close assert msg.compression is None def test_conn_default_1_0(parser): text = b'GET /test HTTP/1.0\r\n\r\n' messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_default_1_1(parser): text = b'GET /test HTTP/1.1\r\n\r\n' messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_close(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: close\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_close_1_0(parser): text = (b'GET /test HTTP/1.0\r\n' b'connection: close\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_keep_alive_1_0(parser): text = (b'GET /test HTTP/1.0\r\n' b'connection: keep-alive\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_keep_alive_1_1(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: keep-alive\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_other_1_0(parser): text = (b'GET /test HTTP/1.0\r\n' b'connection: test\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_other_1_1(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: test\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def 
test_request_chunked(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert msg.chunked assert not upgrade assert isinstance(payload, streams.FlowControlStreamReader) def test_conn_upgrade(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: upgrade\r\n' b'upgrade: websocket\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close assert msg.upgrade assert upgrade def test_compression_empty(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-encoding: \r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_compression_deflate(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-encoding: deflate\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == 'deflate' def test_compression_gzip(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-encoding: gzip\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == 'gzip' @pytest.mark.skipif(brotli is None, reason="brotli is not installed") def test_compression_brotli(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-encoding: br\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == 'br' def test_compression_unknown(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-encoding: compress\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_headers_connect(parser): text = (b'CONNECT www.google.com HTTP/1.1\r\n' b'content-length: 0\r\n\r\n') messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert upgrade assert isinstance(payload, streams.FlowControlStreamReader) def test_headers_old_websocket_key1(parser): text = (b'GET /test HTTP/1.1\r\n' 
b'SEC-WEBSOCKET-KEY1: line\r\n\r\n') with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_1(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-length: line\r\n\r\n') with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_2(parser): text = (b'GET /test HTTP/1.1\r\n' b'content-length: -1\r\n\r\n') with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_header(parser): text = (b'GET /test HTTP/1.1\r\n' b'test line\r\n\r\n') with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_name(parser): text = (b'GET /test HTTP/1.1\r\n' b'test[]: line\r\n\r\n') with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_max_header_field_size(parser): name = b'test' * 10 * 1024 text = (b'GET /test HTTP/1.1\r\n' + name + b':data\r\n\r\n') with pytest.raises(http_exceptions.LineTooLong): parser.feed_data(text) def test_max_header_value_size(parser): name = b'test' * 10 * 1024 text = (b'GET /test HTTP/1.1\r\n' b'data:' + name + b'\r\n\r\n') with pytest.raises(http_exceptions.LineTooLong): parser.feed_data(text) def test_max_header_value_size_continuation(parser): name = b'test' * 10 * 1024 text = (b'GET /test HTTP/1.1\r\n' b'data: test\r\n ' + name + b'\r\n\r\n') with pytest.raises(http_exceptions.LineTooLong): parser.feed_data(text) def test_http_request_parser(parser): text = b'GET /path HTTP/1.1\r\n\r\n' messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict(), (), False, None, False, False, URL('/path')) def test_http_request_bad_status_line(parser): text = b'getpath \r\n\r\n' with pytest.raises(http_exceptions.BadStatusLine): parser.feed_data(text) def test_http_request_upgrade(parser): text = (b'GET /test HTTP/1.1\r\n' b'connection: upgrade\r\n' b'upgrade: websocket\r\n\r\n' b'some raw data') messages, 
upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close assert msg.upgrade assert upgrade assert tail == b'some raw data' def test_http_request_parser_utf8(parser): text = 'GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n'.encode('utf-8') messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict([('X-TEST', 'тест')]), ((b'x-test', 'тест'.encode('utf-8')),), False, None, False, False, URL('/path')) def test_http_request_parser_non_utf8(parser): text = 'GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n'.encode('cp1251') msg = parser.feed_data(text)[0][0][0] assert msg == ('GET', '/path', (1, 1), CIMultiDict([('X-TEST', 'тест'.encode('cp1251').decode( 'utf-8', 'surrogateescape'))]), ((b'x-test', 'тест'.encode('cp1251')),), False, None, False, False, URL('/path')) def test_http_request_parser_two_slashes(parser): text = b'GET //path HTTP/1.1\r\n\r\n' msg = parser.feed_data(text)[0][0][0] assert msg[:-1] == ('GET', '//path', (1, 1), CIMultiDict(), (), False, None, False, False) def test_http_request_parser_bad_method(parser): with pytest.raises(http_exceptions.BadStatusLine): parser.feed_data(b'!12%()+=~$ /get HTTP/1.1\r\n\r\n') def test_http_request_parser_bad_version(parser): with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(b'GET //get HT/11\r\n\r\n') def test_http_request_max_status_line(parser): with pytest.raises(http_exceptions.LineTooLong): parser.feed_data( b'GET /path' + b'test' * 10 * 1024 + b' HTTP/1.1\r\n\r\n') def test_http_response_parser_utf8(response): text = 'HTTP/1.1 200 Ok\r\nx-test:тест\r\n\r\n'.encode('utf-8') messages, upgraded, tail = response.feed_data(text) assert len(messages) == 1 msg = messages[0][0] assert msg.version == (1, 1) assert msg.code == 200 assert msg.reason == 'Ok' assert msg.headers == CIMultiDict([('X-TEST', 'тест')]) assert msg.raw_headers == ((b'x-test', 'тест'.encode('utf-8')),) assert not upgraded assert not tail def 
test_http_response_parser_bad_status_line_too_long(response): with pytest.raises(http_exceptions.LineTooLong): response.feed_data( b'HTTP/1.1 200 Ok' + b'test' * 10 * 1024 + b'\r\n\r\n') def test_http_response_parser_bad_version(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HT/11 200 Ok\r\n\r\n') def test_http_response_parser_no_reason(response): msg = response.feed_data(b'HTTP/1.1 200\r\n\r\n')[0][0][0] assert msg.version == (1, 1) assert msg.code == 200 assert not msg.reason def test_http_response_parser_bad(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTT/1\r\n\r\n') def test_http_response_parser_code_under_100(response): msg = response.feed_data(b'HTTP/1.1 99 test\r\n\r\n')[0][0][0] assert msg.code == 99 def test_http_response_parser_code_above_999(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTTP/1.1 9999 test\r\n\r\n') def test_http_response_parser_code_not_int(response): with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b'HTTP/1.1 ttt test\r\n\r\n') def test_http_request_chunked_payload(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] assert msg.chunked assert not payload.is_eof() assert isinstance(payload, streams.FlowControlStreamReader) parser.feed_data(b'4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def test_http_request_chunked_payload_and_next_message(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] messages, upgraded, tail = parser.feed_data( b'4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n' b'POST /test2 HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits 
assert payload.is_eof() assert len(messages) == 1 msg2, payload2 = messages[0] assert msg2.method == 'POST' assert msg2.chunked assert not payload2.is_eof() def test_http_request_chunked_payload_chunks(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b'4\r\ndata\r') parser.feed_data(b'\n4') parser.feed_data(b'\r') parser.feed_data(b'\n') parser.feed_data(b'li') parser.feed_data(b'ne\r\n0\r\n') parser.feed_data(b'test: test\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert not payload.is_eof() parser.feed_data(b'\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def test_parse_chunked_payload_chunk_extension(parser): text = (b'GET /test HTTP/1.1\r\n' b'transfer-encoding: chunked\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] parser.feed_data( b'4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n') assert b'dataline' == b''.join(d for d in payload._buffer) assert [4, 8] == payload._http_chunk_splits assert payload.is_eof() def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls): parser = request_cls(protocol, loop, readall=True) text = b'POST /test HTTP/1.1\r\n\r\n' msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_payload_response_without_body(loop, protocol, response_cls): parser = response_cls(protocol, loop, response_with_body=False) text = (b'HTTP/1.1 200 Ok\r\n' b'content-length: 10\r\n\r\n') msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_length_payload(response): text = (b'HTTP/1.1 200 Ok\r\n' b'content-length: 4\r\n\r\n') msg, payload = response.feed_data(text)[0][0] assert not payload.is_eof() response.feed_data(b'da') response.feed_data(b't') response.feed_data(b'aHT') assert payload.is_eof() assert b'data' == b''.join(d for d 
in payload._buffer) def test_parse_no_length_payload(parser): text = b'PUT / HTTP/1.1\r\n\r\n' msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_partial_url(parser): messages, upgrade, tail = parser.feed_data(b'GET /te') assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(b'st HTTP/1.1\r\n\r\n') assert len(messages) == 1 msg, payload = messages[0] assert msg.method == 'GET' assert msg.path == '/test' assert msg.version == (1, 1) assert payload.is_eof() def test_url_parse_non_strict_mode(parser): payload = 'GET /test/тест HTTP/1.1\r\n\r\n'.encode('utf-8') messages, upgrade, tail = parser.feed_data(payload) assert len(messages) == 1 msg, payload = messages[0] assert msg.method == 'GET' assert msg.path == '/test/тест' assert msg.version == (1, 1) assert payload.is_eof() class TestParsePayload(unittest.TestCase): def setUp(self): self.stream = mock.Mock() asyncio.set_event_loop(None) def test_parse_eof_payload(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, readall=True) p.feed_data(b'data') p.feed_eof() self.assertTrue(out.is_eof()) self.assertEqual([(bytearray(b'data'), 4)], list(out._buffer)) def test_parse_no_body(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, method='PUT') self.assertTrue(out.is_eof()) self.assertTrue(p.done) def test_parse_length_payload_eof(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, length=4) p.feed_data(b'da') with pytest.raises(http_exceptions.ContentLengthError): p.feed_eof() def test_parse_chunked_payload_size_error(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, chunked=True) self.assertRaises( http_exceptions.TransferEncodingError, p.feed_data, b'blah\r\n') self.assertIsInstance( out.exception(), http_exceptions.TransferEncodingError) def test_http_payload_parser_length(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, 
length=2) eof, tail = p.feed_data(b'1245') self.assertTrue(eof) self.assertEqual(b'12', b''.join(d for d, _ in out._buffer)) self.assertEqual(b'45', tail) _comp = zlib.compressobj(wbits=-zlib.MAX_WBITS) _COMPRESSED = b''.join([_comp.compress(b'data'), _comp.flush()]) def test_http_payload_parser_deflate(self): length = len(self._COMPRESSED) out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=length, compression='deflate') p.feed_data(self._COMPRESSED) self.assertEqual(b'data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) def test_http_payload_parser_deflate_no_wbits(self): comp = zlib.compressobj() COMPRESSED = b''.join([comp.compress(b'data'), comp.flush()]) length = len(COMPRESSED) out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=length, compression='deflate') p.feed_data(COMPRESSED) self.assertEqual(b'data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) def test_http_payload_parser_length_zero(self): out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser(out, length=0) self.assertTrue(p.done) self.assertTrue(out.is_eof()) @pytest.mark.skipif(brotli is None, reason="brotli is not installed") def test_http_payload_brotli(self): compressed = brotli.compress(b'brotli data') out = aiohttp.FlowControlDataQueue(self.stream) p = HttpPayloadParser( out, length=len(compressed), compression='br') p.feed_data(compressed) self.assertEqual(b'brotli data', b''.join(d for d, _ in out._buffer)) self.assertTrue(out.is_eof()) class TestDeflateBuffer(unittest.TestCase): def setUp(self): self.stream = mock.Mock() asyncio.set_event_loop(None) def test_feed_data(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.decompress.return_value = b'line' dbuf.feed_data(b'data', 4) self.assertEqual([b'line'], list(d for d, _ in buf._buffer)) def test_feed_data_err(self): buf = 
aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') exc = ValueError() dbuf.decompressor = mock.Mock() dbuf.decompressor.decompress.side_effect = exc self.assertRaises( http_exceptions.ContentEncodingError, dbuf.feed_data, b'data', 4) def test_feed_eof(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.flush.return_value = b'line' dbuf.feed_eof() self.assertEqual([b'line'], list(d for d, _ in buf._buffer)) self.assertTrue(buf._eof) def test_feed_eof_err(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.decompressor = mock.Mock() dbuf.decompressor.flush.return_value = b'line' dbuf.decompressor.eof = False self.assertRaises(http_exceptions.ContentEncodingError, dbuf.feed_eof) def test_empty_body(self): buf = aiohttp.FlowControlDataQueue(self.stream) dbuf = DeflateBuffer(buf, 'deflate') dbuf.feed_eof() self.assertTrue(buf.at_eof())
playpauseandstop/aiohttp
tests/test_http_parser.py
Python
apache-2.0
24,179
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // using System; // using System.ComponentModel.Composition; // using System.Threading; // using System.Windows.Forms; // using QuantConnect.Configuration; // using QuantConnect.Interfaces; // using QuantConnect.Lean.Engine; // using QuantConnect.Logging; // using QuantConnect.Packets; // using QuantConnect.Util; package com.quantconnect.lean.launcher; import java.time.LocalTime; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.math3.analysis.function.Log; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.quantconnect.lean.Globals; import com.quantconnect.lean.configuration.Config; import com.quantconnect.lean.engine.LeanEngineSystemHandlers; import com.quantconnect.lean.lean.engine.LeanEngineAlgorithmHandlers; import com.quantconnect.lean.packets.AlgorithmNodePacket; import com.quantconnect.lean.packets.LiveNodePacket; import com.quantconnect.lean.util.Composer; public class Program { private static final String _collapseMessage = "Unhandled exception breaking past controls and causing collapse of algorithm node. 
This is likely a memory leak of an external dependency or the underlying OS terminating the LEAN engine."; private static final Logger LOG = LoggerFactory.getLogger( Program.class ); public static void main( final String[] args ) { //Initialize: final String mode = "RELEASE"; // #if DEBUG //TODO add arg option handling // mode = "DEBUG"; // #endif final String environment = Config.get( "environment" ); final boolean liveMode = Config.getBoolean( "live-mode" ); // LOG.debuggingEnabled = Config.GetBool( "debug-mode"); // Log.LogHandler = Composer.Instance.GetExportedValueByTypeName<ILogHandler>(Config.Get( "log-handler", "CompositeLogHandler")); //Name thread for the profiler: Thread.currentThread().setName( "Algorithm Analysis Thread" ); LOG.trace( "Engine.Main(): LEAN ALGORITHMIC TRADING ENGINE v" + Globals.getVersion() + " Mode: " + mode ); LOG.trace( "Engine.Main(): Started " + LocalTime.now().toString() ); final Runtime runtime = Runtime.getRuntime(); runtime.gc(); LOG.trace( "Engine.Main(): Memory " + runtime.freeMemory() + " free " + runtime.totalMemory() + " Mb-Used " + runtime.maxMemory() + " Mb-Total"); //TODO //Import external libraries specific to physical server location (cloud/local) LeanEngineSystemHandlers leanEngineSystemHandlers; try { leanEngineSystemHandlers = LeanEngineSystemHandlers.fromConfiguration( Composer.INSTANCE ); } catch( final Exception compositionException ) { LOG.error( "Engine.Main(): Failed to load library: " + compositionException ); throw compositionException; } //Setup packeting, queue and controls system: These don't do much locally. 
leanEngineSystemHandlers.initialize(); //-> Pull job from QuantConnect job queue, or, pull local build: String assemblyPath = null; final Pair<String,AlgorithmNodePacket> nextJob = leanEngineSystemHandlers.getJobQueue().nextJob( assemblyPath ); assemblyPath = nextJob.getLeft(); final AlgorithmNodePacket job = nextJob.getRight(); if( job == null ) throw new NullPointerException( "Engine.Main(): Job was null." ); LeanEngineAlgorithmHandlers leanEngineAlgorithmHandlers; try { leanEngineAlgorithmHandlers = LeanEngineAlgorithmHandlers.fromConfiguration( Composer.INSTANCE ); } catch( final Exception compositionException ) { LOG.error( "Engine.Main(): Failed to load library: ", compositionException ); throw compositionException; } // if( environment.EndsWith( "-desktop")) // { // Application.EnableVisualStyles(); // messagingHandler = leanEngineSystemHandlers.Notify; // thread = new Thread(() -> LaunchUX(messagingHandler, job)); // thread.SetApartmentState(ApartmentState.STA); // thread.Start(); // } // log the job endpoints LOG.trace( "JOB HANDLERS: "); LOG.trace( " DataFeed: " + leanEngineAlgorithmHandlers.DataFeed.GetType().FullName); LOG.trace( " Setup: " + leanEngineAlgorithmHandlers.Setup.GetType().FullName); LOG.trace( " RealTime: " + leanEngineAlgorithmHandlers.RealTime.GetType().FullName); LOG.trace( " Results: " + leanEngineAlgorithmHandlers.Results.GetType().FullName); LOG.trace( " Transactions: " + leanEngineAlgorithmHandlers.Transactions.GetType().FullName); LOG.trace( " History: " + leanEngineAlgorithmHandlers.HistoryProvider.GetType().FullName); LOG.trace( " Commands: " + leanEngineAlgorithmHandlers.CommandQueue.GetType().FullName); if( job instanceof LiveNodePacket ) LOG.trace( " Brokerage: " + ((LiveNodePacket)job).Brokerage ); // if the job version doesn't match this instance version then we can't process it // we also don't want to reprocess redelivered jobs if( VersionHelper.IsNotEqualVersion(job.Version) || job.Redelivered) { Log.Error( "Engine.Run(): 
Job Version: " + job.Version + " Deployed Version: " + Globals.Version + " Redelivered: " + job.Redelivered); //Tiny chance there was an uncontrolled collapse of a server, resulting in an old user task circulating. //In this event kill the old algorithm and leave a message so the user can later review. leanEngineSystemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, _collapseMessage); leanEngineSystemHandlers.Notify.SetAuthentication(job); leanEngineSystemHandlers.Notify.Send(new RuntimeErrorPacket(job.AlgorithmId, _collapseMessage)); leanEngineSystemHandlers.JobQueue.AcknowledgeJob(job); return; } try { engine = new Engine( leanEngineSystemHandlers, leanEngineAlgorithmHandlers, liveMode ); engine.run( job, assemblyPath ); } finally { //Delete the message from the job queue: leanEngineSystemHandlers.getJobQueue().acknowledgeJob( job ); LOG.trace( "Engine.Main(): Packet removed from queue: " + job.getAlgorithmId() ); // clean up resources leanEngineSystemHandlers.close(); leanEngineAlgorithmHandlers.close(); } } // /** // * Form launcher method for thread. // */ // static void LaunchUX( IMessagingHandler messaging, AlgorithmNodePacket job ) { // //Launch the UX // //form = Composer.Instance.GetExportedValueByTypeName<Form>( "desktop-ux-classname"); // form = new Views.WinForms.LeanWinForm(messaging, job); // Application.Run(form); // } // } }
aricooperman/jLean
src/main/java/com/quantconnect/lean/launcher/Program.java
Java
apache-2.0
7,853
/** * Copyright 2011-2021 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.runtime.directio.api; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.util.Arrays; import java.util.HashSet; import java.util.Scanner; import java.util.Set; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import com.asakusafw.runtime.directio.BinaryStreamFormat; import com.asakusafw.runtime.directio.hadoop.HadoopDataSource; import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceProfile; import com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil; import com.asakusafw.runtime.io.ModelInput; import com.asakusafw.runtime.io.ModelOutput; import com.asakusafw.testdriver.OperatorTestEnvironment; /** * Test for {@link DirectIo}. */ public class DirectIoTest { /** * testing environment. */ @Rule public OperatorTestEnvironment env = new OperatorTestEnvironment(); /** * temporary folder. */ @Rule public TemporaryFolder folder = new TemporaryFolder(); /** * simple case. 
* @throws Exception if failed */ @Test public void simple() throws Exception { File root = injectDataSource("testing", folder.newFolder()); env.reload(); put(new File(root, "testing.txt"), "Hello, world!"); Set<String> results; try (ModelInput<StringBuilder> input = DirectIo.open(MockFormat.class, "testing", "*.txt")) { results = consume(input); } assertThat(results, is(set("Hello, world!"))); } /** * missing files. * @throws Exception if failed */ @Test public void missing() throws Exception { injectDataSource("testing", folder.newFolder()); env.reload(); Set<String> results; try (ModelInput<StringBuilder> input = DirectIo.open(MockFormat.class, "testing", "*.txt")) { results = consume(input); } assertThat(results, is(empty())); } /** * multiple files. * @throws Exception if failed */ @Test public void multiple() throws Exception { File root = injectDataSource("testing", folder.newFolder()); env.reload(); put(new File(root, "t1.txt"), "Hello1"); put(new File(root, "t2.txt"), "Hello2"); put(new File(root, "t3.txt"), "Hello3"); Set<String> results; try (ModelInput<StringBuilder> input = DirectIo.open(MockFormat.class, "testing", "*.txt")) { results = consume(input); } assertThat(results, is(set("Hello1", "Hello2", "Hello3"))); } private File injectDataSource(String path, File mapped) { env.configure(qualify(), HadoopDataSource.class.getName()); env.configure(qualify(HadoopDataSourceUtil.KEY_PATH), path); env.configure(qualify(HadoopDataSourceProfile.KEY_PATH), mapped.toURI().toString()); return mapped; } private String qualify() { return String.format("%s%s", HadoopDataSourceUtil.PREFIX, "t"); } private String qualify(String key) { return String.format("%s%s.%s", HadoopDataSourceUtil.PREFIX, "t", key); } private Set<String> set(String... values) { return new HashSet<>(Arrays.asList(values)); } private File put(File file, String... 
lines) throws IOException { file.getAbsoluteFile().getParentFile().mkdirs(); try (PrintWriter writer = new PrintWriter(file, "UTF-8")) { for (String line : lines) { writer.println(line); } } return file; } private Set<String> consume(ModelInput<StringBuilder> input) throws IOException { Set<String> results = new HashSet<>(); StringBuilder buf = new StringBuilder(); while (input.readTo(buf)) { results.add(buf.toString()); } return results; } /** * mock data format. */ public static class MockFormat extends BinaryStreamFormat<StringBuilder> { @Override public Class<StringBuilder> getSupportedType() { return StringBuilder.class; } @Override public ModelInput<StringBuilder> createInput( Class<? extends StringBuilder> dataType, String path, InputStream stream, long offset, long fragmentSize) throws IOException, InterruptedException { assertThat(offset, is(0L)); Scanner scanner = new Scanner(stream, "UTF-8"); return new ModelInput<StringBuilder>() { @Override public boolean readTo(StringBuilder model) throws IOException { if (scanner.hasNextLine()) { model.setLength(0); model.append(scanner.nextLine()); return true; } else { return false; } } @Override public void close() throws IOException { scanner.close(); } }; } @Override public ModelOutput<StringBuilder> createOutput( Class<? extends StringBuilder> dataType, String path, OutputStream stream) throws IOException, InterruptedException { throw new UnsupportedOperationException(); } } }
asakusafw/asakusafw
sandbox-project/asakusa-directio-runtime-ext/src/test/java/com/asakusafw/runtime/directio/api/DirectIoTest.java
Java
apache-2.0
6,165
/* * Copyright 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.web; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; import javax.servlet.Servlet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ResourceLoaderAware; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import 
org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; import org.springframework.core.convert.converter.Converter; import org.springframework.core.convert.converter.GenericConverter; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import org.springframework.format.Formatter; import org.springframework.format.FormatterRegistry; import org.springframework.format.datetime.DateFormatter; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.util.StringUtils; import org.springframework.validation.DefaultMessageCodesResolver; import org.springframework.validation.MessageCodesResolver; import org.springframework.web.accept.ContentNegotiationManager; import org.springframework.web.context.request.RequestContextListener; import org.springframework.web.filter.HiddenHttpMethodFilter; import org.springframework.web.servlet.DispatcherServlet; import org.springframework.web.servlet.LocaleResolver; import org.springframework.web.servlet.View; import org.springframework.web.servlet.ViewResolver; import org.springframework.web.servlet.config.annotation.DelegatingWebMvcConfiguration; import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry; import org.springframework.web.servlet.config.annotation.ViewControllerRegistry; import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport; import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter; import org.springframework.web.servlet.handler.SimpleUrlHandlerMapping; import org.springframework.web.servlet.i18n.FixedLocaleResolver; import org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter; import 
org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerMapping; import org.springframework.web.servlet.resource.ResourceHttpRequestHandler; import org.springframework.web.servlet.view.BeanNameViewResolver; import org.springframework.web.servlet.view.ContentNegotiatingViewResolver; import org.springframework.web.servlet.view.InternalResourceViewResolver; /** * {@link EnableAutoConfiguration Auto-configuration} for {@link EnableWebMvc Web MVC}. * * @author Phillip Webb * @author Dave Syer * @author Andy Wilkinson */ @Configuration @ConditionalOnWebApplication @ConditionalOnClass({ Servlet.class, DispatcherServlet.class, WebMvcConfigurerAdapter.class }) @ConditionalOnMissingBean(WebMvcConfigurationSupport.class) @Order(Ordered.HIGHEST_PRECEDENCE + 10) @AutoConfigureAfter(DispatcherServletAutoConfiguration.class) public class WebMvcAutoConfiguration { private static final String[] SERVLET_RESOURCE_LOCATIONS = { "/" }; private static final String[] CLASSPATH_RESOURCE_LOCATIONS = { "classpath:/META-INF/resources/", "classpath:/resources/", "classpath:/static/", "classpath:/public/" }; private static final String[] RESOURCE_LOCATIONS; static { RESOURCE_LOCATIONS = new String[CLASSPATH_RESOURCE_LOCATIONS.length + SERVLET_RESOURCE_LOCATIONS.length]; System.arraycopy(SERVLET_RESOURCE_LOCATIONS, 0, RESOURCE_LOCATIONS, 0, SERVLET_RESOURCE_LOCATIONS.length); System.arraycopy(CLASSPATH_RESOURCE_LOCATIONS, 0, RESOURCE_LOCATIONS, SERVLET_RESOURCE_LOCATIONS.length, CLASSPATH_RESOURCE_LOCATIONS.length); } private static final String[] STATIC_INDEX_HTML_RESOURCES; static { STATIC_INDEX_HTML_RESOURCES = new String[RESOURCE_LOCATIONS.length]; for (int i = 0; i < STATIC_INDEX_HTML_RESOURCES.length; i++) { STATIC_INDEX_HTML_RESOURCES[i] = RESOURCE_LOCATIONS[i] + "index.html"; } } public static String DEFAULT_PREFIX = ""; public static String DEFAULT_SUFFIX = ""; @Bean @ConditionalOnMissingBean(HiddenHttpMethodFilter.class) public HiddenHttpMethodFilter 
hiddenHttpMethodFilter() { return new HiddenHttpMethodFilter(); } // Defined as a nested config to ensure WebMvcConfigurerAdapter is not read when not // on the classpath @Configuration @Import(EnableWebMvcConfiguration.class) @EnableConfigurationProperties({ WebMvcProperties.class, ResourceProperties.class }) public static class WebMvcAutoConfigurationAdapter extends WebMvcConfigurerAdapter { private static Log logger = LogFactory.getLog(WebMvcConfigurerAdapter.class); @Value("${spring.view.prefix:}") private String prefix = ""; @Value("${spring.view.suffix:}") private String suffix = ""; @Autowired private ResourceProperties resourceProperties = new ResourceProperties(); @Autowired private WebMvcProperties mvcProperties = new WebMvcProperties(); @Autowired private ListableBeanFactory beanFactory; @Autowired private ResourceLoader resourceLoader; @Autowired private HttpMessageConverters messageConverters; @Override public void configureMessageConverters(List<HttpMessageConverter<?>> converters) { converters.addAll(this.messageConverters.getConverters()); } @Bean @ConditionalOnMissingBean(InternalResourceViewResolver.class) public InternalResourceViewResolver defaultViewResolver() { InternalResourceViewResolver resolver = new InternalResourceViewResolver(); resolver.setPrefix(this.prefix); resolver.setSuffix(this.suffix); return resolver; } @Bean @ConditionalOnMissingBean(RequestContextListener.class) public RequestContextListener requestContextListener() { return new RequestContextListener(); } @Bean @ConditionalOnBean(View.class) public BeanNameViewResolver beanNameViewResolver() { BeanNameViewResolver resolver = new BeanNameViewResolver(); resolver.setOrder(Ordered.LOWEST_PRECEDENCE - 10); return resolver; } @Bean @ConditionalOnBean(ViewResolver.class) @ConditionalOnMissingBean(name = "viewResolver") public ContentNegotiatingViewResolver viewResolver(BeanFactory beanFactory) { ContentNegotiatingViewResolver resolver = new ContentNegotiatingViewResolver(); 
resolver.setContentNegotiationManager(beanFactory .getBean(ContentNegotiationManager.class)); // ContentNegotiatingViewResolver uses all the other view resolvers to locate // a view so it should have a high precedence resolver.setOrder(Ordered.HIGHEST_PRECEDENCE); return resolver; } @Bean @ConditionalOnMissingBean(LocaleResolver.class) @ConditionalOnProperty(prefix = "spring.mvc", name = "locale") public LocaleResolver localeResolver() { return new FixedLocaleResolver( StringUtils.parseLocaleString(this.mvcProperties.getLocale())); } @Bean @ConditionalOnProperty(prefix = "spring.mvc", name = "date-format") public Formatter<Date> dateFormatter() { return new DateFormatter(this.mvcProperties.getDateFormat()); } @Override public MessageCodesResolver getMessageCodesResolver() { if (this.mvcProperties.getMessageCodesResolverFormat() != null) { DefaultMessageCodesResolver resolver = new DefaultMessageCodesResolver(); resolver.setMessageCodeFormatter(this.mvcProperties .getMessageCodesResolverFormat()); return resolver; } return null; } @Override public void addFormatters(FormatterRegistry registry) { for (Converter<?, ?> converter : getBeansOfType(Converter.class)) { registry.addConverter(converter); } for (GenericConverter converter : getBeansOfType(GenericConverter.class)) { registry.addConverter(converter); } for (Formatter<?> formatter : getBeansOfType(Formatter.class)) { registry.addFormatter(formatter); } } private <T> Collection<T> getBeansOfType(Class<T> type) { return this.beanFactory.getBeansOfType(type).values(); } @Override public void addResourceHandlers(ResourceHandlerRegistry registry) { if (!this.resourceProperties.isAddMappings()) { logger.debug("Default resource handling disabled"); return; } Integer cachePeriod = this.resourceProperties.getCachePeriod(); if (!registry.hasMappingForPattern("/webjars/**")) { registry.addResourceHandler("/webjars/**") .addResourceLocations("classpath:/META-INF/resources/webjars/") .setCachePeriod(cachePeriod); } if 
(!registry.hasMappingForPattern("/**")) { registry.addResourceHandler("/**") .addResourceLocations(RESOURCE_LOCATIONS) .setCachePeriod(cachePeriod); } } @Override public void addViewControllers(ViewControllerRegistry registry) { addStaticIndexHtmlViewControllers(registry); } private void addStaticIndexHtmlViewControllers(ViewControllerRegistry registry) { for (String resource : STATIC_INDEX_HTML_RESOURCES) { if (this.resourceLoader.getResource(resource).exists()) { try { logger.info("Adding welcome page: " + this.resourceLoader.getResource(resource).getURL()); } catch (IOException ex) { // Ignore } // Use forward: prefix so that no view resolution is done registry.addViewController("/").setViewName("forward:/index.html"); return; } } } @Configuration public static class FaviconConfiguration implements ResourceLoaderAware { private ResourceLoader resourceLoader; @Bean public SimpleUrlHandlerMapping faviconHandlerMapping() { SimpleUrlHandlerMapping mapping = new SimpleUrlHandlerMapping(); mapping.setOrder(Integer.MIN_VALUE + 1); mapping.setUrlMap(Collections.singletonMap("**/favicon.ico", faviconRequestHandler())); return mapping; } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } @Bean public ResourceHttpRequestHandler faviconRequestHandler() { ResourceHttpRequestHandler requestHandler = new ResourceHttpRequestHandler(); requestHandler.setLocations(getLocations()); return requestHandler; } private List<Resource> getLocations() { List<Resource> locations = new ArrayList<Resource>( CLASSPATH_RESOURCE_LOCATIONS.length + 1); for (String location : CLASSPATH_RESOURCE_LOCATIONS) { locations.add(this.resourceLoader.getResource(location)); } locations.add(new ClassPathResource("/")); return Collections.unmodifiableList(locations); } } } /** * Configuration equivalent to {@code @EnableWebMvc}. 
*/ @Configuration public static class EnableWebMvcConfiguration extends DelegatingWebMvcConfiguration { @Autowired(required = false) private WebMvcProperties mvcProperties; @Bean @Override public RequestMappingHandlerAdapter requestMappingHandlerAdapter() { RequestMappingHandlerAdapter adapter = super.requestMappingHandlerAdapter(); adapter.setIgnoreDefaultModelOnRedirect(this.mvcProperties == null ? true : this.mvcProperties.isIgnoreDefaultModelOnRedirect()); return adapter; } @Bean @Primary @Override public RequestMappingHandlerMapping requestMappingHandlerMapping() { // Must be @Primary for MvcUriComponentsBuilder to work return super.requestMappingHandlerMapping(); } } }
10045125/spring-boot
spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/WebMvcAutoConfiguration.java
Java
apache-2.0
13,159
package de.undercouch.bson4jackson.io; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Tests {@link ByteOrderUtil} * @author Michel Kraemer */ public class ByteOrderUtilTest { @Test public void flipInt() { assertEquals(0xDDCCBBAA, ByteOrderUtil.flip(0xAABBCCDD)); assertEquals(-129, ByteOrderUtil.flip(Integer.MAX_VALUE)); assertEquals(128, ByteOrderUtil.flip(Integer.MIN_VALUE)); } }
michel-kraemer/bson4jackson
src/test/java/de/undercouch/bson4jackson/io/ByteOrderUtilTest.java
Java
apache-2.0
453
/* * Copyright (C) 2016 the original author or authors. * * This file is part of jGrades Application Project. * * Licensed under the Apache License, Version 2.0 (the "License"); * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 */ package org.jgrades.logging.model; import org.jgrades.logging.model.updater.PerLevelUpdater; import org.jgrades.logging.model.updater.PerModuleAndLevelUpdater; import org.jgrades.logging.model.updater.PerModuleUpdater; public enum LoggingStrategy { LOG_FILE_PER_MODULE(new PerModuleUpdater()), LOG_FILE_PER_LEVEL(new PerLevelUpdater()), LOG_FILE_PER_MODULE_AND_LEVEL(new PerModuleAndLevelUpdater()); private final transient XmlFileNameTagsUpdater updater; LoggingStrategy(XmlFileNameTagsUpdater updater) { this.updater = updater; } public XmlFileNameTagsUpdater getUpdater() { return updater; } }
jgrades/jgrades
jg-logging/src/main/java/org/jgrades/logging/model/LoggingStrategy.java
Java
apache-2.0
936
/* * This code is subject to the HIEOS License, Version 1.0 * * Copyright(c) 2011 Vangent, Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.vangent.hieos.xutil.atna; /** * * @author Bernie Thuman */ public class ATNAAuditEventStop extends ATNAAuditEvent { // Nothing to add. }
kef/hieos
src/xutil/src/com/vangent/hieos/xutil/atna/ATNAAuditEventStop.java
Java
apache-2.0
610
package me.bianbian.worldclock.webapp.filter; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import javax.servlet.FilterChain; import javax.servlet.RequestDispatcher; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import java.io.IOException; /** * Borrowed from the Display Tag project: * http://displaytag.sourceforge.net/xref-test/org/displaytag/filter/MockFilterSupport.html * * Todo: look into using Spring's MockFilterChain: * http://www.springframework.org/docs/api/org/springframework/mock/web/MockFilterChain.html */ public class MockFilterChain implements FilterChain { private final Log log = LogFactory.getLog(MockFilterChain.class); private String forwardURL; public void doFilter(ServletRequest request, ServletResponse response) throws IOException, ServletException { String uri = ((HttpServletRequest) request).getRequestURI(); String requestContext = ((HttpServletRequest) request).getContextPath(); if (StringUtils.isNotEmpty(requestContext) && uri.startsWith(requestContext)) { uri = uri.substring(requestContext.length()); } this.forwardURL = uri; log.debug("Forwarding to: " + uri); RequestDispatcher dispatcher = request.getRequestDispatcher(uri); dispatcher.forward(request, response); } public String getForwardURL() { return this.forwardURL; } }
bianbian/worldclock
web/src/test/java/me/bianbian/worldclock/webapp/filter/MockFilterChain.java
Java
apache-2.0
1,590
package org.itevents.model.builder; import org.itevents.model.*; import java.util.Date; import java.util.List; /** * Created by vaa25 on 30.09.2015. */ public class EventBuilder { private int id; private String title; private Date eventDate; private Date createDate; private String regLink; private String address; private Location location; private String contact; private Integer price; private Currency currency; private City city; private List<Technology> technologies; private EventBuilder() { } public static EventBuilder anEvent() { return new EventBuilder(); } public EventBuilder id(int id) { this.id = id; return this; } public EventBuilder title(String title) { this.title = title; return this; } public EventBuilder eventDate(Date eventDate) { this.eventDate = eventDate; return this; } public EventBuilder сreateDate(Date createDate) { this.createDate = createDate; return this; } public EventBuilder regLink(String regLink) { this.regLink = regLink; return this; } public EventBuilder address(String address) { this.address = address; return this; } public EventBuilder location(Location location) { this.location = location; return this; } public EventBuilder сontact(String contact) { this.contact = contact; return this; } public EventBuilder price(Integer price) { this.price = price; return this; } public EventBuilder сurrency(Currency currency) { this.currency = currency; return this; } public EventBuilder сity(City city) { this.city = city; return this; } public EventBuilder technologies(List<Technology> technologies) { this.technologies = technologies; return this; } public EventBuilder but() { return anEvent().id(id).title(title).eventDate(eventDate).сreateDate(createDate).regLink(regLink) .address(address).location(location).сontact(contact).price(price).сurrency(currency) .сity(city).technologies(technologies); } public Event build() { Event event = new Event(); event.setId(id); event.setTitle(title); event.setEventDate(eventDate); 
event.setCreateDate(createDate); event.setRegLink(regLink); event.setAddress(address); event.setLocation(location); event.setContact(contact); event.setPrice(price); event.setCurrency(currency); event.setCity(city); event.setTechnologies(technologies); return event; } }
vladislove80/itevents
website/restservice/src/main/java/org/itevents/model/builder/EventBuilder.java
Java
apache-2.0
2,798
// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package buildererror provides an interface for builder Errors. package buildererror import ( "crypto/sha256" "fmt" "io" "strings" ) const ( errorIDLength = 8 ) // ID is a short error code passed to the user for supportability. type ID string // Error is a gcpbuildpack structured error. type Error struct { BuildpackID string `json:"buildpackId"` BuildpackVersion string `json:"buildpackVersion"` Type Status `json:"errorType"` Status Status `json:"canonicalCode"` ID ID `json:"errorId"` Message string `json:"errorMessage"` } func (e *Error) Error() string { if e.ID == "" { return e.Message } return fmt.Sprintf("%s [id:%s]", e.Message, e.ID) } // Errorf constructs an Error. func Errorf(status Status, format string, args ...interface{}) *Error { msg := fmt.Sprintf(format, args...) return &Error{ Type: status, Status: status, ID: GenerateErrorID(msg), Message: msg, } } // InternalErrorf constructs an Error with status StatusInternal (Google-attributed SLO). func InternalErrorf(format string, args ...interface{}) *Error { return Errorf(StatusInternal, format, args...) } // UserErrorf constructs an Error with status StatusUnknown (user-attributed SLO). func UserErrorf(format string, args ...interface{}) *Error { return Errorf(StatusUnknown, format, args...) } // GenerateErrorID creates a short hash from the provided parts. 
func GenerateErrorID(parts ...string) ID { h := sha256.New() for _, p := range parts { io.WriteString(h, p) } result := fmt.Sprintf("%x", h.Sum(nil)) // Since this is only a reporting aid for support, we truncate the hash to make it more human friendly. return ID(strings.ToLower(result[:errorIDLength])) }
GoogleCloudPlatform/buildpacks
pkg/buildererror/error.go
GO
apache-2.0
2,345
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package me.cmoz.grizzly.protobuf; import com.google.protobuf.CodedInputStream; import com.google.protobuf.ExtensionRegistryLite; import com.google.protobuf.MessageLite; import org.glassfish.grizzly.Buffer; import org.glassfish.grizzly.utils.BufferInputStream; import java.io.IOException; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; /** * Decodes Protocol Buffers messages from the input stream using a * {@code Varint32} encoded header to determine message size. */ @Slf4j public class Varint32ProtobufDecoder extends AbstractProtobufDecoder { /** * A protobuf decoder that uses a {@code Varint32} encoded header to * determine the size of a message to be decoded. * * @param prototype The base protocol buffers serialization unit. * @param extensionRegistry A table of known extensions, searchable by name * or field number, may be {@code null}. 
*/ public Varint32ProtobufDecoder( final @NonNull MessageLite prototype, final ExtensionRegistryLite extensionRegistry) { super(prototype, extensionRegistry); } /** {@inheritDoc} */ @Override public int readHeader(final Buffer input) throws IOException { if (input == null) { throw new IllegalArgumentException("'input' cannot be null."); } final BufferInputStream inputStream = new BufferInputStream(input); return CodedInputStream.readRawVarint32(input.get(), inputStream); } /** {@inheritDoc} */ @Override public String getName() { return Varint32ProtobufDecoder.class.getName(); } }
novabyte/grizzly-protobuf
src/main/java/me/cmoz/grizzly/protobuf/Varint32ProtobufDecoder.java
Java
apache-2.0
2,258
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2017.05.20 at 03:05:24 PM IST // package org.akomantoso.schema.v3.release; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;extension base="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0}blocksreq"> * &lt;attGroup ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0}name"/> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "") @XmlRootElement(name = "formula") public class Formula extends Blocksreq { @XmlAttribute(name = "name", required = true) protected String name; /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } }
kohsah/akomantoso-lib
src/main/java/org/akomantoso/schema/v3/release/Formula.java
Java
apache-2.0
1,760
/* * Copyright (c) 2001-2007 Sun Microsystems, Inc. All rights reserved. * * The Sun Project JXTA(TM) Software License * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. The end-user documentation included with the redistribution, if any, must * include the following acknowledgment: "This product includes software * developed by Sun Microsystems, Inc. for JXTA(TM) technology." * Alternately, this acknowledgment may appear in the software itself, if * and wherever such third-party acknowledgments normally appear. * * 4. The names "Sun", "Sun Microsystems, Inc.", "JXTA" and "Project JXTA" must * not be used to endorse or promote products derived from this software * without prior written permission. For written permission, please contact * Project JXTA at http://www.jxta.org. * * 5. Products derived from this software may not be called "JXTA", nor may * "JXTA" appear in their name, without prior written permission of Sun. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL SUN * MICROSYSTEMS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * JXTA is a registered trademark of Sun Microsystems, Inc. in the United * States and other countries. * * Please see the license information page at : * <http://www.jxta.org/project/www/license.html> for instructions on use of * the license in source files. * * ==================================================================== * * This software consists of voluntary contributions made by many individuals * on behalf of Project JXTA. For more information on Project JXTA, please see * http://www.jxta.org. * * This license is based on the BSD license adopted by the Apache Foundation. 
*/ package net.jxta.socket; import net.jxta.credential.Credential; import net.jxta.credential.CredentialValidator; import net.jxta.document.AdvertisementFactory; import net.jxta.document.StructuredDocumentFactory; import net.jxta.document.XMLDocument; import net.jxta.endpoint.EndpointAddress; import net.jxta.endpoint.Message; import net.jxta.endpoint.MessageElement; import net.jxta.impl.endpoint.EndpointServiceImpl; import net.jxta.logging.Logging; import net.jxta.peergroup.PeerGroup; import net.jxta.pipe.InputPipe; import net.jxta.pipe.PipeMsgEvent; import net.jxta.pipe.PipeMsgListener; import net.jxta.pipe.PipeService; import net.jxta.protocol.PeerAdvertisement; import net.jxta.protocol.PipeAdvertisement; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketTimeoutException; import java.security.cert.X509Certificate; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; import java.util.Set; /** * JxtaServerSocket is a bi-directional Pipe that behaves very much like * ServerSocket. It creates an inputpipe and listens for pipe connection * requests. JxtaServerSocket also defines it own protocol. 
Requests arrive as * a JXTA Message with the following elements: * <p/> * &lt;Cred> Credentials which can be used to determine trust &lt;/Cred> * <p/> * &lt;reqPipe> requestor's pipe advertisement &lt;/reqPipe> * <p/> * &lt;remPipe> Remote pipe advertisement &lt;/remPipe> * <p/> * &lt;reqPeer> Remote peer advertisement &lt;/reqPeer> * <p/> * &lt;stream> determine whether the connection is reliable, or not &lt;/stream> * <p/> * &lt;close> close request &lt;/close> * <p/> * &lt;data> Data &lt;/data> * <p/> * JxtaServerSocket then creates a new private pipe, listens for messages on that pipe, * resolves the requestor's pipe, and sends a &lt;remPipe> private pipe created &lt;/remotePipe> * advertisement back, where the remote side is resolved. * <p/> * The {@code accept()} backlog defaults to 50 requests. * <p/> * The timeout default to 60 seconds, i.e. blocking. */ public class JxtaServerSocket extends ServerSocket implements PipeMsgListener { private static final Logger LOG = Logger.getLogger(JxtaServerSocket.class.getName()); protected static final String MSG_ELEMENT_NAMESPACE = "JXTASOC"; protected static final String credTag = "Cred"; protected static final String reqPipeTag = "reqPipe"; protected static final String remPeerTag = "remPeer"; protected static final String remPipeTag = "remPipe"; protected static final String dataTag = "data"; protected static final String closeTag = "close"; protected final static String closeReqValue = "close"; protected final static String closeAckValue = "closeACK"; protected static final String streamTag = "stream"; protected static final String encryptTag = "encrypt"; protected static final String symmetricKeyTag = "symmetricKey"; private final static int DEFAULT_BACKLOG = 50; private final static long DEFAULT_TIMEOUT = 60 * 1000L; /** * QUEUE_END_MESSAGE is used to signal that the queue has been closed. 
*/ protected static final Message QUEUE_END_MESSAGE = new Message(); /** * The PeerGroup */ protected PeerGroup group; /** * The pipe advertisement we are serving. */ protected PipeAdvertisement pipeAdv; /** * The input pipe on which we listen for connect requests. */ protected InputPipe serverPipe; /** * The credential we will present to connect requests. */ protected Credential localCredential = null; /** * The number of connect requests we will allow to become backlogged. */ protected int backlog = DEFAULT_BACKLOG; /** * The timeout for accept operations. */ protected long timeout = DEFAULT_TIMEOUT; protected BlockingQueue<Message> queue = null; protected volatile boolean bound = false; protected volatile boolean closed = false; private CredentialValidator credValidator = null; private volatile Throwable creatorTrace = new Throwable("Instance construction stack trace"); private PeerGroup netPeerGroup; /** * The data encryption flag. */ private boolean encrypt = false; /** * Default Constructor * <p/> * A call to {@code bind()} is needed to finish initializing this object. * * @throws IOException if an io error occurs */ public JxtaServerSocket() throws IOException {} /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @throws IOException if an io error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address) throws IOException { this(address, DEFAULT_BACKLOG); } /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @param encrypt the data * @throws IOException if an io error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address, boolean encrypt) throws IOException { this(address, DEFAULT_BACKLOG, encrypt); } /** * Constructs and binds a JxtaServerSocket to the specified pipe. 
* * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv) throws IOException { this(group, pipeAdv, DEFAULT_BACKLOG); } /** * Constructs and binds a JxtaServerSocket to the specified pipe. * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param encrypt the data * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, boolean encrypt) throws IOException { this(group, pipeAdv, DEFAULT_BACKLOG, encrypt); } /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @param backlog the size of the backlog queue * @throws IOException if an I/O error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address, int backlog) throws IOException { this(address, backlog, (int) DEFAULT_TIMEOUT); } /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @param backlog the size of the backlog queue * @param encrypt the data * @throws IOException if an I/O error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address, int backlog, boolean encrypt) throws IOException { this(address, backlog, (int) DEFAULT_TIMEOUT, encrypt); } /** * Constructor for the JxtaServerSocket object * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. 
* @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog) throws IOException { this(group, pipeAdv, backlog, (int) DEFAULT_TIMEOUT); } /** * Constructor for the JxtaServerSocket object * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @param encrypt the data * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog, boolean encrypt) throws IOException { this(group, pipeAdv, backlog, (int) DEFAULT_TIMEOUT, encrypt); } /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @param backlog the size of the backlog queue * @param timeout connection timeout in milliseconds * @throws IOException if an I/O error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address, int backlog, int timeout) throws IOException { setSoTimeout(timeout); bind(address, backlog); } /** * Constructs and binds a JxtaServerSocket using a JxtaSocketAddress as * the address. * * @param address an instance of JxtaSocketAddress * @param backlog the size of the backlog queue * @param timeout connection timeout in milliseconds * @param encrypt the data * @throws IOException if an I/O error occurs * @see net.jxta.socket.JxtaSocketAddress */ public JxtaServerSocket(SocketAddress address, int backlog, int timeout, boolean encrypt) throws IOException { setSoTimeout(timeout); this.encrypt = encrypt; bind(address, backlog); } /** * Constructor for the JxtaServerSocket object. * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. 
* @param timeout the specified timeout, in milliseconds * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog, int timeout) throws IOException { this(group, pipeAdv, backlog, timeout, null); } /** * Constructor for the JxtaServerSocket object. * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @param timeout the specified timeout, in milliseconds * @param encrypt the data * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog, int timeout, boolean encrypt) throws IOException { this(group, pipeAdv, backlog, timeout, null, encrypt); } /** * Constructor for the JxtaServerSocket object. * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @param timeout the specified timeout, in milliseconds * @param credValidator the CredentialValidator * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog, int timeout, CredentialValidator credValidator) throws IOException { setSoTimeout(timeout); this.credValidator = credValidator; bind(group, pipeAdv, backlog); } /** * Constructor for the JxtaServerSocket object. * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. 
* @param timeout the specified timeout, in milliseconds * @param credValidator the CredentialValidator * @param encrypt the data * @throws IOException if an I/O error occurs */ public JxtaServerSocket(PeerGroup group, PipeAdvertisement pipeAdv, int backlog, int timeout, CredentialValidator credValidator, boolean encrypt) throws IOException { setSoTimeout(timeout); this.credValidator = credValidator; this.encrypt = encrypt; bind(group, pipeAdv, backlog); } /** * {@inheritDoc} * <p/> * Closes the JxtaServerPipe. */ @Override protected void finalize() throws Throwable { super.finalize(); if (!closed) Logging.logCheckedWarning(LOG, "JxtaServerSocket is being finalized without being previously closed. This is likely an application level bug.", creatorTrace); close(); } /** * The netPeerGroup needs to be set when resolving SocketAddresses with only the peerID supplied. * @param netPeerGroup */ public void setNetPeerGroup(PeerGroup netPeerGroup) { this.netPeerGroup = netPeerGroup; } private PeerGroup.GlobalRegistry getGlobalRegistry() throws IOException { if (netPeerGroup == null) { throw new IOException("Can not resolve the peerID in socket address, must setNetPeerGroup() on JXTAServerSocket"); } return netPeerGroup.getGlobalRegistry(); } /** * {@inheritDoc} */ @Override public Socket accept() throws IOException { if (!isBound()) throw new SocketException("Socket is not bound yet"); try { Logging.logCheckedFine(LOG, "Waiting for a connection"); while (true) { if (isClosed()) throw new SocketException("Socket is closed"); Message msg = queue.poll(timeout, TimeUnit.MILLISECONDS); if (isClosed()) throw new SocketException("Socket is closed"); if (msg == null) throw new SocketTimeoutException("Timeout reached"); if (QUEUE_END_MESSAGE == msg) throw new SocketException("Socket is closed."); JxtaSocket socket = processMessage(msg); // make sure we have a socket returning if (socket != null) { Logging.logCheckedFine(LOG, "New socket connection ", socket); return socket; } else { 
Logging.logCheckedWarning(LOG, "No connection."); } } } catch (InterruptedException ie) { SocketException interrupted = new SocketException("interrupted"); interrupted.initCause(ie); throw interrupted; } } /** * Binds the <code>JxtaServerSocket</code> to a specific pipe advertisement * * @param group JXTA PeerGroup * @param pipeAdv PipeAdvertisement on which pipe requests are accepted * @throws IOException if an I/O error occurs */ public void bind(PeerGroup group, PipeAdvertisement pipeAdv) throws IOException { bind(group, pipeAdv, DEFAULT_BACKLOG); } /** * Binds the <code>JxtaServerSocket</code> to a specific pipe advertisement * * @param group JXTA PeerGroup * @param pipeadv PipeAdvertisement on which pipe requests are accepted * @param backlog the maximum length of the queue. * @throws IOException if an I/O error occurs */ public void bind(PeerGroup group, PipeAdvertisement pipeadv, int backlog) throws IOException { if (PipeService.PropagateType.equals(pipeadv.getType())) { throw new IOException("Propagate pipe advertisements are not supported"); } if (backlog <= 0) { throw new IllegalArgumentException("backlog must be > 0"); } this.backlog = backlog; queue = new ArrayBlockingQueue<Message>(backlog); this.group = group; this.pipeAdv = pipeadv; PipeService pipeSvc = group.getPipeService(); serverPipe = pipeSvc.createInputPipe(pipeadv, this); setBound(true); } /** * {@inheritDoc} * <p/> * Used to bind a JxtaServerSocket created with the no-arg constructor. */ @Override public void bind(SocketAddress endpoint) throws IOException { bind(endpoint, backlog); } /** * {@inheritDoc} * <p/> * Used to bind a JxtaServerSocket created with the no-arg constructor. 
*/ @Override public void bind(SocketAddress endpoint, int backlog) throws IOException { if (endpoint instanceof JxtaSocketAddress) { JxtaSocketAddress socketAddress = (JxtaSocketAddress) endpoint; PeerGroup pg = getGlobalRegistry().lookupInstance(socketAddress.getPeerGroupId()); if (pg == null) { throw new IOException( "Can't connect socket in PeerGroup with id " + socketAddress.getPeerGroupId() + ". No running instance of the group is registered."); } // bind(pg.getWeakInterface(), socketAddress.getPipeAdv(), backlog); bind(pg, socketAddress.getPipeAdv(), backlog); // pg.unref(); } else { throw new IllegalArgumentException("Unsupported subclass of SocketAddress; " + "use JxtaSocketAddress instead."); } } /** * {@inheritDoc} */ @Override public void close() throws IOException { if (closed) { return; } closed = true; creatorTrace = null; if (isBound()) { // close all the pipe serverPipe.close(); setBound(false); } queue.clear(); while (true) { try { queue.put(QUEUE_END_MESSAGE); // end queue message is now on the queue, we are done. break; } catch (InterruptedException woken) { // We MUST put the terminal message onto the queue before // finishing. We won't have a second chance. 
Thread.interrupted();
            }
        }

        Logging.logCheckedInfo(LOG, "Closed : ", this);
    }

    /**
     * @return the server socket's JxtaSocketAddress
     * @see java.net.ServerSocket#getLocalSocketAddress()
     */
    @Override
    public SocketAddress getLocalSocketAddress() {
        return new JxtaSocketAddress(getGroup(), getPipeAdv());
    }

    /**
     * {@inheritDoc}
     * <p/>
     * Returns the accept timeout in milliseconds; an internal timeout of
     * {@code Long.MAX_VALUE} (set by {@code setSoTimeout(0)}) is reported
     * back as {@code 0}, meaning "block forever".
     */
    @Override
    public int getSoTimeout() throws IOException {
        if (isClosed()) {
            throw new SocketException("Socket is closed");
        }
        if (timeout > Integer.MAX_VALUE) {
            return 0;
        } else {
            return (int) timeout;
        }
    }

    /**
     * {@inheritDoc}
     * <p/>
     * A timeout of {@code 0} means "wait indefinitely" and is stored
     * internally as {@code Long.MAX_VALUE}.
     */
    @Override
    public void setSoTimeout(int timeout) throws SocketException {
        if (isClosed()) {
            throw new SocketException("Socket is closed");
        }
        if (timeout < 0) {
            throw new IllegalArgumentException("timeout must be >= 0");
        }
        if (0 == timeout) {
            this.timeout = Long.MAX_VALUE;
        } else {
            this.timeout = (long) timeout;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isBound() {
        return bound;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isClosed() {
        return closed;
    }

    /**
     * Sets whether this socket is currently bound or not. A socket is
     * considered bound if the local resources required in order to interact
     * with a remote peer are allocated and open.
     *
     * @param boundState The new bound state.
     */
    private synchronized void setBound(boolean boundState) {
        this.bound = boundState;
    }

    /**
     * Gets the encrypt flag status. If true, data will be encrypted between
     * the remote JxtaSocket and the JxtaSocket created by the JxtaServerSocket.
     *
     * @return encrypt
     */
    public boolean isEncrypted() {
        return encrypt;
    }

    /**
     * Sets the encrypt flag status. If true, data will be encrypted between
     * the remote JxtaSocket and the JxtaSocket created by the JxtaServerSocket.
* Can only be set before the JxtaServerSocket is bound * * @param encrypt */ public void setEncrypted(boolean encrypt) { if (!isBound()) this.encrypt = encrypt; } /** * Gets the group associated with this JxtaServerSocket object * * @return The group value */ public PeerGroup getGroup() { return group; } /** * Gets the PipeAdvertisement associated with this JxtaServerSocket object * * @return The pipeAdv value */ public PipeAdvertisement getPipeAdv() { return pipeAdv; } /** * {@inheritDoc} */ public void pipeMsgEvent(PipeMsgEvent event) { // deal with messages as they come in Message message = event.getMessage(); if (message == null) { return; } boolean pushed = false; try { pushed = queue.offer(message, timeout, TimeUnit.MILLISECONDS); } catch (InterruptedException woken) { Logging.logCheckedFine(LOG, "Interrupted\n", woken); } Logging.logCheckedWarning(LOG, "backlog queue full, connect request dropped"); } /** * processMessage is the main mechanism in establishing bi-directional connections * <p/> * It accepts connection messages and constructs a JxtaSocket with a ephemeral * InputPipe and a messenger. * * @param msg The client connection request (assumed not null) * @return JxtaSocket Which may be null if an error occurs. 
*/ private JxtaSocket processMessage(Message msg) { PipeAdvertisement remoteEphemeralPipeAdv = null; PeerAdvertisement remotePeerAdv = null; Credential credential = null; Logging.logCheckedFine(LOG, "Processing a connection message : ", msg); try { MessageElement el = msg.getMessageElement(MSG_ELEMENT_NAMESPACE, reqPipeTag); if (el != null) { XMLDocument pipeAdvDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(el); remoteEphemeralPipeAdv = (PipeAdvertisement) AdvertisementFactory.newAdvertisement(pipeAdvDoc); } el = msg.getMessageElement(MSG_ELEMENT_NAMESPACE, remPeerTag); if (el != null) { XMLDocument peerAdvDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(el); remotePeerAdv = (PeerAdvertisement) AdvertisementFactory.newAdvertisement(peerAdvDoc); } el = msg.getMessageElement(MSG_ELEMENT_NAMESPACE, credTag); if (el != null) { try { XMLDocument credDoc = (XMLDocument) StructuredDocumentFactory.newStructuredDocument(el); credential = group.getMembershipService().makeCredential(credDoc); if (!checkCred(credential)) { Logging.logCheckedWarning(LOG, "Invalid credential"); return null; } } catch (Exception ignored) { // ignored } } boolean isReliable = false; el = msg.getMessageElement(MSG_ELEMENT_NAMESPACE, streamTag); if (el != null) { isReliable = Boolean.valueOf(el.toString()); } Set<EndpointAddress> verifiedAddressSet = (Set)msg.getMessageProperty(EndpointServiceImpl.VERIFIED_ADDRESS_SET); Set<X509Certificate> tempCertSet = (Set)msg.getMessageProperty(EndpointServiceImpl.MESSAGE_SIGNER_SET); if ((null != remoteEphemeralPipeAdv) && (null != remotePeerAdv)) { return createEphemeralSocket(group, pipeAdv, remoteEphemeralPipeAdv, remotePeerAdv, localCredential, credential, isReliable, verifiedAddressSet, tempCertSet); } else { Logging.logCheckedWarning(LOG, "Connection message did not contain valid connection information."); return null; } } catch (IOException e) { // deal with the error Logging.logCheckedWarning(LOG, "IOException 
occured\n", e); } catch (RuntimeException e) { // deal with the error Logging.logCheckedWarning(LOG, "Exception occured\n", e); } return null; } /** * Invokes the specified CredentialValidator to very a credential * @param cred the credential * @return <code>true</code> if valid, or if no validator is specified */ private boolean checkCred(Credential cred) { return credValidator == null || credValidator.checkCred(cred); } /** * Construct the emphemeral socket result from accept. This method exists * primarily so that sub-classes can substitute a different JxtaSocket * sub-class. * * @param group The peer group for the socket. * @param pipeAdv The public pipe advertisement. * @param remoteEphemeralPipeAdv The pipe advertisement of the remote peer's * ephemeral pipe. * @param remotePeerAdv The peer advertisement of the remote peer. * @param localCredential Our credential. * @param credential The credential of the remote peer. * @param isReliable if true, uses the reliability library in non-direct mode * @return The new JxtaSocket instance. * @throws IOException if an io error occurs */ protected JxtaSocket createEphemeralSocket(PeerGroup group, PipeAdvertisement pipeAdv, PipeAdvertisement remoteEphemeralPipeAdv, PeerAdvertisement remotePeerAdv, Credential localCredential, Credential credential, boolean isReliable) throws IOException { return new JxtaSocket(group, pipeAdv, remoteEphemeralPipeAdv, remotePeerAdv, localCredential, credential, isReliable); } /** * Construct the emphemeral socket result from accept. This method exists * primarily so that sub-classes can substitute a different JxtaSocket * sub-class. * * @param group The peer group for the socket. * @param pipeAdv The public pipe advertisement. * @param remoteEphemeralPipeAdv The pipe advertisement of the remote peer's * ephemeral pipe. * @param remotePeerAdv The peer advertisement of the remote peer. * @param localCredential Our credential. * @param credential The credential of the remote peer. 
* @param isReliable if true, uses the reliability library in non-direct mode * @param verifiedAddressSet The verified address set from the connect Message * @param verifiedAddressCertSet The verified cert set from the connect Message * @return The new JxtaSocket instance. * @throws IOException if an io error occurs */ protected JxtaSocket createEphemeralSocket(PeerGroup group, PipeAdvertisement pipeAdv, PipeAdvertisement remoteEphemeralPipeAdv, PeerAdvertisement remotePeerAdv, Credential localCredential, Credential credential, boolean isReliable, Set<EndpointAddress> verifiedAddressSet, Set<X509Certificate> verifiedAddressCertSet) throws IOException { return new JxtaSocket(group, pipeAdv, remoteEphemeralPipeAdv, remotePeerAdv, localCredential, credential, isReliable, verifiedAddressSet, verifiedAddressCertSet, encrypt); } /** * Sets the credential to be used by this socket connection. If no * credentials are set, the default group credential will be used. * * @param localCredential The credential to be used for connection responses * or <tt>null</tt> if the default credential is to be used. */ public void setCredential(Credential localCredential) { this.localCredential = localCredential; } /** * {@inheritDoc} * <p/> * This output is suitable for debugging but should not be parsed. All * of the information is available through other means. */ @Override public String toString() { StringBuilder result = new StringBuilder(); result.append(getClass().getName()); result.append('@'); result.append(System.identityHashCode(this)); result.append('['); result.append(pipeAdv.getPipeID()); result.append(']'); result.append(isClosed() ? " CLOSED :" : " OPEN :"); result.append(isBound() ? " BOUND " : " UNBOUND "); return result.toString(); } }
johnjianfang/jxse
src/main/java/net/jxta/socket/JxtaServerSocket.java
Java
apache-2.0
31,538
/*
 * Copyright 2019 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.zetasketch.internal.hllplus;

import static com.google.common.truth.Truth.assertThat;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/** Unit tests for {@link Data}. */
@RunWith(JUnit4.class)
@SuppressWarnings("boxing")
public class DataTest {

  /** Absolute difference to tolerate when comparing floating point numbers below. */
  private static final double TOLERANCE = 0.0001;

  @Test
  public void alpha() {
    // Spot-check the alpha bias-correction constant for precision 14.
    assertThat(Data.alpha(14)).isWithin(TOLERANCE).of(0.7213);
  }

  @Test
  public void estimateBias_WhenExactlyDefined() {
    // Defined exactly in bias tables.
    assertThat(Data.estimateBias(738.1256, 10)).isWithin(TOLERANCE).of(737.1256);
    assertThat(Data.estimateBias(14573.7784, 14)).isWithin(TOLERANCE).of(9248.7784);
  }

  @Test
  public void estimateBias_WhenInterpolationNeeded() {
    // Interpolated results with values computed by hand starting with the empirical data tables
    // from the HLL++ paper (https://goo.gl/pc916Z). For example, for the first value we get:
    //
    // means = [738.1256, 750.4234, 763.1064, 775.4732, 788.4636, 801.0644]
    // biases = [737.1256, 724.4234, 711.1064, 698.4732, 685.4636, 673.0644]
    // weights = [1.0/(estimate - mean)^2 for mean in means]
    // bias = sum(w*b for (w, b) in zip(weights, biases)) / sum(weights)
    //      = 736.4957464911646
    //
    // We test interpolations on the left, in the center and on the right side of the bias tables.
    assertThat(Data.estimateBias(1490, 11)).isWithin(TOLERANCE).of(1456.8144);
    assertThat(Data.estimateBias(16300, 14)).isWithin(TOLERANCE).of(8005.2257);
    assertThat(Data.estimateBias(653000, 17)).isWithin(TOLERANCE).of(-411.7805);
  }

  /**
   * Tests that the bias is zero when the bias table defines values for the given precision but the
   * estimate is off the left or right side of the defined values.
   */
  @Test
  public void estimateBias_ReturnsZeroWhenMeanOutOfRange() {
    assertThat(Data.estimateBias(738, 10)).isWithin(TOLERANCE).of(0);
    assertThat(Data.estimateBias(1310000, 18)).isWithin(TOLERANCE).of(0);
  }

  @Test
  public void estimateBias_ReturnsZeroWhenPrecisionOutOfRange() {
    // One below and one above the precisions covered by the bias tables.
    assertThat(Data.estimateBias(1000, Data.MINIMUM_PRECISION - 1)).isWithin(TOLERANCE).of(0);
    assertThat(Data.estimateBias(1000, Data.MAXIMUM_PRECISION + 1)).isWithin(TOLERANCE).of(0);
  }

  @Test
  public void linearCountingThreshold_WhenPreciselyDefined() {
    assertThat(Data.linearCountingThreshold(14)).isEqualTo(11500);
  }

  @Test
  public void linearCountingThreshold_WhenComputed() {
    assertThat(Data.linearCountingThreshold(19)).isEqualTo(1310720);
  }
}
google/zetasketch
javatests/com/google/zetasketch/internal/hllplus/DataTest.java
Java
apache-2.0
3,307
package cn.edu.sjtu.se.dclab.server.service.impl; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UsernameNotFoundException; import cn.edu.sjtu.se.dclab.server.service.UserService; /** *2015年1月19日 下午9:44:00 *@author changyi yuan */ public class UserServiceImpl implements UserService { @Override public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException { return null; } }
wbear2/archetype-server
src/main/java/cn/edu/sjtu/se/dclab/server/service/impl/UserServiceImpl.java
Java
apache-2.0
500
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from tempest import config from tempest.lib.common.utils import data_utils from tempest.lib import decorators from tempest.lib import exceptions as lib_exc from ironic_tempest_plugin.common import waiters from ironic_tempest_plugin.tests.api.admin import api_microversion_fixture from ironic_tempest_plugin.tests.api.admin import base CONF = config.CONF class TestNodes(base.BaseBaremetalTest): """Tests for baremetal nodes.""" def setUp(self): super(TestNodes, self).setUp() _, self.chassis = self.create_chassis() _, self.node = self.create_node(self.chassis['uuid']) def _associate_node_with_instance(self): self.client.set_node_power_state(self.node['uuid'], 'power off') waiters.wait_for_bm_node_status(self.client, self.node['uuid'], 'power_state', 'power off') instance_uuid = data_utils.rand_uuid() self.client.update_node(self.node['uuid'], instance_uuid=instance_uuid) self.addCleanup(self.client.update_node, uuid=self.node['uuid'], instance_uuid=None) return instance_uuid @decorators.idempotent_id('4e939eb2-8a69-4e84-8652-6fffcbc9db8f') def test_create_node(self): params = {'cpu_arch': 'x86_64', 'cpus': '12', 'local_gb': '10', 'memory_mb': '1024'} _, body = self.create_node(self.chassis['uuid'], **params) self._assertExpected(params, body['properties']) @decorators.idempotent_id('9ade60a4-505e-4259-9ec4-71352cbbaf47') def test_delete_node(self): _, node = self.create_node(self.chassis['uuid']) self.delete_node(node['uuid']) 
self.assertRaises(lib_exc.NotFound, self.client.show_node, node['uuid']) @decorators.idempotent_id('55451300-057c-4ecf-8255-ba42a83d3a03') def test_show_node(self): _, loaded_node = self.client.show_node(self.node['uuid']) self._assertExpected(self.node, loaded_node) @decorators.idempotent_id('4ca123c4-160d-4d8d-a3f7-15feda812263') def test_list_nodes(self): _, body = self.client.list_nodes() self.assertIn(self.node['uuid'], [i['uuid'] for i in body['nodes']]) @decorators.idempotent_id('85b1f6e0-57fd-424c-aeff-c3422920556f') def test_list_nodes_association(self): _, body = self.client.list_nodes(associated=True) self.assertNotIn(self.node['uuid'], [n['uuid'] for n in body['nodes']]) self._associate_node_with_instance() _, body = self.client.list_nodes(associated=True) self.assertIn(self.node['uuid'], [n['uuid'] for n in body['nodes']]) _, body = self.client.list_nodes(associated=False) self.assertNotIn(self.node['uuid'], [n['uuid'] for n in body['nodes']]) @decorators.idempotent_id('18c4ebd8-f83a-4df7-9653-9fb33a329730') def test_node_port_list(self): _, port = self.create_port(self.node['uuid'], data_utils.rand_mac_address()) _, body = self.client.list_node_ports(self.node['uuid']) self.assertIn(port['uuid'], [p['uuid'] for p in body['ports']]) @decorators.idempotent_id('72591acb-f215-49db-8395-710d14eb86ab') def test_node_port_list_no_ports(self): _, node = self.create_node(self.chassis['uuid']) _, body = self.client.list_node_ports(node['uuid']) self.assertEmpty(body['ports']) @decorators.idempotent_id('4fed270a-677a-4d19-be87-fd38ae490320') def test_update_node(self): props = {'cpu_arch': 'x86_64', 'cpus': '12', 'local_gb': '10', 'memory_mb': '128'} _, node = self.create_node(self.chassis['uuid'], **props) new_p = {'cpu_arch': 'x86', 'cpus': '1', 'local_gb': '10000', 'memory_mb': '12300'} _, body = self.client.update_node(node['uuid'], properties=new_p) _, node = self.client.show_node(node['uuid']) self._assertExpected(new_p, node['properties']) 
@decorators.idempotent_id('cbf1f515-5f4b-4e49-945c-86bcaccfeb1d') def test_validate_driver_interface(self): _, body = self.client.validate_driver_interface(self.node['uuid']) core_interfaces = ['power', 'deploy'] for interface in core_interfaces: self.assertIn(interface, body) @decorators.idempotent_id('5519371c-26a2-46e9-aa1a-f74226e9d71f') def test_set_node_boot_device(self): self.client.set_node_boot_device(self.node['uuid'], 'pxe') @decorators.idempotent_id('9ea73775-f578-40b9-bc34-efc639c4f21f') def test_get_node_boot_device(self): body = self.client.get_node_boot_device(self.node['uuid']) self.assertIn('boot_device', body) self.assertIn('persistent', body) self.assertIsInstance(body['boot_device'], six.string_types) self.assertIsInstance(body['persistent'], bool) @decorators.idempotent_id('3622bc6f-3589-4bc2-89f3-50419c66b133') def test_get_node_supported_boot_devices(self): body = self.client.get_node_supported_boot_devices(self.node['uuid']) self.assertIn('supported_boot_devices', body) self.assertIsInstance(body['supported_boot_devices'], list) @decorators.idempotent_id('f63b6288-1137-4426-8cfe-0d5b7eb87c06') def test_get_console(self): _, body = self.client.get_console(self.node['uuid']) con_info = ['console_enabled', 'console_info'] for key in con_info: self.assertIn(key, body) @decorators.idempotent_id('80504575-9b21-4670-92d1-143b948f9437') def test_set_console_mode(self): self.client.set_console_mode(self.node['uuid'], True) waiters.wait_for_bm_node_status(self.client, self.node['uuid'], 'console_enabled', True) @decorators.idempotent_id('b02a4f38-5e8b-44b2-aed2-a69a36ecfd69') def test_get_node_by_instance_uuid(self): instance_uuid = self._associate_node_with_instance() _, body = self.client.show_node_by_instance_uuid(instance_uuid) self.assertEqual(1, len(body['nodes'])) self.assertIn(self.node['uuid'], [n['uuid'] for n in body['nodes']]) class TestNodesVif(base.BaseBaremetalTest): min_microversion = '1.28' @classmethod def skip_checks(cls): 
super(TestNodesVif, cls).skip_checks() if not CONF.service_available.neutron: raise cls.skipException('Neutron is not enabled.') def setUp(self): super(TestNodesVif, self).setUp() _, self.chassis = self.create_chassis() _, self.node = self.create_node(self.chassis['uuid']) @decorators.idempotent_id('a3d319d0-cacb-4e55-a3dc-3fa8b74880f1') def test_vif_on_port(self): """Test attachment and detachment of VIFs on the node with port. Test steps: 1) Create chassis and node in setUp. 2) Create port for the node. 3) Attach VIF to the node. 4) Check VIF info in VIFs list and port internal_info. 5) Detach VIF from the node. 6) Check that no more VIF info in VIFs list and port internal_info. """ self.useFixture( api_microversion_fixture.APIMicroversionFixture('1.28')) _, self.port = self.create_port(self.node['uuid'], data_utils.rand_mac_address()) self.client.vif_attach(self.node['uuid'], 'test-vif') _, body = self.client.vif_list(self.node['uuid']) self.assertEqual({'vifs': [{'id': 'test-vif'}]}, body) _, port = self.client.show_port(self.port['uuid']) self.assertEqual('test-vif', port['internal_info']['tenant_vif_port_id']) self.client.vif_detach(self.node['uuid'], 'test-vif') _, body = self.client.vif_list(self.node['uuid']) self.assertEqual({'vifs': []}, body) _, port = self.client.show_port(self.port['uuid']) self.assertNotIn('tenant_vif_port_id', port['internal_info']) @decorators.idempotent_id('95279515-7d0a-4f5f-987f-93e36aae5585') def test_vif_on_portgroup(self): """Test attachment and detachment of VIFs on the node with port group. Test steps: 1) Create chassis and node in setUp. 2) Create port for the node. 3) Create port group for the node. 4) Plug port into port group. 5) Attach VIF to the node. 6) Check VIF info in VIFs list and port group internal_info, but not in port internal_info. 7) Detach VIF from the node. 8) Check that no VIF info in VIFs list and port group internal_info. 
""" self.useFixture( api_microversion_fixture.APIMicroversionFixture('1.28')) _, self.port = self.create_port(self.node['uuid'], data_utils.rand_mac_address()) _, self.portgroup = self.create_portgroup( self.node['uuid'], address=data_utils.rand_mac_address()) patch = [{'path': '/portgroup_uuid', 'op': 'add', 'value': self.portgroup['uuid']}] self.client.update_port(self.port['uuid'], patch) self.client.vif_attach(self.node['uuid'], 'test-vif') _, body = self.client.vif_list(self.node['uuid']) self.assertEqual({'vifs': [{'id': 'test-vif'}]}, body) _, port = self.client.show_port(self.port['uuid']) self.assertNotIn('tenant_vif_port_id', port['internal_info']) _, portgroup = self.client.show_portgroup(self.portgroup['uuid']) self.assertEqual('test-vif', portgroup['internal_info']['tenant_vif_port_id']) self.client.vif_detach(self.node['uuid'], 'test-vif') _, body = self.client.vif_list(self.node['uuid']) self.assertEqual({'vifs': []}, body) _, portgroup = self.client.show_portgroup(self.portgroup['uuid']) self.assertNotIn('tenant_vif_port_id', portgroup['internal_info']) @decorators.idempotent_id('a3d319d0-cacb-4e55-a3dc-3fa8b74880f2') def test_vif_already_set_on_extra(self): self.useFixture( api_microversion_fixture.APIMicroversionFixture('1.28')) _, self.port = self.create_port(self.node['uuid'], data_utils.rand_mac_address()) patch = [{'path': '/extra/vif_port_id', 'op': 'add', 'value': 'test-vif'}] self.client.update_port(self.port['uuid'], patch) _, body = self.client.vif_list(self.node['uuid']) self.assertEqual({'vifs': [{'id': 'test-vif'}]}, body) self.assertRaises(lib_exc.Conflict, self.client.vif_attach, self.node['uuid'], 'test-vif') self.client.vif_detach(self.node['uuid'], 'test-vif')
NaohiroTamura/ironic
ironic_tempest_plugin/tests/api/admin/test_nodes.py
Python
apache-2.0
11,622
import Container from "@extension-kid/core/dist/src/Container"; const mockRequest = jest.fn(); jest.mock("@dcos/http-service", () => ({ request: mockRequest, })); import { marbles, observe } from "rxjs-marbles/jest"; import { of } from "rxjs"; import { catchError, take, tap } from "rxjs/operators"; import gql from "graphql-tag"; import extensionFactory from "#SRC/js/data/ui-update"; import dataLayerContainerModuleFactory, { DataLayerType, } from "@extension-kid/data-layer"; import DataLayer from "@extension-kid/data-layer/dataLayer"; function createTestContainer() { const container = new Container(); container.load(dataLayerContainerModuleFactory()); const uiUpdateModule = extensionFactory(); if (uiUpdateModule) { container.load(uiUpdateModule); } else { throw new Error("Failed to get ui data-layer extension module"); } return container; } describe("UI-Update Service data-layer", () => { let container: Container | null = null; let dl: DataLayer | null = null; beforeEach(() => { jest.clearAllMocks(); container = createTestContainer(); dl = container.get<DataLayer>(DataLayerType); }); afterEach(() => { dl = null; container = null; }); describe("Query", () => { describe("ui", () => { it( "handles a graphql query", marbles((m) => { const reqResp$ = m.cold("--j|", { j: { response: { default: false, packageVersion: "2.50.1", buildVersion: "master+v2.50.1+hfges", }, code: 200, message: "OK", }, }); mockRequest.mockReturnValue(reqResp$); window.DCOS_UI_VERSION = "unit_test+v2.50.1"; const query = gql` query { ui { clientBuild packageVersion packageVersionIsDefault serverBuild } } `; if (dl === null) { throw new Error(); } const queryResult$ = dl.query(query, {}).pipe(take(1)); const expected$ = m.cold("--(j|)", { j: { data: { ui: { clientBuild: "v2.50.1", packageVersion: "2.50.1", packageVersionIsDefault: false, serverBuild: "v2.50.1", }, }, }, }); m.expect(queryResult$).toBeObservable(expected$); }) ); it( "doesn't call api just for clientBuild", observe(() => { const 
reqResp = { response: { default: false, packageVersion: "2.50.1", buildVersion: "master+v2.50.1+hfges", }, code: 200, message: "OK", }; mockRequest.mockReturnValue(of(reqResp)); window.DCOS_UI_VERSION = "unit_test+v1.0.0"; const query = gql` query { ui { clientBuild } } `; if (dl === null) { throw new Error(); } return dl.query(query, {}).pipe( take(1), tap((value) => { expect(mockRequest.mock.calls.length).toEqual(0); expect(value).toEqual({ data: { ui: { clientBuild: "v1.0.0", }, }, }); }) ); }) ); it( "makes a single version request", observe(() => { const reqResp = { response: { default: false, packageVersion: "2.50.1", buildVersion: "master+v2.50.1+hfges", }, code: 200, message: "OK", }; mockRequest.mockReturnValue(of(reqResp)); const query = gql` query { ui { packageVersion packageVersionIsDefault } } `; if (dl === null) { throw new Error(); } return dl.query(query, {}).pipe( take(1), tap(() => { expect(mockRequest.mock.calls.length).toEqual(1); }) ); }) ); it( "emits an error for non-2XX responses", marbles((m) => { const reqResp$ = m.cold("j|", { j: { response: "There is a problem", code: 500, message: "Internal Server Error", }, }); mockRequest.mockReturnValue(reqResp$); const query = gql` query { ui { packageVersion packageVersionIsDefault } } `; if (dl === null) { throw new Error(); } const queryResult$ = dl.query(query, {}).pipe(take(1)); m.expect(queryResult$).toBeObservable( m.cold("#", undefined, { message: "There is a problem", name: "Error", }) ); }) ); it( "tries 3 requests before failing", observe(() => { mockRequest.mockImplementation(() => of({ response: "There is a problem", code: 500, message: "Internal Server Error", }) ); const query = gql` query { ui { packageVersion } } `; if (dl === null) { throw new Error(); } return dl.query(query, {}).pipe( take(1), catchError(() => { expect(mockRequest.mock.calls.length).toEqual(3); return of({}); }) ); }) ); it( "can handle up to two errors", marbles((m) => { const responses = [ { response: "There 
is a problem", code: 500, message: "Internal Server Error", }, { response: "There is a problem", code: 500, message: "Internal Server Error", }, { response: { default: false, packageVersion: "2.50.1", buildVersion: "master+v2.50.1+hfges", }, code: 200, message: "OK", }, ]; mockRequest.mockImplementation(() => m.cold("--j|", { j: responses.shift(), }) ); const query = gql` query { ui { packageVersion packageVersionIsDefault } } `; if (dl === null) { throw new Error(); } const queryResult$ = dl.query(query, {}).pipe(take(1)); const expected$ = m.cold("------(j|)", { j: { data: { ui: { packageVersion: "2.50.1", packageVersionIsDefault: false, }, }, }, }); m.expect(queryResult$).toBeObservable(expected$); }) ); }); }); describe("Mutations", () => { describe("#updateDCOSUI", () => { it( "handles executing a mutation", marbles((m) => { const reqResp$ = m.cold("--j|", { j: { code: 200, message: "OK", response: "Update to 1.1.0 completed", }, }); mockRequest.mockReturnValueOnce(reqResp$); const updateMutation = gql` mutation { updateDCOSUI(newVersion: $version) } `; if (dl === null) { throw new Error(); } const mutationResult$ = dl .query(updateMutation, { version: "1.1.0", }) .pipe(take(1)); m.expect(mutationResult$).toBeObservable( m.cold("--(j|)", { j: { data: { updateDCOSUI: "Complete: Update to 1.1.0 completed", }, }, }) ); }) ); it( "emits an error if request fails", marbles((m) => { const reqResp$ = m.cold("--j|", { j: { code: 500, message: "Internal Server Error", response: "Failed", }, }); mockRequest.mockReturnValueOnce(reqResp$); const updateMutation = gql` mutation { updateDCOSUI(newVersion: $version) } `; if (dl === null) { throw new Error(); } const mutationResult$ = dl .query(updateMutation, { version: "1.1.0", }) .pipe(take(1)); m.expect(mutationResult$).toBeObservable( m.cold("--#", undefined, { message: "Failed", name: "Error", }) ); }) ); }); describe("#resetDCOSUI", () => { it( "handles executing a mutation", marbles((m) => { const reqResp$ = 
m.cold("--j|", { j: { code: 200, message: "OK", response: "OK", }, }); mockRequest.mockReturnValueOnce(reqResp$); const resetMutation = gql` mutation { resetDCOSUI } `; if (dl === null) { throw new Error(); } const mutationResult$ = dl.query(resetMutation, {}).pipe(take(1)); m.expect(mutationResult$).toBeObservable( m.cold("--(j|)", { j: { data: { resetDCOSUI: "Complete: OK", }, }, }) ); }) ); it( "emits an error if request fails", marbles((m) => { const reqResp$ = m.cold("--j|", { j: { code: 500, message: "Internal Server Error", response: "Failed", }, }); mockRequest.mockReturnValueOnce(reqResp$); const resetMutation = gql` mutation { resetDCOSUI } `; if (dl === null) { throw new Error(); } const mutationResult$ = dl.query(resetMutation, {}).pipe(take(1)); m.expect(mutationResult$).toBeObservable( m.cold("--#", undefined, { message: "Failed", name: "Error", }) ); }) ); }); }); });
dcos/dcos-ui
src/js/data/ui-update/__tests__/index-test.ts
TypeScript
apache-2.0
11,178
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.compiler.integrationtests.operators; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Stream; import org.assertj.core.api.Assertions; import org.drools.core.base.ClassObjectType; import org.drools.kiesession.rulebase.InternalKnowledgeBase; import org.drools.core.reteoo.EntryPointNode; import org.drools.core.reteoo.FromNode; import org.drools.core.reteoo.LeftInputAdapterNode; import org.drools.core.reteoo.LeftTupleSink; import org.drools.core.reteoo.ObjectTypeNode; import org.drools.core.reteoo.Sink; import org.drools.core.rule.EntryPointId; import org.drools.testcoverage.common.model.Address; import org.drools.testcoverage.common.model.Cheese; import org.drools.testcoverage.common.model.Cheesery; import org.drools.testcoverage.common.model.DomainObject; import org.drools.testcoverage.common.model.DomainObjectHolder; import org.drools.testcoverage.common.model.Order; import org.drools.testcoverage.common.model.OrderItem; import org.drools.testcoverage.common.model.Person; import org.drools.testcoverage.common.model.Pet; import org.drools.testcoverage.common.util.KieBaseTestConfiguration; import org.drools.testcoverage.common.util.KieBaseUtil; import 
org.drools.testcoverage.common.util.KieSessionTestConfiguration; import org.drools.testcoverage.common.util.KieUtil; import org.drools.testcoverage.common.util.TestParametersUtil; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.kie.api.KieBase; import org.kie.api.KieBaseConfiguration; import org.kie.api.KieServices; import org.kie.api.builder.KieBuilder; import org.kie.api.builder.KieModule; import org.kie.api.builder.ReleaseId; import org.kie.api.runtime.KieContainer; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.rule.EntryPoint; import org.kie.api.runtime.rule.FactHandle; import org.kie.internal.builder.conf.LanguageLevelOption; import org.kie.internal.builder.conf.PropertySpecificOption; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; @RunWith(Parameterized.class) public class FromTest { private final KieBaseTestConfiguration kieBaseTestConfiguration; public FromTest(final KieBaseTestConfiguration kieBaseTestConfiguration) { this.kieBaseTestConfiguration = kieBaseTestConfiguration; } @Parameterized.Parameters(name = "KieBase type={0}") public static Collection<Object[]> getParameters() { return TestParametersUtil.getKieBaseCloudConfigurations(true); } public static class ListsContainer { public List<String> getList1() { return Arrays.asList( "a", "bb", "ccc" ); } public List<String> getList2() { return Arrays.asList( "1", "22", "333" ); } public Number getSingleValue() { return 1; } } @Test public void testFromSharing() { testFromSharingCommon(kieBaseTestConfiguration, new HashMap<>(), 2, 2); } public static void testFromSharingCommon(KieBaseTestConfiguration kieBaseTestConfiguration, Map<String, String> configurationProperties, int expectedNumberOfFromNode, int numberOfSinksInSecondFromNode) { // Keeping original test as non-property reactive by default, just allowed. 
final String drl = fromSharingRule(); final ReleaseId releaseId1 = KieServices.get().newReleaseId("org.kie", "from-test", "1"); configurationProperties.put(PropertySpecificOption.PROPERTY_NAME, PropertySpecificOption.ALLOWED.toString()); final KieModule kieModule = KieUtil.getKieModuleFromDrls(releaseId1, kieBaseTestConfiguration, KieSessionTestConfiguration.STATEFUL_REALTIME, configurationProperties, drl); final KieContainer kieContainer = KieServices.get().newKieContainer(kieModule.getReleaseId()); final KieBase kbase = kieContainer.getKieBase(); final KieSession ksession = kbase.newKieSession(); try { final ObjectTypeNode otn = insertObjectFireRules((InternalKnowledgeBase) kbase, ksession); // There is only 1 LIA assertEquals( 1, otn.getObjectSinkPropagator().size() ); final LeftInputAdapterNode lian = (LeftInputAdapterNode)otn.getObjectSinkPropagator().getSinks()[0]; // There are only 2 FromNodes since R2 and R3 with the sharing the second From // There will be 3 FromNodes without the sharing, that is with exec model with plain lambda and native image final LeftTupleSink[] sinks = lian.getSinkPropagator().getSinks(); assertEquals(expectedNumberOfFromNode, sinks.length ); // The first from has R1 has sink assertEquals( 1, sinks[0].getSinkPropagator().size() ); // The second from has both R2 and R3 as sinks when node sharing // When node sharing is disabled, it will only have one assertEquals(numberOfSinksInSecondFromNode, sinks[1].getSinkPropagator().size() ); } finally { ksession.dispose(); } } @Test public void testFromSharingWithPropertyReactive() { // As above but with property reactive as default final String drl = fromSharingRule(); // property reactive as default: final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final ObjectTypeNode otn = insertObjectFireRules((InternalKnowledgeBase) kbase, ksession); // There are 2 LIAs, one for the list1 
and the other for the list2 assertEquals( 2, otn.getObjectSinkPropagator().size() ); final LeftInputAdapterNode lia0 = (LeftInputAdapterNode)otn.getObjectSinkPropagator().getSinks()[0]; // There are only 2 FromNodes since R2 and R3 are sharing the second From // The first FROM node has R1 has sink final LeftTupleSink[] sinks0 = lia0.getSinkPropagator().getSinks(); assertEquals( 1, sinks0.length ); assertEquals( 1, sinks0[0].getSinkPropagator().size() ); // The second FROM node has both R2 and R3 as sinks final LeftInputAdapterNode lia1 = (LeftInputAdapterNode)otn.getObjectSinkPropagator().getSinks()[1]; final LeftTupleSink[] sinks1 = lia1.getSinkPropagator().getSinks(); assertEquals( 1, sinks1.length ); assertEquals( 2, sinks1[0].getSinkPropagator().size() ); } finally { ksession.dispose(); } } public static String fromSharingRule() { return "import " + ListsContainer.class.getCanonicalName() + "\n" + "global java.util.List output1;\n" + "global java.util.List output2;\n" + "rule R1 when\n" + " ListsContainer( $list : list1 )\n" + " $s : String( length == 2 ) from $list\n" + "then\n" + " output1.add($s);\n" + "end\n" + "rule R2 when\n" + " ListsContainer( $list : list2 )\n" + " $s : String( length == 2 ) from $list\n" + "then\n" + " output2.add($s);\n" + "end\n" + "rule R3 when\n" + " ListsContainer( $list : list2 )\n" + " $s : String( length == 2 ) from $list\n" + "then\n" + " output2.add($s);\n" + "end\n"; } private static ObjectTypeNode insertObjectFireRules(InternalKnowledgeBase kbase, KieSession ksession) { final List<String> output1 = new ArrayList<>(); ksession.setGlobal("output1", output1); final List<String> output2 = new ArrayList<>(); ksession.setGlobal("output2", output2); ksession.insert(new ListsContainer()); ksession.fireAllRules(); assertEquals("bb", output1.get(0)); assertEquals("22", output2.get(0)); assertEquals("22", output2.get(1)); final EntryPointNode epn = kbase.getRete().getEntryPointNodes().values().iterator().next(); return 
epn.getObjectTypeNodes().get(new ClassObjectType(ListsContainer.class)); } @Test public void testFromSharingWithAccumulate() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "\n" + "import java.util.List;\n" + "import java.util.ArrayList;\n" + "import " + Cheesery.class.getCanonicalName() + " ;\n" + "import " + Cheese.class.getCanonicalName() + " ;\n" + "\n" + "global java.util.List output1;\n" + "global java.util.List output2;\n" + "\n" + "rule R1\n" + " when\n" + " $cheesery : Cheesery()\n" + " $list : List( ) from accumulate( $cheese : Cheese( ) from $cheesery.getCheeses(),\n" + " init( List l = new ArrayList(); ),\n" + " action( l.add( $cheese ); )\n" + " result( l ) )\n" + " then\n" + " output1.add( $list );\n" + "end\n" + "rule R2\n" + " when\n" + " $cheesery : Cheesery()\n" + " $list : List( ) from accumulate( $cheese : Cheese( ) from $cheesery.getCheeses(),\n" + " init( List l = new ArrayList(); ),\n" + " action( l.add( $cheese ); )\n" + " result( l ) )\n" + " then\n" + " output2.add( $list );\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); EntryPointNode epn = (( InternalKnowledgeBase ) kbase).getRete().getEntryPointNode( EntryPointId.DEFAULT ); ObjectTypeNode otn = epn.getObjectTypeNodes().get( new ClassObjectType( Cheesery.class) ); Sink[] otnSinks = otn.getSinks(); assertEquals( 1, otnSinks.length ); LeftInputAdapterNode lia = (LeftInputAdapterNode) otnSinks[0]; Sink[] liaSinks = lia.getSinks(); // there must be only 1 shared from node assertEquals( 1, Stream.of(liaSinks).filter( sink -> sink instanceof FromNode ).count() ); final KieSession ksession = kbase.newKieSession(); try { final List<?> output1 = new ArrayList<>(); ksession.setGlobal( "output1", output1 ); final List<?> output2 = new ArrayList<>(); ksession.setGlobal( "output2", output2 ); final Cheesery cheesery = new Cheesery(); cheesery.addCheese( new Cheese( "stilton", 8 ) ); 
cheesery.addCheese( new Cheese("provolone", 8 ) ); final FactHandle cheeseryHandle = ksession.insert(cheesery ); ksession.fireAllRules(); assertEquals( 1, output1.size() ); assertEquals( 2, ( (List) output1.get( 0 ) ).size() ); assertEquals( 1, output2.size() ); assertEquals( 2, ( (List) output2.get( 0 ) ).size() ); output1.clear(); output2.clear(); ksession.update( cheeseryHandle, cheesery ); ksession.fireAllRules(); assertEquals( 1, output1.size() ); assertEquals( 2, ( (List) output1.get( 0 ) ).size() ); assertEquals( 1, output2.size() ); assertEquals( 2, ( (List) output2.get( 0 ) ).size() ); } finally { ksession.dispose(); } } @Test public void testFromWithSingleValue() { // DROOLS-1243 final String drl = "import " + ListsContainer.class.getCanonicalName() + "\n" + "global java.util.List out;\n" + "rule R1 when\n" + " $list : ListsContainer( )\n" + " $s : Integer() from $list.singleValue\n" + "then\n" + " out.add($s);\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List<Integer> out = new ArrayList<>(); ksession.setGlobal( "out", out ); ksession.insert( new ListsContainer() ); ksession.fireAllRules(); assertEquals( 1, out.size() ); assertEquals( 1, (int)out.get(0) ); } finally { ksession.dispose(); } } @Test public void testFromWithSingleValueAndIncompatibleType() { // DROOLS-1243 final String drl = "import " + ListsContainer.class.getCanonicalName() + "\n" + "global java.util.List out;\n" + "rule R1 when\n" + " $list : ListsContainer( )\n" + " $s : String() from $list.singleValue\n" + "then\n" + " out.add($s);\n" + "end\n"; final KieBuilder kieBuilder = KieUtil.getKieBuilderFromDrls(kieBaseTestConfiguration, false, drl); Assertions.assertThat(kieBuilder.getResults().getMessages()).isNotEmpty(); } public static class Container2 { private final Number wrapped; public Container2(final Number wrapped) { this.wrapped = wrapped; } 
public Number getSingleValue() { return this.wrapped; } } @Test public void testFromWithInterfaceAndAbstractClass() { final String drl = "import " + Container2.class.getCanonicalName() + "\n" + "import " + Comparable.class.getCanonicalName() + "\n" + "global java.util.List out;\n" + "rule R1 when\n" + " $c2 : Container2( )\n" + " $s : Comparable() from $c2.singleValue\n" + "then\n" + " out.add($s);\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List<Integer> out = new ArrayList<>(); ksession.setGlobal( "out", out ); ksession.insert( new Container2(1) ); ksession.fireAllRules(); assertEquals( 1, out.size() ); assertEquals( 1, (int)out.get(0) ); out.clear(); ksession.insert( new Container2( new AtomicInteger(1) ) ); ksession.fireAllRules(); assertEquals( 0, out.size() ); } finally { ksession.dispose(); } } public static class Container2b { private final AtomicInteger wrapped; public Container2b(final AtomicInteger wrapped) { this.wrapped = wrapped; } public AtomicInteger getSingleValue() { return this.wrapped; } } public interface CustomIntegerMarker {} public static class CustomInteger extends AtomicInteger implements CustomIntegerMarker { public CustomInteger(final int initialValue) { super(initialValue); } } @Test public void testFromWithInterfaceAndConcreteClass() { final String drl = "import " + Container2b.class.getCanonicalName() + "\n" + "import " + CustomIntegerMarker.class.getCanonicalName() + "\n" + "global java.util.List out;\n" + "rule R1 when\n" + " $c2 : Container2b( )\n" + " $s : CustomIntegerMarker() from $c2.singleValue\n" + "then\n" + " out.add($s);\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List<AtomicInteger> out = new ArrayList<>(); ksession.setGlobal( "out", out ); 
ksession.insert( new Container2b( new CustomInteger(1) ) ); ksession.fireAllRules(); assertEquals( 1, out.size() ); assertEquals( 1, out.get(0).get() ); out.clear(); ksession.insert( new Container2b( new AtomicInteger(1) ) ); ksession.fireAllRules(); assertEquals( 0, out.size() ); } finally { ksession.dispose(); } } public static class Container3 { private final Integer wrapped; public Container3(final Integer wrapped) { this.wrapped = wrapped; } public Integer getSingleValue() { return this.wrapped; } } @Test public void testFromWithInterfaceAndFinalClass() { final String drl = "import " + Container3.class.getCanonicalName() + "\n" + "import " + CustomIntegerMarker.class.getCanonicalName() + "\n" + "global java.util.List out;\n" + "rule R1 when\n" + " $c3 : Container3( )\n" + " $s : CustomIntegerMarker() from $c3.singleValue\n" + "then\n" + " out.add($s);\n" + "end\n"; final KieBuilder kieBuilder = KieUtil.getKieBuilderFromDrls(kieBaseTestConfiguration, false, drl); Assertions.assertThat(kieBuilder.getResults().getMessages()).isNotEmpty(); } @Test public void testBasicFrom() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + Cheese.class.getCanonicalName() + ";\n" + "import " + Cheesery.class.getCanonicalName() + ";\n" + "import java.util.List;\n" + "\n" + "global List list1;\n" + "global List list2;\n" + "global List list3;\n" + "global Cheesery cheesery;\n" + "\n" + "rule \"test from using a global\"\n" + " when\n" + " $cheese : Cheese() from cheesery.getCheeses()\n" + " then\n" + " list1.add( $cheese );\n" + "end\n" + "\n" + "\n" + "rule \"test from using a declaration\"\n" + " when\n" + " $ch : Cheesery()\n" + " $cheese : Cheese() from $ch.getCheeses()\n" + " then\n" + " list2.add( $cheese );\n" + "end\n" + "\n" + "\n" + "rule \"test from with filter\"\n" + " when\n" + " $cheese : Cheese(type == \"stilton\" ) from cheesery.getCheeses()\n" + " then\n" + " list3.add( $cheese );\n" + "end"; final KieBase kbase = 
KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List list1 = new ArrayList(); ksession.setGlobal("list1", list1); final List list2 = new ArrayList(); ksession.setGlobal("list2", list2); final List list3 = new ArrayList(); ksession.setGlobal("list3", list3); final Cheesery cheesery = new Cheesery(); final Cheese stilton = new Cheese("stilton", 12); final Cheese cheddar = new Cheese("cheddar", 15); cheesery.addCheese(stilton); cheesery.addCheese(cheddar); ksession.setGlobal("cheesery", cheesery); ksession.insert(cheesery); final Person p = new Person("stilton"); ksession.insert(p); ksession.fireAllRules(); ksession.fireAllRules(); // from using a global assertEquals(2, ((List) ksession.getGlobal("list1")).size()); assertEquals(cheddar, ((List) ksession.getGlobal("list1")).get(0)); assertEquals(stilton, ((List) ksession.getGlobal("list1")).get(1)); // from using a declaration assertEquals(2, ((List) ksession.getGlobal("list2")).size()); assertEquals(cheddar, ((List) ksession.getGlobal("list2")).get(0)); assertEquals(stilton, ((List) ksession.getGlobal("list2")).get(1)); // from using a declaration assertEquals(1, ((List) ksession.getGlobal("list3")).size()); assertEquals(stilton, ((List) ksession.getGlobal("list3")).get(0)); } finally { ksession.dispose(); } } public static class ToListFunction { public List toList(final Object object1, final Object object2, final String object3, final int integer, final Map map, final List inputList) { final List<Object> list = new ArrayList<>(); list.add(object1); list.add(object2); list.add(object3); list.add(integer); list.add(map); list.add(inputList); return list; } } @Test @Ignore public void testFromWithParams() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + " \n" + "import " + ToListFunction.class.getCanonicalName() + ";\n" + "import " + Person.class.getCanonicalName() + ";\n" + "\n" + 
"global ToListFunction testObject;\n" + "global java.util.List list;\n" + "global java.lang.Object globalObject;\n" + "\n" + "rule \"test from\"\n" + " when\n" + " $person : Person()\n" + " $object : Object() from testObject.toList(globalObject, $person, \"literal\", 42, [ $person : globalObject, \"key1\" : [ \"key2\" : \"value2\"]], [$person, 42, [\"x\", \"y\"]])\n" + " then\n" + " list.add( $object );\n" + "end"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List list = new ArrayList(); final Object globalObject = new Object(); ksession.setGlobal("list", list); ksession.setGlobal("testObject", new ToListFunction()); ksession.setGlobal("globalObject", globalObject); final Person bob = new Person("bob"); ksession.insert(bob); ksession.fireAllRules(); assertEquals(6, ((List) ksession.getGlobal("list")).size()); final List array = (List) ((List) ksession.getGlobal("list")).get(0); assertEquals(3, array.size()); final Person p = (Person) array.get(0); assertEquals(p, bob); assertEquals(42, array.get(1)); final List nested = (List) array.get(2); assertEquals("x", nested.get(0)); assertEquals("y", nested.get(1)); final Map map = (Map) ((List) ksession.getGlobal("list")).get(1); assertEquals(2, map.keySet().size()); assertTrue(map.keySet().contains(bob)); assertEquals(globalObject, map.get(bob)); assertTrue(map.keySet().contains("key1")); final Map nestedMap = (Map) map.get("key1"); assertEquals(1, nestedMap.keySet().size()); assertTrue(nestedMap.keySet().contains("key2")); assertEquals("value2", nestedMap.get("key2")); assertEquals(42, ((List) ksession.getGlobal("list")).get(2)); assertEquals("literal", ((List) ksession.getGlobal("list")).get(3)); assertEquals(bob, ((List) ksession.getGlobal("list")).get(4)); assertEquals(globalObject, ((List) ksession.getGlobal("list")).get(5)); } finally { ksession.dispose(); } } public static class Results { 
public int getResultsCount() { return 1; } } public static class Storage { public Results search(Query query) { return new Results(); } } public static class Query { public Query(String pattern, String column) { } } @Test public void testFromWithNewConstructor() { final String drl = "package org.drools.compiler.integrationtests.operators\n" + "\n" + "import " + Query.class.getCanonicalName() + ";\n" + "import " + Storage.class.getCanonicalName() + ";\n" + "import " + Results.class.getCanonicalName() + ";\n" + "\n" + "rule \"Verify_1\"\n" + " when\n" + " content : Storage()\n" + " results : Results( ) from content.search(new Query(\"test\",\"field\"))\n" + " then\n" + " System.out.println( results );\n" + "end"; final KieModule kieModule = KieUtil.getKieModuleFromDrls("from-test", kieBaseTestConfiguration, drl); final KieContainer kieContainer = KieServices.get().newKieContainer(kieModule.getReleaseId()); final KieBaseConfiguration kieBaseConfiguration = kieBaseTestConfiguration.getKieBaseConfiguration(); kieBaseConfiguration.setProperty(LanguageLevelOption.PROPERTY_NAME, "DRL5"); kieContainer.newKieBase(kieBaseConfiguration); } /** * JBRULES-1415 Certain uses of from causes NullPointerException in WorkingMemoryLogger */ @Test public void testFromDeclarationWithWorkingMemoryLogger() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + Cheesery.class.getCanonicalName() + ";\n" + "import " + Cheese.class.getCanonicalName() + ";\n" + "global java.util.List list\n" + "rule \"Test Rule\"\n" + "when\n" + " $cheesery : Cheesery()\n" + " Cheese( $type : type) from $cheesery.cheeses\n" + "then\n" + " list.add( $type );\n" + "end"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession session = kbase.newKieSession(); try { final List list = new ArrayList(); session.setGlobal("list", list); final Cheesery cheesery = new Cheesery(); cheesery.addCheese(new 
Cheese("stilton", 22)); session.insert(cheesery); session.fireAllRules(); assertEquals(1, ((List) session.getGlobal("list")).size()); assertEquals("stilton", ((List) session.getGlobal("list")).get(0)); } finally { session.dispose(); } } @Test public void testFromArrayIteration() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + DomainObject.class.getCanonicalName() + ";\n" + "import " + DomainObjectHolder.class.getCanonicalName() + ";\n" + "global java.util.List list\n" + "\n" + "rule \"Test Rule\"\n" + "when\n" + " $holder : DomainObjectHolder()\n" + " $object : DomainObject( $message : message) from $holder.objects;\n" + "then\n" + " list.add( $message );\n" + "end"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession session = kbase.newKieSession(); try { final List list = new ArrayList(); session.setGlobal("list", list); session.insert(new DomainObjectHolder()); session.fireAllRules(); assertEquals(3, list.size()); assertEquals("Message3", list.get(0)); assertEquals("Message2", list.get(1)); assertEquals("Message1", list.get(2)); } finally { session.dispose(); } } @Test public void testFromExprFollowedByNot() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + Person.class.getCanonicalName() + ";\n" + "import " + Pet.class.getCanonicalName() + ";\n" + "global java.util.List list;\n" + "rule \"Rule 1\"\n" + " when\n" + " p : Person ($var: pet )\n" + " Pet () from $var\n" + " not Pet ()\n" + " then\n" + " list.add( p );\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List list = new ArrayList(); ksession.setGlobal("list", list); final Person p = new Person(); p.setPet(new Pet(Pet.PetType.PARROT)); ksession.insert(p); ksession.fireAllRules(); assertEquals(1, 
list.size()); assertSame(p, list.get(0)); } finally { ksession.dispose(); } } @Test public void testFromNestedAccessors() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + Order.class.getCanonicalName() + ";\n" + "import " + OrderItem.class.getCanonicalName() + ";\n" + "global java.util.List results;\n" + "rule \"test from nested accessors\"\n" + "when\n" + " $oi : OrderItem( seq == 1 )\n" + " $os : Order.OrderStatus() from $oi.order.status\n" + "then\n" + " results.add( $os );\n" + "end"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List list = new ArrayList(); ksession.setGlobal("results", list); final Order order1 = new Order(11, "Bob"); final OrderItem item11 = new OrderItem(order1, 1); final OrderItem item12 = new OrderItem(order1, 2); order1.addItem(item11); order1.addItem(item12); ksession.insert(order1); ksession.insert(item11); ksession.insert(item12); ksession.fireAllRules(); assertEquals(1, list.size()); assertSame(order1.getStatus(), list.get(0)); } finally { ksession.dispose(); } } @Test public void testFromNodeWithMultipleBetas() { final String drl = "import " + Person.class.getCanonicalName() + ";\n" + "import " + Cheese.class.getCanonicalName() + ";\n" + "import " + Address.class.getCanonicalName() + ";\n" + "rule R1 when\n" + " $p : Person( $name : name, $addresses : addresses )\n" + " $c : Cheese( $type: type == $name )\n" + " $a : Address( street == $type, city == $name ) from $addresses\n" + "then\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final Person p = new Person("x"); p.addAddress(new Address("x", 1, "x")); p.addAddress(new Address("y", 2, "y")); ksession.insert(p); ksession.insert(new Cheese("x")); ksession.fireAllRules(); } finally { 
ksession.dispose(); } } @Test public void testFromWithStrictModeOn() { // JBRULES-3533 final String drl = "import java.util.Map;\n" + "dialect \"mvel\"\n" + "rule \"LowerCaseFrom\"\n" + "when\n" + " Map($valOne : this['keyOne'] !=null)\n" + " $lowerValue : String() from $valOne.toLowerCase()\n" + "then\n" + " System.out.println( $valOne.toLowerCase() );\n" + "end\n"; final KieBuilder kieBuilder = KieUtil.getKieBuilderFromDrls(kieBaseTestConfiguration, false, drl); Assertions.assertThat(kieBuilder.getResults().getMessages()).isNotEmpty(); } @Test public void testJavaImplicitWithFrom() { testDialectWithFrom("java"); } @Test public void testMVELImplicitWithFrom() { testDialectWithFrom("mvel"); } private void testDialectWithFrom(final String dialect) { final String drl = "" + "package org.drools.compiler.test \n" + "import java.util.List \n" + "global java.util.List list \n" + "global java.util.List list2 \n" + "rule \"show\" dialect \"" + dialect + "\" \n" + "when \n" + " $m : List( eval( size == 0 ) ) from [list] \n" + "then \n" + " list2.add('r1'); \n" + "end \n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List list = new ArrayList(); ksession.setGlobal("list", list); ksession.setGlobal("list2", list); ksession.fireAllRules(); assertEquals("r1", list.get(0)); } finally { ksession.dispose(); } } @Test public void testMultipleFroms() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import java.util.List;\n" + "import " + Cheesery.class.getCanonicalName() + ";\n" + "import " + Cheese.class.getCanonicalName() + ";\n" + "global Cheesery cheesery;\n" + "global java.util.List results;\n" + "\n" + "rule MyRule\n" + " dialect \"java\"\n" + "when\n" + " $i : List() from collect(Cheese() from cheesery.getCheeses())\n" + " $k : List() from collect(Cheese() from cheesery.getCheeses())\n" + "then\n" + " results.add( 
$i );\n" + " results.add( $k );\n" + "end"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final List results = new ArrayList(); ksession.setGlobal("results", results); final Cheesery cheesery = new Cheesery(); cheesery.addCheese(new Cheese("stilton", 15)); cheesery.addCheese(new Cheese("brie", 10)); ksession.setGlobal("cheesery", cheesery); ksession.fireAllRules(); assertEquals(2, results.size()); assertEquals(2, ((List) results.get(0)).size()); assertEquals(2, ((List) results.get(1)).size()); } finally { ksession.dispose(); } } @Test public void testNetworkBuildErrorAcrossEntryPointsAndFroms() { final String drl = "package org.drools.compiler.integrationtests.operators;\n" + "import " + Person.class.getCanonicalName() + ";\n" + "import " + Cheese.class.getCanonicalName() + ";\n" + "global java.util.List list\n" + "rule rule1\n" + "when\n" + " Cheese() from entry-point \"testep\"\n" + " $p : Person() from list\n" + "then \n" + " list.add( \"rule1\" ) ;\n" + " insert( $p );\n" + "end\n" + "rule rule2\n" + "when\n" + " $p : Person() \n" + "then \n" + " list.add( \"rule2\" ) ;\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); try { final EntryPoint ep = ksession.getEntryPoint("testep"); final List list = new ArrayList(); ksession.setGlobal("list", list); list.add(new Person("darth")); ep.insert(new Cheese("cheddar")); ksession.fireAllRules(); assertEquals(3, list.size()); } finally { ksession.dispose(); } } @Test public void testUpdateFromCollect() { // DROOLS-6504 final String drl = "import " + List.class.getCanonicalName() + ";\n" + "import " + ClassWithValues.class.getCanonicalName() + ";\n" + "rule R when\n" + " $cwvs: List(size > 0) from collect (ClassWithValues(values.size == 0))\n" + " $values: List(size > 0) from 
collect (String())\n" + " then\n" + " \n" + " for (ClassWithValues cwv: (List<ClassWithValues>)$cwvs) {\n" + " cwv.add(\"not in memory\");\n" + " ((List<String>)$values).forEach(cwv::add);\n" + " update(cwv);\n" + " }\n" + "end\n"; final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("from-test", kieBaseTestConfiguration, drl); final KieSession ksession = kbase.newKieSession(); ksession.insert(new ClassWithValues()); ksession.insert("test"); assertEquals(1, ksession.fireAllRules()); } public static class ClassWithValues { private List<String> values = new ArrayList<>(); public void add(String value) { this.values.add(value); } public List<String> getValues() { return this.values; } } }
manstis/drools
drools-test-coverage/test-compiler-integration/src/test/java/org/drools/compiler/integrationtests/operators/FromTest.java
Java
apache-2.0
44,341
/* ======================================================================== * PlantUML : a free UML diagram generator * ======================================================================== * * Project Info: http://plantuml.com * * This file is part of Smetana. * Smetana is a partial translation of Graphviz/Dot sources from C to Java. * * (C) Copyright 2009-2017, Arnaud Roques * * This translation is distributed under the same Licence as the original C program: * ************************************************************************* * Copyright (c) 2011 AT&T Intellectual Property * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: See CVS logs. Details at http://www.graphviz.org/ ************************************************************************* * * THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC * LICENSE ("AGREEMENT"). [Eclipse Public License - v 1.0] * * ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES * RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. * * You may obtain a copy of the License at * * http://www.eclipse.org/legal/epl-v10.html * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package gen.lib.common; import static smetana.core.Macro.UNSUPPORTED; public class intset__c { //1 9k44uhd5foylaeoekf3llonjq // extern Dtmethod_t* Dtset //1 1ahfywsmzcpcig2oxm7pt9ihj // extern Dtmethod_t* Dtbag //1 anhghfj3k7dmkudy2n7rvt31v // extern Dtmethod_t* Dtoset //1 5l6oj1ux946zjwvir94ykejbc // extern Dtmethod_t* Dtobag //1 2wtf222ak6cui8cfjnw6w377z // extern Dtmethod_t* Dtlist //1 d1s1s6ibtcsmst88e3057u9r7 // extern Dtmethod_t* Dtstack //1 axa7mflo824p6fspjn1rdk0mt // extern Dtmethod_t* Dtqueue //1 ega812utobm4xx9oa9w9ayij6 // extern Dtmethod_t* Dtdeque //1 cyfr996ur43045jv1tjbelzmj // extern Dtmethod_t* Dtorder //1 wlofoiftbjgrrabzb2brkycg // extern Dtmethod_t* Dttree //1 12bds94t7voj7ulwpcvgf6agr // extern Dtmethod_t* Dthash //1 9lqknzty480cy7zsubmabkk8h // extern Dtmethod_t _Dttree //1 bvn6zkbcp8vjdhkccqo1xrkrb // extern Dtmethod_t _Dthash //1 9lidhtd6nsmmv3e7vjv9e10gw // extern Dtmethod_t _Dtlist //1 34ujfamjxo7xn89u90oh2k6f8 // extern Dtmethod_t _Dtqueue //1 3jy4aceckzkdv950h89p4wjc8 // extern Dtmethod_t _Dtstack //3 8qgr88nich230f7xchwdzz29a // static void* mkIntItem(Dt_t* d,intitem* obj,Dtdisc_t* disc) public static Object mkIntItem(Object... arg) { UNSUPPORTED("8fbx43f8g4rod2yqfdymsxdnx"); // static void* UNSUPPORTED("a10lzkc4r102m2qlk5imsvovv"); // mkIntItem(Dt_t* d,intitem* obj,Dtdisc_t* disc) UNSUPPORTED("yo7buicdiu29rv5vxhas0v3s"); // { UNSUPPORTED("ekjuvztgs19rbqj0v3lmfo01q"); // intitem* np = (intitem*)zmalloc(sizeof(intitem)); UNSUPPORTED("506xq20ierdh2vdh0oxwa7m8v"); // np->id = obj->id; UNSUPPORTED("184diuw6jvuoeak7fkbqm6fr6"); // return (void*)np; UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } //3 bh6e5ln10dj2wbs1o7bpt00i8 // static void freeIntItem(Dt_t* d,intitem* obj,Dtdisc_t* disc) public static Object freeIntItem(Object... 
arg) { UNSUPPORTED("e2z2o5ybnr5tgpkt8ty7hwan1"); // static void UNSUPPORTED("criv4pqv4650sbunujgt3rbya"); // freeIntItem(Dt_t* d,intitem* obj,Dtdisc_t* disc) UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // { UNSUPPORTED("4neq75mnpa0cym29pxiizrkz3"); // free (obj); UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } //3 47m5dtm8t3vpv956ejy5w6th9 // static int cmpid(Dt_t* d, int* key1, int* key2, Dtdisc_t* disc) public static Object cmpid(Object... arg) { UNSUPPORTED("eyp5xkiyummcoc88ul2b6tkeg"); // static int UNSUPPORTED("1si01iycmvt6w5p4npvowvekx"); // cmpid(Dt_t* d, int* key1, int* key2, Dtdisc_t* disc) UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // { UNSUPPORTED("1cu94gjng90rrt7xtp42ifr1"); // if (*key1 > *key2) return 1; UNSUPPORTED("3h1tdtwyfqaqhne2o7pz8yq9h"); // else if (*key1 < *key2) return -1; UNSUPPORTED("7lrkjjj5lce2uf86c1y9o9yoa"); // else return 0; UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } //1 8lrju563ta1xqsy4xixotjo3l // static Dtdisc_t intSetDisc = //3 2xsz5eza9h7l039872rv37hsv // Dt_t* openIntSet (void) public static Object openIntSet(Object... arg) { UNSUPPORTED("96gezykql110n8xkno0gtdrmq"); // Dt_t* UNSUPPORTED("68yvszwdppo58pmasvi3gqm0a"); // openIntSet (void) UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // { UNSUPPORTED("bjd7jaxyhis3c4zl58948rs5w"); // return dtopen(&intSetDisc,Dtoset); UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } //3 djjwoeky7tlyxdg2zx3x4pdib // void addIntSet (Dt_t* is, int v) public static Object addIntSet(Object... 
arg) { UNSUPPORTED("347dderd02mvlozoheqo4ejwo"); // void UNSUPPORTED("uq3ewaypmpqc0nyc6mp0osz6"); // addIntSet (Dt_t* is, int v) UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // { UNSUPPORTED("2rjvy9cij6zo2nowb4w37hnjt"); // intitem obj; UNSUPPORTED("5ne3fj3dr6iouu1y3bnx3lcd7"); // obj.id = v; UNSUPPORTED("2ugf3ujkfn9ofv5ykvcsvr6m5"); // (*(((Dt_t*)(is))->searchf))((is),(void*)(&obj),0000001); UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } //3 7w5km9pct0qd0skzhgxezhr2x // int inIntSet (Dt_t* is, int v) public static Object inIntSet(Object... arg) { UNSUPPORTED("7zkpme13g8rxxwloxvpvvnbcw"); // int UNSUPPORTED("cg6pm0yggmopx83usksgtt395"); // inIntSet (Dt_t* is, int v) UNSUPPORTED("erg9i1970wdri39osu8hx2a6e"); // { UNSUPPORTED("eckrym421t3ug9lj77hzalv1a"); // return ((*(((Dt_t*)(is))->searchf))((is),(void*)(&v),0001000) != 0); UNSUPPORTED("c24nfmv9i7o5eoqaymbibp7m7"); // } throw new UnsupportedOperationException(); } }
Banno/sbt-plantuml-plugin
src/main/java/gen/lib/common/intset__c.java
Java
apache-2.0
6,223
import Tkinter as tk import Tkinter as tk2 import code as c def g1(): g1 = tk.Tk() A = tk.Button(g1, text ="About", command = c.b1) B = tk.Button(g1, text ="Date", command = c.b2) C = tk.Button(g1, text ="Time", command = c.b3) D = tk.Button(g1, text ="CountSeconds", command = c.b4) A.pack() B.pack() C.pack() D.pack() g1.mainloop() def g2(): g2 = tk2.Tk() A = tk2.Button(g2, text ="OSInfo", command = c.b5) B = tk2.Button(g2, text ="Ghost Game", command = c.b6) A.pack() B.pack() g2.mainloop()
Prouser123/python-tk
py/gui.py
Python
apache-2.0
514
/** Copyright 2008 University of Rochester Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package edu.ur.ir.web.action.institution; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import com.opensymphony.xwork2.ActionSupport; import com.opensymphony.xwork2.Preparable; import edu.ur.ir.file.IrFile; import edu.ur.ir.institution.InstitutionalCollection; import edu.ur.ir.institution.InstitutionalCollectionService; import edu.ur.ir.news.NewsItem; import edu.ur.ir.repository.RepositoryService; /** * Delete the institutional collection picture. * * @author Nathan Sarr * */ public class DeleteInstitutionalCollectionPicture extends ActionSupport implements Preparable { /** Eclipse generated id */ private static final long serialVersionUID = 3980266197680971615L; /** Logger. */ private static final Logger log = LogManager.getLogger(DeleteInstitutionalCollectionPicture.class); /** Repository service */ private RepositoryService repositoryService; /** Institutional Collection service */ private InstitutionalCollectionService institutionalCollectionService; /** Institutional collection */ private InstitutionalCollection collection; /** News item to remove the picture from*/ private NewsItem newsItem; /** Id for the collection to load */ private Long collectionId; /** determine if the primary picture should be removed */ private boolean primaryCollectionPicture; /** picture to remove*/ private Long pictureId; /** * Load the news service. 
* * @see com.opensymphony.xwork2.Preparable#prepare() */ public void prepare() throws Exception { collection = institutionalCollectionService.getCollection(collectionId, false); } /** * Execute the delete. * * @see com.opensymphony.xwork2.ActionSupport#execute() */ public String execute() { if( log.isDebugEnabled()) { log.debug("execute delete"); } if( primaryCollectionPicture) { if( log.isDebugEnabled()) { log.debug("delete primary picture"); } IrFile primaryPicture = collection.getPrimaryPicture(); collection.setPrimaryPicture(null); repositoryService.deleteIrFile(primaryPicture); } else { if( log.isDebugEnabled()) { log.debug("delete regular picture"); } IrFile picture = repositoryService.getIrFile(pictureId, false); if(collection.removePicture(picture) ) { repositoryService.deleteIrFile(picture); } } institutionalCollectionService.saveCollection(collection); return SUCCESS; } /** * Get the news item to delete the picture from. * * @return */ public NewsItem getNewsItem() { return newsItem; } /** * Set the news item to delete the picture from. * * @param newsItem */ public void setNewsItem(NewsItem newsItem) { this.newsItem = newsItem; } /** * Set to true if the picture to be deleted is the * primary picture. * * @return */ public boolean isPrimaryCollectionPicture() { return primaryCollectionPicture; } /** * Set to true if the picture to be deleted is the primary picture. * * @param primaryNewsPicture */ public void setPrimaryCollectionPicture(boolean primaryCollectionPicture) { this.primaryCollectionPicture = primaryCollectionPicture; } /** * Get the picture id to be deleted. * * @return */ public Long getPictureId() { return pictureId; } /** * Set the picture id to be deleted. 
* * @param pictureId */ public void setPictureId(Long pictureId) { this.pictureId = pictureId; } public RepositoryService getRepositoryService() { return repositoryService; } public void setRepositoryService(RepositoryService repositoryService) { this.repositoryService = repositoryService; } public InstitutionalCollection getCollection() { return collection; } public Long getCollectionId() { return collectionId; } public void setCollectionId(Long collectionId) { this.collectionId = collectionId; } public InstitutionalCollectionService getInstitutionalCollectionService() { return institutionalCollectionService; } public void setInstitutionalCollectionService( InstitutionalCollectionService institutionalCollectionService) { this.institutionalCollectionService = institutionalCollectionService; } }
nate-rcl/irplus
ir_web/src/edu/ur/ir/web/action/institution/DeleteInstitutionalCollectionPicture.java
Java
apache-2.0
5,025
package com.kryptnostic.rhizome.pods.hazelcast; import com.geekbeast.configuration.hazelcast.DurableExecutorConfiguration; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.hazelcast.client.config.ClientConfig; import com.hazelcast.client.config.ClientNetworkConfig; import com.hazelcast.config.*; import com.kryptnostic.rhizome.configuration.RhizomeConfiguration; import com.kryptnostic.rhizome.configuration.hazelcast.HazelcastConfiguration; import com.kryptnostic.rhizome.configuration.hazelcast.HazelcastConfigurationContainer; import com.kryptnostic.rhizome.configuration.hazelcast.ScheduledExecutorConfiguration; import com.kryptnostic.rhizome.pods.ConfigurationPod; import com.kryptnostic.rhizome.pods.HazelcastPod; import java.util.function.Function; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import javax.inject.Inject; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; /** * This pod provides a basic hazelcast configuration without stream serializers, map stores, or queue stores. If * auto-registering of said object is desired use RegistryBasedHazelcastInstanceConfigurationPod. 
* * @author Drew Bailey &lt;drew@kryptnostic.com&gt; * @author Matthew Tamayo-Rios &lt;matthew@kryptnostic.com&gt; */ @Configuration @Import( { HazelcastPod.class, ConfigurationPod.class } ) public class BaseHazelcastInstanceConfigurationPod { public static final String defaultQueueName = "default"; private static final Logger logger = LoggerFactory .getLogger( BaseHazelcastInstanceConfigurationPod.class ); @Inject protected RhizomeConfiguration rhizomeConfiguration; @Bean public HazelcastConfigurationContainer getHazelcastConfiguration() { return new HazelcastConfigurationContainer( getHazelcastServerConfiguration(), getHazelcastClientConfiguration() ); } public Config getHazelcastServerConfiguration() { Optional<HazelcastConfiguration> maybeConfiguration = rhizomeConfiguration.getHazelcastConfiguration(); Preconditions.checkArgument( maybeConfiguration.isPresent(), "Hazelcast Configuration must be present to build hazelcast instance configuration." ); HazelcastConfiguration hzConfiguration = maybeConfiguration.get(); if ( hzConfiguration.isServer() ) { Config config = new Config( hzConfiguration.getInstanceName() ); config .setProperty( "hazelcast.logging.type", "slf4j" ) .setProperty( "hazelcast.slow.operation.detector.stacktrace.logging.enabled", "true" ) .setProperty( "hazelcast.map.load.chunk.size", "100000" ) .setClusterName( hzConfiguration.getGroup() ) .setSerializationConfig( serializationConfig() ) .setMapConfigs( mapConfigs() ) .setQueueConfigs( queueConfigs( config.getQueueConfig( defaultQueueName ) ) ) .setNetworkConfig( networkConfig( hzConfiguration ) ); hzConfiguration .getScheduledExecutors() .ifPresent( scheduledExecutors -> config.setScheduledExecutorConfigs( scheduledExecutorConfigs( scheduledExecutors ) ) ); hzConfiguration .getDurableExecutors() .ifPresent( durableExecutors -> config.setDurableExecutorConfigs( durableExecutorConfigs( durableExecutors ) ) ); config.getCPSubsystemConfig().setCPMemberCount( hzConfiguration.getCpMemberCount() ); 
if(hzConfiguration.getCpMemberCount()>0) { config.getCPSubsystemConfig().setGroupSize( hzConfiguration.getCpGroupSize() ); } return config; } return null; } private Map<String, DurableExecutorConfig> durableExecutorConfigs( List<DurableExecutorConfiguration> durableExecutors ) { return durableExecutors.stream() .map( durableExecutor -> { final var dec = new DurableExecutorConfig( durableExecutor.getName() ) .setPoolSize( durableExecutor.getPoolSize() ) .setCapacity( durableExecutor.getCapacity() ) .setDurability( durableExecutor.getDurability() ); if ( StringUtils.isNotBlank( durableExecutor.getSplitBrainProtectionName() ) ) { dec.setSplitBrainProtectionName( durableExecutor.getSplitBrainProtectionName() ); } return dec; } ) .collect( Collectors.toMap( DurableExecutorConfig::getName, Function.identity() ) ); } protected Map<String, ScheduledExecutorConfig> scheduledExecutorConfigs( List<ScheduledExecutorConfiguration> scheduledExecutors ) { return scheduledExecutors.stream() .map( scheduledExecutor -> { final var sec = new ScheduledExecutorConfig( scheduledExecutor.getName() ) .setPoolSize( scheduledExecutor.getPoolSize() ) .setCapacity( scheduledExecutor.getCapacity() ) .setDurability( scheduledExecutor.getDurability() ); if ( StringUtils.isNotBlank( scheduledExecutor.getSplitBrainProtectionName() ) ) { sec.setSplitBrainProtectionName( scheduledExecutor.getSplitBrainProtectionName() ); } return sec; } ) .collect( Collectors.toMap( ScheduledExecutorConfig::getName, Function.identity() ) ); } public ClientConfig getHazelcastClientConfiguration() { java.util.Optional<HazelcastConfiguration> maybeConfiguration = rhizomeConfiguration .getHazelcastConfiguration(); Preconditions.checkArgument( maybeConfiguration.isPresent(), "Hazelcast Configuration must be present to build hazelcast instance configuration." 
); HazelcastConfiguration hzConfiguration = maybeConfiguration.get(); SerializationConfig serializationConfig = serializationConfig(); logger.info( "Registering the following serializers: {}", serializationConfig ); return hzConfiguration.isServer() ? null : new ClientConfig() .setNetworkConfig( clientNetworkConfig( hzConfiguration ) ) .setClusterName( hzConfiguration.getGroup() ) .setSerializationConfig( serializationConfig ) .setProperty( "hazelcast.logging.type", "slf4j" ) .setNearCacheConfigMap( nearCacheConfigs() ); } @Bean public SerializationConfig serializationConfig() { SerializationConfig config = new SerializationConfig() .setSerializerConfigs( serializerConfigs() ) .setAllowUnsafe( true ) .setUseNativeByteOrder( true ); return config; } protected Map<String, NearCacheConfig> nearCacheConfigs() { //As of Hz 3.12 there is no default near cache. If it is added in the future we may have to handle default case return ImmutableMap.of(); } protected Map<String, MapConfig> mapConfigs() { return ImmutableMap.of(); } protected Map<String, QueueConfig> queueConfigs( QueueConfig defaultConfig ) { return queueConfigs( ImmutableMap.of( defaultQueueName, defaultConfig ) ); } protected Map<String, QueueConfig> queueConfigs( Map<String, QueueConfig> configs ) { return ImmutableMap.copyOf( configs ); } protected Collection<SerializerConfig> serializerConfigs() { return ImmutableList.of(); } public static TcpIpConfig tcpIpConfig( List<String> nodes ) { return new TcpIpConfig().setMembers( nodes ).setEnabled( true ); } public static ClientNetworkConfig clientNetworkConfig( HazelcastConfiguration hzConfiguration ) { return new ClientNetworkConfig().setAddresses( hzConfiguration.getHazelcastSeedNodes() ); } protected static NetworkConfig networkConfig( HazelcastConfiguration hzConfiguration ) { return new NetworkConfig().setPort( hzConfiguration.getPort() ).setJoin( getJoinConfig( hzConfiguration.getHazelcastSeedNodes() ) ); } protected static JoinConfig getJoinConfig( 
List<String> nodes ) { return new JoinConfig().setMulticastConfig( new MulticastConfig().setEnabled( false ).setLoopbackModeEnabled( false ) ).setTcpIpConfig( tcpIpConfig( nodes ) ); } }
kryptnostic/rhizome
src/main/java/com/kryptnostic/rhizome/pods/hazelcast/BaseHazelcastInstanceConfigurationPod.java
Java
apache-2.0
9,126
# Copyright 2015 Google, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # [START gae_flex_storage_app] require "sinatra" require "google/cloud/storage" storage = Google::Cloud::Storage.new bucket = storage.bucket ENV["GOOGLE_CLOUD_STORAGE_BUCKET"] get "/" do # Present the user with an upload form ' <form method="POST" action="/upload" enctype="multipart/form-data"> <input type="file" name="file"> <input type="submit" value="Upload"> </form> ' end post "/upload" do file_path = params[:file][:tempfile].path file_name = params[:file][:filename] # Upload file to Google Cloud Storage bucket file = bucket.create_file file_path, file_name, acl: "public" # The public URL can be used to directly access the uploaded file via HTTP file.public_url end # [END gae_flex_storage_app]
GoogleCloudPlatform/ruby-docs-samples
appengine/storage/app.rb
Ruby
apache-2.0
1,327
import {PlayerServiceStub} from './../../testing/player-stubs'; import {PlayerService} from './../shared/player.service'; import {SpinnerComponent} from './../../core/directives/spinner/spinner.component'; import {FormsModule} from '@angular/forms'; import {By} from '@angular/platform-browser'; import {NgbModule} from '@ng-bootstrap/ng-bootstrap'; import {ChartComponentStub} from './../../testing/chart-stubs'; import {ActivatedRouteStub} from './../../testing/routing-stubs'; import {ActivatedRoute} from '@angular/router'; import {async, ComponentFixture, fakeAsync, TestBed, tick} from '@angular/core/testing'; import {PlayerStatisticsComponent} from './player-statistics.component'; import {PlayerStatsService} from "../shared/player-stats.service"; import {PlayerStatsServiceStub} from "../../testing/player-stats.stubs"; import {PLAYERS} from "../../testing/data/players"; describe('PlayerStatisticsComponent', () => { let component: PlayerStatisticsComponent; let fixture: ComponentFixture<PlayerStatisticsComponent>; let activatedRoute: ActivatedRouteStub; beforeEach(async(() => { activatedRoute = new ActivatedRouteStub(); TestBed.configureTestingModule({ declarations: [ PlayerStatisticsComponent, ChartComponentStub, SpinnerComponent ], imports: [ NgbModule.forRoot(), FormsModule ], providers: [ {provide: ActivatedRoute, useValue: activatedRoute}, {provide: PlayerService, useClass: PlayerServiceStub}, {provide: PlayerStatsService, useClass: PlayerStatsServiceStub} ] }) .compileComponents(); })); function createComponent() { fixture = TestBed.createComponent(PlayerStatisticsComponent); component = fixture.componentInstance; component.playerId = '111'; activatedRoute.testParams = {league_id: '123', player_id: '111'}; fixture.detectChanges(); tick(); } it('should create', fakeAsync(() => { createComponent(); expect(component).toBeTruthy(); })); it('should have player id', fakeAsync(() => { createComponent(); expect(component.playerId).toEqual('111'); })); it('should have 
league id', fakeAsync(() => { createComponent(); expect(component.leagueId).toEqual('123'); })); it('should have rating history chart data', fakeAsync(() => { createComponent(); let ratingHistory = component.ratingHistory; expect(ratingHistory).toBeTruthy(); expect(ratingHistory.title.text).toEqual('Rating history'); expect(ratingHistory.series[0].data.length).toEqual(4); })); it('should present rating history chart', fakeAsync(() => { createComponent(); fixture.detectChanges(); tick(); let debugElement = fixture.debugElement.query(By.css('div.card div.card-body div.row div.col-8 chart')); expect(debugElement.nativeElement).toBeTruthy(); })); it('should have matches history chart data', fakeAsync(() => { createComponent(); let matchesStats = component.matchesStats; expect(matchesStats.title).toEqual('Matches statistics'); expect(matchesStats.series[0].name).toEqual('Won matches'); expect(matchesStats.series[0].data.value).toEqual(2); expect(matchesStats.series[0].data.max).toEqual(6); expect(matchesStats.series[1].name).toEqual('Lost matches') expect(matchesStats.series[1].data.value).toEqual(3); expect(matchesStats.series[1].data.max).toEqual(6); expect(matchesStats.series[2].name).toEqual('Percentage of winnings'); expect(matchesStats.series[2].data.value).toEqual(33); expect(matchesStats.series[2].data.max).toEqual(100); expect(matchesStats.series[3].name).toEqual('Sets won'); expect(matchesStats.series[3].data.value).toEqual(6); expect(matchesStats.series[3].data.max).toEqual(14); expect(matchesStats.series[4].name).toEqual('Sets lost'); expect(matchesStats.series[4].data.value).toEqual(8); expect(matchesStats.series[4].data.max).toEqual(14); })); it('should have rating min max chart data', fakeAsync(() => { createComponent(); let ratingStats = component.minMaxRatingStats; expect(ratingStats.series[0].data.rating).toEqual(1200); expect(ratingStats.series[0].data.delta).toEqual(200); expect(ratingStats.series[1].data.rating).toEqual(900); 
expect(ratingStats.series[1].data.delta).toEqual(-100); })); it('should display alert if chart for current period is empty', fakeAsync(() => { createComponent(); component.ratingHistory.series[0].data = []; fixture.detectChanges(); tick(); let debugElement = fixture.debugElement.query(By.css('div.card div.card-body div.row div.col-8 div.alert.alert-info')); expect(debugElement.nativeElement).toBeTruthy(); })); it('should display alert if rating history chart is empty', fakeAsync(() => { createComponent(); component.player = PLAYERS.find(player => player.id == '222'); fixture.detectChanges(); expect(component.displayAlert()).toBeTruthy(); let debugElement = fixture.debugElement.query(By.css('div.alert.alert-info')); expect(debugElement.nativeElement).toBeTruthy(); })); });
tomek199/elo-rating
src/main/webapp/src/app/players/player-statistics/player-statistics.component.spec.ts
TypeScript
apache-2.0
5,149
package com.graduation.yinhua.maleambry.presenter; import com.graduation.yinhua.maleambry.contract.SingleContract; /** * Created by Administrator on 2016/11/13 */ public class SinglePresenter extends BasePresenter<SingleContract.View> implements SingleContract.Presenter{ }
yinhuagithub/MaleAmbry
app/src/main/java/com/graduation/yinhua/maleambry/presenter/SinglePresenter.java
Java
apache-2.0
276
// Copyright 2014 mint.zhao.chiu@gmail.com. All rights reserved. // Use of this source code is governed by a Apache License 2.0 // that can be found in the LICENSE file. package controllers import "github.com/astaxie/beego" type PublicController struct { BaseController } // @router /message [get] func (c *PublicController) Message() { beego.ReadFromRequest(&c.Controller) c.TplNames = "public/message.html" }
ogstations/globalWays
src/gwsAdmin/controllers/public.go
GO
apache-2.0
417
package com.github.kubatatami.judonetworking.fragments; import com.github.kubatatami.judonetworking.observers.ObservableController; import com.github.kubatatami.judonetworking.observers.ObservableWrapper; import com.github.kubatatami.judonetworking.observers.ObserverHelper; import com.github.kubatatami.judonetworking.observers.WrapperObserver; /** * Created with IntelliJ IDEA. * User: jbogacki * Date: 27.02.2013 * Time: 16:39 */ public class ObserverSupportFragment extends JudoSupportFragment implements ObservableController { private ObserverHelper observerHelper = new ObserverHelper(); @Override public void onDestroyView() { super.onDestroyView(); observerHelper.onDestroy(); } @Override public void addObserverToDelete(ObservableWrapper<?> observableWrapper, WrapperObserver<?> observer) { observerHelper.addObserverToDelete(observableWrapper, observer); } }
kubatatami/JudoNetworking
observers/src/main/java/com/github/kubatatami/judonetworking/fragments/ObserverSupportFragment.java
Java
apache-2.0
931
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import atexit import getpass import logging import os import socket import subprocess import threading import time from nose.tools import assert_equal, assert_true from desktop.lib.paths import get_run_root from hadoop import pseudo_hdfs4 from liboozie.oozie_api import get_oozie from liboozie.conf import OOZIE_URL _oozie_lock = threading.Lock() LOG = logging.getLogger(__name__) class OozieServerProvider(object): """ Setup a Oozie server. """ OOZIE_TEST_PORT = '18080' OOZIE_HOME = get_run_root('ext/oozie/oozie') requires_hadoop = True is_oozie_running = False @classmethod def setup_class(cls): cls.cluster = pseudo_hdfs4.shared_cluster() cls.oozie, callback = cls._get_shared_oozie_server() cls.shutdown = [callback] @classmethod def wait_until_completion(cls, oozie_jobid, timeout=300.0, step=5): job = cls.oozie.get_job(oozie_jobid) start = time.time() while job.is_running() and time.time() - start < timeout: time.sleep(step) LOG.info('Checking status of %s...' 
% oozie_jobid) job = cls.oozie.get_job(oozie_jobid) LOG.info('[%d] Status after %d: %s' % (time.time(), time.time() - start, job)) logs = cls.oozie.get_job_log(oozie_jobid) if job.is_running(): msg = "[%d] %s took more than %d to complete: %s" % (time.time(), oozie_jobid, timeout, logs) LOG.info(msg) raise Exception(msg) else: LOG.info('[%d] Job %s took %d: %s' % (time.time(), job.id, time.time() - start, logs)) return job @classmethod def _start_oozie(cls, cluster): """ Start oozie process. """ args = [OozieServerProvider.OOZIE_HOME + '/bin/oozied.sh', 'run'] env = os.environ env['OOZIE_HTTP_PORT'] = OozieServerProvider.OOZIE_TEST_PORT conf_dir = os.path.join(cluster.log_dir, 'oozie') os.mkdir(conf_dir) env['OOZIE_LOG'] = conf_dir LOG.info("Executing %s, env %s, cwd %s" % (repr(args), repr(env), cluster._tmpdir)) process = subprocess.Popen(args=args, env=env, cwd=cluster._tmpdir, stdin=subprocess.PIPE) return process @classmethod def _reset_oozie(cls): env = os.environ args = ['rm', '-r', OozieServerProvider.OOZIE_HOME + '/data/oozie-db'] LOG.info("Executing %s, env %s" % (args, env)) subprocess.call(args, env=env) args = [OozieServerProvider.OOZIE_HOME + '/bin/ooziedb.sh', 'create', '-sqlfile', 'oozie.sql', '-run'] LOG.info("Executing %s, env %s" % (args, env)) subprocess.call(args, env=env) @classmethod def _setup_sharelib(cls): # At some point could reuse: # oozie-setup.sh sharelib create -fs FS_URI LOG.info("Copying Oozie sharelib") user_home = cls.cluster.fs.do_as_user(getpass.getuser(), cls.cluster.fs.get_home_dir) oozie_share_lib = user_home + '/share' cls.cluster.fs.do_as_user(getpass.getuser(), cls.cluster.fs.create_home_dir) cls.cluster.fs.do_as_user(getpass.getuser(), cls.cluster.fs.copyFromLocal, OozieServerProvider.OOZIE_HOME + '/share', oozie_share_lib) LOG.info("Oozie sharelib copied to %s" % oozie_share_lib) @classmethod def _get_shared_oozie_server(cls): callback = lambda: None _oozie_lock.acquire() if not OozieServerProvider.is_oozie_running: 
LOG.info('\nStarting a Mini Oozie. Requires "tools/jenkins/jenkins.sh" to be previously ran.\n') LOG.info('See https://issues.cloudera.org/browse/HUE-861\n') finish = ( OOZIE_URL.set_for_testing("http://%s:%s/oozie" % (socket.getfqdn(), OozieServerProvider.OOZIE_TEST_PORT)), ) # Setup cluster = pseudo_hdfs4.shared_cluster() cls._setup_sharelib() cls._reset_oozie() p = cls._start_oozie(cluster) def kill(): LOG.info("Killing Oozie server (pid %d)." % p.pid) os.kill(p.pid, 9) p.wait() atexit.register(kill) start = time.time() started = False sleep = 0.01 while not started and time.time() - start < 30.0: status = None try: LOG.info('Check Oozie status...') status = get_oozie().get_oozie_status() if status['systemMode'] == 'NORMAL': started = True break time.sleep(sleep) sleep *= 2 except Exception, e: LOG.info('Oozie server status not NORMAL yet: %s - %s' % (status, e)) time.sleep(sleep) sleep *= 2 pass if not started: raise Exception("Oozie server took too long to come up.") OozieServerProvider.is_oozie_running = True def shutdown(): for f in finish: f() cluster.stop() callback = shutdown _oozie_lock.release() return get_oozie(), callback class TestMiniOozie(OozieServerProvider): def test_oozie_status(self): assert_equal(get_oozie().get_oozie_status()['systemMode'], 'NORMAL') assert_true(self.cluster.fs.exists('/user/%(user)s/share/lib' % {'user': getpass.getuser()}))
2013Commons/HUE-SHARK
desktop/libs/liboozie/src/liboozie/oozie_api_test.py
Python
apache-2.0
5,769
using UnityEngine; using System.Collections; public class ShieldItem : ItemBase { public override void BeginUseItem() { base.BeginUseItem(); owner.GetComponent<AnimatorHandler>().RaiseShield(); } public override void EndUseItem() { base.EndUseItem(); owner.GetComponent<AnimatorHandler>().LowerShield(); } }
carlsc2/SuperstitionJam
Superstition Game/Assets/Scripts/Items/ShieldItem.cs
C#
apache-2.0
365
package ch.sourcepond.integrationtest.utils; import static ch.sourcepond.integrationtest.utils.Photocopier.copyTestProjectToTemporaryLocation; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.FileFilterUtils; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.InitCommand; import org.eclipse.jgit.api.errors.GitAPIException; import org.w3c.dom.Document; import org.xml.sax.SAXException; import ch.sourcepond.integrationtest.E2ETest; public class TestProject { public static final String PLUGIN_VERSION_FOR_TESTS; static { final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); try { final DocumentBuilder builder = factory.newDocumentBuilder(); try (final InputStream in = new FileInputStream("./pom.xml")) { final Document doc = builder.parse(in); final XPathFactory xpathFactory = XPathFactory.newInstance(); final XPath xpath = xpathFactory.newXPath(); PLUGIN_VERSION_FOR_TESTS = xpath.evaluate("/project/parent/version", doc); } catch (SAXException | IOException | XPathExpressionException e) { throw new ExceptionInInitializerError(e); } } catch (ParserConfigurationException e) { throw new ExceptionInInitializerError(e); } } public final File originDir; public final Git origin; public final File localDir; public final Git local; private final AtomicInteger commitCounter = new AtomicInteger(1); private MvnRunner mvnRunner = E2ETest.mvn; private TestProject(final File originDir, final Git origin, final File localDir, final Git 
local) { this.originDir = originDir; this.origin = origin; this.localDir = localDir; this.local = local; } /** * Runs a mvn command against the local repo and returns the console output. */ public List<String> mvn(final String... arguments) throws IOException { return mvnRunner.runMaven(localDir, arguments); } public List<String> mvnRelease(final String buildNumber) throws IOException, InterruptedException { return mvnRunner.runMaven(localDir, "-DbuildNumber=" + buildNumber, "releaser:release"); } public List<String> mvnReleaserNext(final String buildNumber) throws IOException, InterruptedException { return mvnRunner.runMaven(localDir, "-DbuildNumber=" + buildNumber, "releaser:next"); } public List<String> mvnRelease(final String buildNumber, final String moduleToRelease) throws IOException, InterruptedException { return mvnRunner.runMaven(localDir, "-DbuildNumber=" + buildNumber, "-DmodulesToRelease=" + moduleToRelease, "releaser:release"); } public TestProject commitRandomFile(final String module) throws IOException, GitAPIException { final File moduleDir = new File(localDir, module); if (!moduleDir.isDirectory()) { throw new RuntimeException("Could not find " + moduleDir.getCanonicalPath()); } final File random = new File(moduleDir, UUID.randomUUID() + ".txt"); random.createNewFile(); final String modulePath = module.equals(".") ? 
"" : module + "/"; local.add().addFilepattern(modulePath + random.getName()).call(); local.commit().setMessage("Commit " + commitCounter.getAndIncrement() + ": adding " + random.getName()).call(); return this; } public void pushIt() throws GitAPIException { local.push().call(); } private static TestProject project(final String name) { try { final File originDir = copyTestProjectToTemporaryLocation(name); performPomSubstitution(originDir); final InitCommand initCommand = Git.init(); initCommand.setDirectory(originDir); final Git origin = initCommand.call(); origin.add().addFilepattern(".").call(); origin.commit().setMessage("Initial commit").call(); final File localDir = Photocopier.folderForSampleProject(name); final Git local = Git.cloneRepository().setBare(false).setDirectory(localDir) .setURI(originDir.toURI().toString()).call(); return new TestProject(originDir, origin, localDir, local); } catch (final Exception e) { throw new RuntimeException("Error while creating copies of the test project", e); } } public static void performPomSubstitution(final File sourceDir) throws IOException { final File pom = new File(sourceDir, "pom.xml"); if (pom.exists()) { String xml = FileUtils.readFileToString(pom, "UTF-8"); if (xml.contains("${scm.url}")) { xml = xml.replace("${scm.url}", dirToGitScmReference(sourceDir)); } xml = xml.replace("${current.plugin.version}", PLUGIN_VERSION_FOR_TESTS); FileUtils.writeStringToFile(pom, xml, "UTF-8"); } for (final File child : sourceDir.listFiles((FileFilter) FileFilterUtils.directoryFileFilter())) { performPomSubstitution(child); } } public static String pathOf(final File file) { String path; try { path = file.getCanonicalPath(); } catch (final IOException e1) { path = file.getAbsolutePath(); } return path; } public static String dirToGitScmReference(final File sourceDir) { return "scm:git:file://localhost/" + pathOf(sourceDir).replace('\\', '/'); } public static TestProject singleModuleProject() { return project("single-module"); } 
public static TestProject useLastNumber() { return project("use-last-number"); } public static TestProject nestedProject() { return project("nested-project"); } public static TestProject nestedProjectManagedDependencies() { return project("nested-project-managed-dependencies"); } public static TestProject nestedProjectVersionSubstitution() { return project("nested-project-version-substitution"); } public static TestProject moduleWithScmTag() { return project("module-with-scm-tag"); } public static TestProject moduleWithProfilesProject() { return project("module-with-profiles"); } public static TestProject inheritedVersionsFromParent() { return project("inherited-versions-from-parent"); } public static TestProject incrementSnapshotVersionAfterRelease() { return project("increment-snapshot-version-after-release"); } public static TestProject independentVersionsProject() { return project("independent-versions"); } public static TestProject parentAsSibilngProject() { return project("parent-as-sibling"); } public static TestProject deepDependenciesProject() { return project("deep-dependencies"); } public static TestProject moduleWithTestFailure() { return project("module-with-test-failure"); } public static TestProject moduleWithSnapshotDependencies() { return project("snapshot-dependencies"); } public void setMvnRunner(final MvnRunner mvn) { this.mvnRunner = mvn; } }
SourcePond/multi-module-release-maven-plugin
release-maven-plugin/src/test/java/ch/sourcepond/integrationtest/utils/TestProject.java
Java
apache-2.0
7,054
/* * Copyright 2005 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.compiler.kie.builder.impl; import java.io.File; import org.drools.compiler.compiler.io.memory.MemoryFileSystem; import org.kie.api.builder.ReleaseId; import org.kie.api.builder.model.KieModuleModel; import org.kie.api.internal.utils.ServiceRegistry; public interface InternalKieModuleProvider { default InternalKieModule createClasspathKieModule() { return null; } InternalKieModule createKieModule( ReleaseId releaseId, KieModuleModel kieProject, File file ); InternalKieModule createKieModule( ReleaseId releaseId, KieModuleModel kieProject, MemoryFileSystem mfs ); class DrlBasedKieModuleProvider implements InternalKieModuleProvider { @Override public InternalKieModule createKieModule( ReleaseId releaseId, KieModuleModel kieProject, File file ) { return file.isDirectory() ? 
new FileKieModule( releaseId, kieProject, file ) : new ZipKieModule( releaseId, kieProject, file ); } @Override public InternalKieModule createKieModule( ReleaseId releaseId, KieModuleModel kieProject, MemoryFileSystem mfs ) { return new MemoryKieModule(releaseId, kieProject, mfs); } } static InternalKieModule get( ReleaseId releaseId, KieModuleModel kieProject, File file ) { return Factory.get().createKieModule(releaseId, kieProject, file); } static InternalKieModule get( ReleaseId releaseId, KieModuleModel kieProject, MemoryFileSystem mfs ) { return Factory.get().createKieModule(releaseId, kieProject, mfs); } static InternalKieModule getFromClasspath() { return Factory.get().createClasspathKieModule(); } class Factory { private static class LazyHolder { private static InternalKieModuleProvider INSTANCE = createZipKieModuleProvider(); } private static InternalKieModuleProvider createZipKieModuleProvider() { InternalKieModuleProvider provider = ServiceRegistry.getService(InternalKieModuleProvider.class); return provider != null ? provider : new DrlBasedKieModuleProvider(); } public static InternalKieModuleProvider get() { return LazyHolder.INSTANCE; } } }
droolsjbpm/drools
drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/InternalKieModuleProvider.java
Java
apache-2.0
2,841
/** * @licstart The following is the entire license notice for the * Javascript code in this page * * Copyright 2021 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @licend The above is the entire license notice for the * Javascript code in this page */ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.XFAFactory = void 0; var _xfa_object = require("./xfa_object.js"); var _bind = require("./bind.js"); var _data = require("./data.js"); var _fonts = require("./fonts.js"); var _utils = require("./utils.js"); var _util = require("../../shared/util.js"); var _parser = require("./parser.js"); var _xhtml = require("./xhtml.js"); class XFAFactory { constructor(data) { try { this.root = new _parser.XFAParser().parse(XFAFactory._createDocument(data)); const binder = new _bind.Binder(this.root); this.form = binder.bind(); this.dataHandler = new _data.DataHandler(this.root, binder.getData()); this.form[_xfa_object.$globalData].template = this.form; } catch (e) { (0, _util.warn)(`XFA - an error occurred during parsing and binding: ${e}`); } } isValid() { return this.root && this.form; } _createPagesHelper() { const iterator = this.form[_xfa_object.$toPages](); return new Promise((resolve, reject) => { const nextIteration = () => { try { const value = iterator.next(); if (value.done) { resolve(value.value); } else { setTimeout(nextIteration, 0); } } catch (e) { reject(e); } }; setTimeout(nextIteration, 0); }); } async _createPages() { 
try { this.pages = await this._createPagesHelper(); this.dims = this.pages.children.map(c => { const { width, height } = c.attributes.style; return [0, 0, parseInt(width), parseInt(height)]; }); } catch (e) { (0, _util.warn)(`XFA - an error occurred during layout: ${e}`); } } getBoundingBox(pageIndex) { return this.dims[pageIndex]; } async getNumPages() { if (!this.pages) { await this._createPages(); } return this.dims.length; } setImages(images) { this.form[_xfa_object.$globalData].images = images; } setFonts(fonts) { this.form[_xfa_object.$globalData].fontFinder = new _fonts.FontFinder(fonts); const missingFonts = []; for (let typeface of this.form[_xfa_object.$globalData].usedTypefaces) { typeface = (0, _utils.stripQuotes)(typeface); const font = this.form[_xfa_object.$globalData].fontFinder.find(typeface); if (!font) { missingFonts.push(typeface); } } if (missingFonts.length > 0) { return missingFonts; } return null; } appendFonts(fonts, reallyMissingFonts) { this.form[_xfa_object.$globalData].fontFinder.add(fonts, reallyMissingFonts); } async getPages() { if (!this.pages) { await this._createPages(); } const pages = this.pages; this.pages = null; return pages; } serializeData(storage) { return this.dataHandler.serialize(storage); } static _createDocument(data) { if (!data["/xdp:xdp"]) { return data["xdp:xdp"]; } return Object.values(data).join(""); } static getRichTextAsHtml(rc) { if (!rc || typeof rc !== "string") { return null; } try { let root = new _parser.XFAParser(_xhtml.XhtmlNamespace, true).parse(rc); if (!["body", "xhtml"].includes(root[_xfa_object.$nodeName])) { const newRoot = _xhtml.XhtmlNamespace.body({}); newRoot[_xfa_object.$appendChild](root); root = newRoot; } const result = root[_xfa_object.$toHTML](); if (!result.success) { return null; } const { html } = result; const { attributes } = html; if (attributes) { if (attributes.class) { attributes.class = attributes.class.filter(attr => !attr.startsWith("xfa")); } attributes.dir = "auto"; } 
return { html, str: root[_xfa_object.$text]() }; } catch (e) { (0, _util.warn)(`XFA - an error occurred during parsing of rich text: ${e}`); } return null; } } exports.XFAFactory = XFAFactory;
mozilla/pdfjs-dist
lib/core/xfa/factory.js
JavaScript
apache-2.0
4,930
package br.com.caelum.vraptor.html.tags; import br.com.caelum.vraptor.html.attributes.Attribute; import br.com.caelum.vraptor.html.tags.interfaces.NestedElement; import br.com.caelum.vraptor.html.tags.interfaces.Tag; import br.com.caelum.vraptor.html.transformers.DefaultTagTransformer; import br.com.caelum.vraptor.html.transformers.TagTransformer; public class Table implements Tag { private NestedElement[] children = new NestedElement[0]; private final Attribute[] attributes; private final TagTransformer tagTransformer = new DefaultTagTransformer(); public Table(Attribute... attributes) { this.attributes = attributes; } public NestedElement[] getChildren() { return children; } public Attribute[] getAttributes() { return attributes; } public String toHtml() { return tagTransformer.transform(this); } public Tag with(NestedElement... children) { this.children = children; return this; } public Tag with(java.lang.Object content) { return with(new Text(content)); } public Tag with(NestedElement child) { this.children = new NestedElement[] { child }; return this; } }
luiz/vraptor-html-dsl
src/main/java/br/com/caelum/vraptor/html/tags/Table.java
Java
apache-2.0
1,122
//// [functionOverloads2.js]
// Compiled output for the overload test: the implementation signature simply
// echoes its argument back to the caller.
function foo(bar) {
    return bar;
}
;
var x = foo(true);
fdecampredon/jsx-typescript-old-version
tests/baselines/reference/functionOverloads2.js
JavaScript
apache-2.0
94
package com.joshhendo.numbersearch.base; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.params.HttpConnectionParams; import java.io.*; import java.util.ArrayList; /** * com.com.joshhendo.joshhendo.joshhendo.numbersearch Copyright 2011 * User: josh * Date: 17/06/11 * Time: 10:10 PM */ public class Fetcher<T> implements Serializable { protected final int TIMEOUT = 60000; protected String name; protected String initial; protected String location; protected Integer page; protected Integer totalPages; public Fetcher() { } public Integer getTotalPages() { return totalPages; } public void setPage(Integer page) { this.page = page; } public Fetcher(String name, String initial, String location, Integer page) { this.name = name.replace(' ', '+'); this.initial = initial.replace(' ', '+'); this.location = location.replace(' ', '+'); this.page = page; this.totalPages = null; } ArrayList<T> process() { return null; } public String fetchPage(String iURL) { String url = String.format(iURL, name, initial, location); if ( page > 1 ) url += String.format("&page=%d", page); // Connection HttpClient httpClient = new DefaultHttpClient(); // Set the connection timeout HttpConnectionParams.setConnectionTimeout(httpClient.getParams(), TIMEOUT); HttpConnectionParams.setSoTimeout(httpClient.getParams(), TIMEOUT); System.out.println("URL: " + url); HttpGet httpGet = new HttpGet(url); String strResponse = null; try { HttpResponse response = httpClient.execute(httpGet); strResponse = inputStreamToString(response.getEntity().getContent()).toString(); } catch (IOException e) { e.printStackTrace(); } return strResponse; } public StringBuilder inputStreamToString(InputStream is) { String line; StringBuilder total = new StringBuilder(); // Wrap a BufferedReader around the InputStream BufferedReader rd = new BufferedReader(new 
InputStreamReader(is)); // Read response until the end try { while ((line = rd.readLine()) != null) { total.append(line); } } catch (IOException e) { e.printStackTrace(); } // Return full string return total; } }
joshhendo/NumberSearchAndroid
src/com/joshhendo/numbersearch/base/Fetcher.java
Java
apache-2.0
2,750
package apple.foundation;

import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;

import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.uikit.*;
import apple.coreanimation.*;
import apple.coredata.*;
import apple.coremedia.*;
import apple.security.*;
import apple.dispatch.*;

/**
 * Java mapping of Foundation's {@code NSPredicateOperatorType} enum, used to
 * describe the comparison operator of an NSComparisonPredicate. Each constant
 * mirrors the numeric value of the corresponding Objective-C global; note the
 * gap before {@code Contains} (99) and {@code Between} (100), which matches
 * the native declaration.
 */
@Library("Foundation/Foundation.h")
@Mapping("NSPredicateOperatorType")
public final class NSPredicateOperatorType extends ObjCEnum {

    @GlobalConstant("NSLessThanPredicateOperatorType")
    public static final long LessThan = 0L;
    @GlobalConstant("NSLessThanOrEqualToPredicateOperatorType")
    public static final long LessThanOrEqualTo = 1L;
    @GlobalConstant("NSGreaterThanPredicateOperatorType")
    public static final long GreaterThan = 2L;
    @GlobalConstant("NSGreaterThanOrEqualToPredicateOperatorType")
    public static final long GreaterThanOrEqualTo = 3L;
    @GlobalConstant("NSEqualToPredicateOperatorType")
    public static final long EqualTo = 4L;
    @GlobalConstant("NSNotEqualToPredicateOperatorType")
    public static final long NotEqualTo = 5L;
    @GlobalConstant("NSMatchesPredicateOperatorType")
    public static final long Matches = 6L;
    @GlobalConstant("NSLikePredicateOperatorType")
    public static final long Like = 7L;
    @GlobalConstant("NSBeginsWithPredicateOperatorType")
    public static final long BeginsWith = 8L;
    @GlobalConstant("NSEndsWithPredicateOperatorType")
    public static final long EndsWith = 9L;
    @GlobalConstant("NSInPredicateOperatorType")
    public static final long In = 10L;
    @GlobalConstant("NSCustomSelectorPredicateOperatorType")
    public static final long CustomSelector = 11L;
    /**
     * @since Available in iOS 3.0 and later.
     */
    @GlobalConstant("NSContainsPredicateOperatorType")
    public static final long Contains = 99L;
    /**
     * @since Available in iOS 3.0 and later.
     */
    @GlobalConstant("NSBetweenPredicateOperatorType")
    public static final long Between = 100L;
}
Sellegit/j2objc
runtime/src/main/java/apple/foundation/NSPredicateOperatorType.java
Java
apache-2.0
2,176
/** * Copyright 2015 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { closestByTag, closestBySelector, scopedQuerySelector, } from '../../../src/dom'; import {dev, user} from '../../../src/log'; import {resourcesForDoc} from '../../../src/resources'; import {getParentWindowFrameElement} from '../../../src/service'; import {timerFor} from '../../../src/timer'; import {isFiniteNumber} from '../../../src/types'; import {viewportForDoc} from '../../../src/viewport'; import {viewerForDoc} from '../../../src/viewer'; import {VisibilityState} from '../../../src/visibility-state'; import {startsWith} from '../../../src/string'; import {DEFAULT_THRESHOLD} from '../../../src/intersection-observer-polyfill'; /** @const {number} */ const LISTENER_INITIAL_RUN_DELAY_ = 20; // Variables that are passed to the callback. 
const MAX_CONTINUOUS_TIME = 'maxContinuousVisibleTime'; const TOTAL_VISIBLE_TIME = 'totalVisibleTime'; const FIRST_SEEN_TIME = 'firstSeenTime'; const LAST_SEEN_TIME = 'lastSeenTime'; const FIRST_VISIBLE_TIME = 'fistVisibleTime'; const LAST_VISIBLE_TIME = 'lastVisibleTime'; const MIN_VISIBLE = 'minVisiblePercentage'; const MAX_VISIBLE = 'maxVisiblePercentage'; const ELEMENT_X = 'elementX'; const ELEMENT_Y = 'elementY'; const ELEMENT_WIDTH = 'elementWidth'; const ELEMENT_HEIGHT = 'elementHeight'; const TOTAL_TIME = 'totalTime'; const LOAD_TIME_VISIBILITY = 'loadTimeVisibility'; const BACKGROUNDED = 'backgrounded'; const BACKGROUNDED_AT_START = 'backgroundedAtStart'; // Variables that are not exposed outside this class. const CONTINUOUS_TIME = 'cT'; const LAST_UPDATE = 'lU'; const IN_VIEWPORT = 'iV'; const TIME_LOADED = 'tL'; const SCHEDULED_RUN_ID = 'schId'; const LAST_CHANGE_ENTRY = 'lCE'; // Keys used in VisibilitySpec const CONTINUOUS_TIME_MAX = 'continuousTimeMax'; const CONTINUOUS_TIME_MIN = 'continuousTimeMin'; const TOTAL_TIME_MAX = 'totalTimeMax'; const TOTAL_TIME_MIN = 'totalTimeMin'; const VISIBLE_PERCENTAGE_MIN = 'visiblePercentageMin'; const VISIBLE_PERCENTAGE_MAX = 'visiblePercentageMax'; const TAG_ = 'Analytics.Visibility'; /** * Checks if the value is undefined or positive number like. * "", 1, 0, undefined, 100, 101 are positive. -1, NaN are not. * * Visible for testing. * * @param {number} num The number to verify. * @return {boolean} * @private */ export function isPositiveNumber_(num) { return num === undefined || (typeof num == 'number' && Math.sign(num) >= 0); } /** * Checks if the value is undefined or a number between 0 and 100. * "", 1, 0, undefined, 100 return true. -1, NaN and 101 return false. * * Visible for testing. * * @param {number} num The number to verify. 
* @return {boolean} */ export function isValidPercentage_(num) { return num === undefined || (typeof num == 'number' && Math.sign(num) >= 0 && num <= 100); } /** * Checks and outputs information about visibilitySpecValidation. * @param {!JSONType} config Configuration for instrumentation. * @return {boolean} True if the spec is valid. * @private */ export function isVisibilitySpecValid(config) { if (!config['visibilitySpec']) { return true; } const spec = config['visibilitySpec']; const selector = spec['selector']; if (!selector || (!startsWith(selector, '#') && !startsWith(selector, 'amp-') && selector != ':root' && selector != ':host')) { user().error(TAG_, 'Visibility spec requires an id selector, a tag ' + 'name starting with "amp-" or ":root"'); return false; } const ctMax = spec[CONTINUOUS_TIME_MAX]; const ctMin = spec[CONTINUOUS_TIME_MIN]; const ttMax = spec[TOTAL_TIME_MAX]; const ttMin = spec[TOTAL_TIME_MIN]; if (!isPositiveNumber_(ctMin) || !isPositiveNumber_(ctMax) || !isPositiveNumber_(ttMin) || !isPositiveNumber_(ttMax)) { user().error(TAG_, 'Timing conditions should be positive integers when specified.'); return false; } if (ctMax < ctMin || ttMax < ttMin) { user().warn('AMP-ANALYTICS', 'Max value in timing conditions should be ' + 'more than the min value.'); return false; } if (!isValidPercentage_(spec[VISIBLE_PERCENTAGE_MAX]) || !isValidPercentage_(spec[VISIBLE_PERCENTAGE_MIN])) { user().error(TAG_, 'visiblePercentage conditions should be between 0 and 100.'); return false; } if (spec[VISIBLE_PERCENTAGE_MAX] < spec[VISIBLE_PERCENTAGE_MIN]) { user().error(TAG_, 'visiblePercentageMax should be greater than ' + 'visiblePercentageMin'); return false; } return true; } /** * Returns the element that matches the selector. If the selector is an * id, the element with that id is returned. If the selector is a tag name, an * ancestor of the analytics element with that tag name is returned. * * @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc. 
* @param {string} selector The selector for the element to track. * @param {!Element} analyticsEl Element whose ancestors to search. * @param {!String} selectionMethod The method to use to find the element. * @return {?Element} Element corresponding to the selector if found. */ export function getElement(ampdoc, selector, analyticsEl, selectionMethod) { if (!analyticsEl) { return null; } let foundEl; const friendlyFrame = getParentWindowFrameElement(analyticsEl, ampdoc.win); // Special case for root selector. if (selector == ':host' || selector == ':root') { // TODO(dvoytenko, #6794): Remove old `-amp-element` form after the new // form is in PROD for 1-2 weeks. foundEl = friendlyFrame ? closestBySelector( friendlyFrame, '.-amp-element,.i-amphtml-element') : null; } else if (selectionMethod == 'closest') { // Only tag names are supported currently. foundEl = closestByTag(analyticsEl, selector); } else if (selectionMethod == 'scope') { foundEl = scopedQuerySelector( dev().assertElement(analyticsEl.parentElement), selector); } else if (selector[0] == '#') { const containerDoc = friendlyFrame ? analyticsEl.ownerDocument : ampdoc; foundEl = containerDoc.getElementById(selector.slice(1)); } if (foundEl) { // Restrict result to be contained by ampdoc. const isContainedInDoc = ampdoc.contains(friendlyFrame || foundEl); if (isContainedInDoc) { return foundEl; } } return null; } /** * @typedef {{ * state: !Object, * config: !Object, * callback: function(!Object), * shouldBeVisible: boolean, * }} */ let VisibilityListenerDef; /** * Allows tracking of AMP elements in the viewport. * * This class allows a caller to specify conditions to evaluate when an element * is in viewport and for how long. If the conditions are satisfied, a provided * callback is called. 
*/ export class Visibility { /** @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc */ constructor(ampdoc) { /** @const {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc */ this.ampdoc = ampdoc; /** * key: resource id. * value: [VisibilityListenerDef] * @type {!Object<!Array<VisibilityListenerDef>>} * @private */ this.listeners_ = Object.create(null); /** @const {!../../../src/service/timer-impl.Timer} */ this.timer_ = timerFor(this.ampdoc.win); /** @private {Array<!../../../src/service/resource.Resource>} */ this.resources_ = []; /** @private @const {function()} */ this.boundScrollListener_ = this.scrollListener_.bind(this); /** @private @const {function()} */ this.boundVisibilityListener_ = this.visibilityListener_.bind(this); /** @private {boolean} */ this.scrollListenerRegistered_ = false; /** @private {boolean} */ this.visibilityListenerRegistered_ = false; /** @private {!../../../src/service/resources-impl.Resources} */ this.resourcesService_ = resourcesForDoc(this.ampdoc); /** @private {number|string|null} */ this.scheduledRunId_ = null; /** @private {number} Amount of time to wait for next calculation. */ this.timeToWait_ = Infinity; /** @private {boolean} */ this.scheduledLoadedPromises_ = false; /** @private @const {!../../../src/service/viewer-impl.Viewer} */ this.viewer_ = viewerForDoc(this.ampdoc); /** @private {boolean} */ this.backgroundedAtStart_ = !this.viewer_.isVisible(); /** @private {boolean} */ this.backgrounded_ = this.backgroundedAtStart_; } /** @private */ registerForVisibilityEvents_() { if (!this.visibilityListenerRegistered_) { this.viewer_.onVisibilityChanged(this.boundVisibilityListener_); this.visibilityListenerRegistered_ = true; this.visibilityListener_(); } } /** @private */ registerForViewportEvents_() { if (!this.scrollListenerRegistered_) { const viewport = viewportForDoc(this.ampdoc); // Currently unlistens are not being used. 
In the event that no resources // are actively being monitored, the scrollListener should be very cheap. viewport.onScroll(this.boundScrollListener_); viewport.onChanged(this.boundScrollListener_); this.scrollListenerRegistered_ = true; } } /** * @param {!Object} config * @param {function(!Object)} callback * @param {boolean} shouldBeVisible True if the element should be visible * when callback is called. False otherwise. * @param {!Element} analyticsElement The amp-analytics element that the * config is associated with. */ listenOnce(config, callback, shouldBeVisible, analyticsElement) { const selector = config['selector']; const element = user().assertElement( getElement(this.ampdoc, selector, dev().assertElement(analyticsElement), config['selectionMethod']), 'Element not found for visibilitySpec: ' + selector); const resource = this.resourcesService_.getResourceForElementOptional(element); user().assert( resource, 'Visibility tracking not supported on element: ', element); this.registerForViewportEvents_(); this.registerForVisibilityEvents_(); const resId = resource.getId(); this.listeners_[resId] = (this.listeners_[resId] || []); const state = {}; state[TIME_LOADED] = Date.now(); this.listeners_[resId].push({config, callback, state, shouldBeVisible}); this.resources_.push(resource); if (this.scheduledRunId_ === null) { this.scheduledRunId_ = this.timer_.delay(() => { this.scrollListener_(); }, LISTENER_INITIAL_RUN_DELAY_); } } /** * @param {!Object} config * @param {function(!Object)} callback * @param {boolean} shouldBeVisible True if the element should be visible * when callback is called. False otherwise. * @param {!Element} analyticsElement The amp-analytics element that the * config is associated with. 
*/ listenOnceV2(config, callback, shouldBeVisible, analyticsElement) { const selector = config['selector']; const element = user().assertElement( getElement(this.ampdoc, selector, dev().assertElement(analyticsElement), config['selectionMethod']), 'Element not found for visibilitySpec: ' + selector); const resource = this.resourcesService_.getResourceForElementOptional(element); user().assert( resource, 'Visibility tracking not supported on element: ', element); if (!this.intersectionObserver_) { const onIntersectionChange = this.onIntersectionChange_.bind(this); /** @private {!IntersectionObserver} */ this.intersectionObserver_ = // TODO: polyfill IntersectionObserver new this.ampdoc.win.IntersectionObserver(entries => { entries.forEach(onIntersectionChange); }, {threshold: DEFAULT_THRESHOLD}); } // Visible trigger resource.loadedOnce().then(() => { this.intersectionObserver_.observe(element); const resId = resource.getId(); this.listeners_[resId] = (this.listeners_[resId] || []); const state = {}; state[TIME_LOADED] = Date.now(); this.listeners_[resId].push({config, callback, state, shouldBeVisible}); this.resources_.push(resource); }); // Hidden trigger if (!shouldBeVisible && !this.visibilityListenerRegistered_) { this.viewer_.onVisibilityChanged(() => { if (!this.viewer_.isVisible()) { this.onDocumentHidden_(); } }); this.visibilityListenerRegistered_ = true; } } /** @private */ onIntersectionChange_(change) { const resource = this.resourcesService_.getResourceForElement(change.target); const listeners = this.listeners_[resource.getId()]; const visible = change.intersectionRatio * 100; for (let c = listeners.length - 1; c >= 0; c--) { const listener = listeners[c]; const shouldBeVisible = !!listener.shouldBeVisible; const config = listener.config; const state = listener.state; state[LAST_CHANGE_ENTRY] = change; // Update states and check if all conditions are satisfied const conditionsMet = this.updateCounters_(visible, listener, shouldBeVisible); if 
(!shouldBeVisible) { // For "hidden" trigger, only update state, don't trigger. continue; } if (conditionsMet) { if (state[SCHEDULED_RUN_ID]) { this.timer_.cancel(state[SCHEDULED_RUN_ID]); state[SCHEDULED_RUN_ID] = null; } this.prepareStateForCallback_(state, resource.getLayoutBox()); listener.callback(state); listeners.splice(c, 1); } else if (state[IN_VIEWPORT] && !state[SCHEDULED_RUN_ID]) { // There is unmet duration condition, schedule a check const timeToWait = this.computeTimeToWait_(state, config); if (timeToWait <= 0) { continue; } state[SCHEDULED_RUN_ID] = this.timer_.delay(() => { dev().assert(state[IN_VIEWPORT], 'should have been in viewport'); const lastChange = state[LAST_CHANGE_ENTRY]; if (this.updateCounters_( lastChange.intersectionRatio * 100, listener, /* shouldBeVisible */ true)) { this.prepareStateForCallback_(state, resource.getLayoutBox()); listener.callback(state); listeners.splice(listeners.indexOf(listener), 1); } }, timeToWait); } else if (!state[IN_VIEWPORT] && state[SCHEDULED_RUN_ID]) { this.timer_.cancel(state[SCHEDULED_RUN_ID]); state[SCHEDULED_RUN_ID] = null; } } // Remove target that have no listeners. if (listeners.length == 0) { this.intersectionObserver_.unobserve(change.target); } } /** @private */ onDocumentHidden_() { for (let i = 0; i < this.resources_.length; i++) { const resource = this.resources_[i]; if (!resource.hasLoadedOnce()) { continue; } const listeners = this.listeners_[resource.getId()]; for (let j = listeners.length - 1; j >= 0; j--) { const listener = listeners[j]; if (listener.shouldBeVisible) { continue; } const state = listener.state; const lastChange = state[LAST_CHANGE_ENTRY]; const lastVisible = lastChange ? 
lastChange.intersectionRatio * 100 : 0; if (this.updateCounters_( lastVisible, listener, /* shouldBeVisible */ false)) { this.prepareStateForCallback_(state, resource.getLayoutBox()); listener.callback(state); listeners.splice(j, 1); } } } } /** @private */ visibilityListener_() { const state = this.viewer_.getVisibilityState(); if (state == VisibilityState.HIDDEN || state == VisibilityState.PAUSED || state == VisibilityState.INACTIVE) { this.backgrounded_ = true; } this.scrollListener_(); } /** @private */ scrollListener_() { if (this.scheduledRunId_ != null) { this.timer_.cancel(this.scheduledRunId_); this.scheduledRunId_ = null; } const loadedPromises = []; for (let r = this.resources_.length - 1; r >= 0; r--) { const res = this.resources_[r]; if (!res.hasLoadedOnce()) { loadedPromises.push(res.loadedOnce()); continue; } const change = res.element.getIntersectionChangeEntry(); const visible = !isFiniteNumber(change.intersectionRatio) ? 0 : change.intersectionRatio * 100; const listeners = this.listeners_[res.getId()]; for (let c = listeners.length - 1; c >= 0; c--) { const shouldBeVisible = !!listeners[c]['shouldBeVisible']; if (this.updateCounters_(visible, listeners[c], shouldBeVisible) && this.viewer_.isVisible() == shouldBeVisible) { this.prepareStateForCallback_( listeners[c]['state'], res.getLayoutBox()); listeners[c].callback(listeners[c]['state']); listeners.splice(c, 1); } else { this.computeTimeToWait_( listeners[c]['state'], listeners[c]['config']); } } // Remove resources that have no listeners. if (listeners.length == 0) { this.resources_.splice(r, 1); } } // Schedule a calculation for the time when one of the conditions is // expected to be satisfied. if (this.scheduledRunId_ === null && this.timeToWait_ < Infinity && this.timeToWait_ > 0) { this.scheduledRunId_ = this.timer_.delay(() => { this.scrollListener_(); }, this.timeToWait_); } // Schedule a calculation for when a resource gets loaded. 
if (loadedPromises.length > 0 && !this.scheduledLoadedPromises_) { Promise.race(loadedPromises).then(() => { this.scheduledLoadedPromises_ = false; this.scrollListener_(); }); this.scheduledLoadedPromises_ = true; } } /** * Updates counters for a given listener. * @param {number} visible Percentage of element visible in viewport. * @param {Object<string,Object>} listener The listener whose counters need * updating. * @param {boolean} triggerType True if element should be visible. * False otherwise. * @return {boolean} true if all visibility conditions are satisfied * @private */ updateCounters_(visible, listener, triggerType) { const config = listener['config']; const state = listener['state'] || {}; if (visible > 0) { const timeElapsed = Date.now() - state[TIME_LOADED]; state[FIRST_SEEN_TIME] = state[FIRST_SEEN_TIME] || timeElapsed; state[LAST_SEEN_TIME] = timeElapsed; // Consider it as load time visibility if this happens within 300ms of // page load. if (state[LOAD_TIME_VISIBILITY] == undefined && timeElapsed < 300) { state[LOAD_TIME_VISIBILITY] = visible; } } const wasInViewport = state[IN_VIEWPORT]; const timeSinceLastUpdate = Date.now() - state[LAST_UPDATE]; state[IN_VIEWPORT] = this.isInViewport_(visible, config[VISIBLE_PERCENTAGE_MIN], config[VISIBLE_PERCENTAGE_MAX]); if (state[IN_VIEWPORT] && wasInViewport) { // Keep counting. this.setState_(state, visible, timeSinceLastUpdate); } else if (!state[IN_VIEWPORT] && wasInViewport) { // The resource went out of view. Do final calculations and reset state. dev().assert(state[LAST_UPDATE] > 0, 'lastUpdated time in weird state.'); state[MAX_CONTINUOUS_TIME] = Math.max(state[MAX_CONTINUOUS_TIME], state[CONTINUOUS_TIME] + timeSinceLastUpdate); state[LAST_UPDATE] = -1; state[TOTAL_VISIBLE_TIME] += timeSinceLastUpdate; state[CONTINUOUS_TIME] = 0; // Clear only after max is calculated above. 
state[LAST_VISIBLE_TIME] = Date.now() - state[TIME_LOADED]; } else if (state[IN_VIEWPORT] && !wasInViewport) { // The resource came into view. start counting. dev().assert(state[LAST_UPDATE] == undefined || state[LAST_UPDATE] == -1, 'lastUpdated time in weird state.'); state[FIRST_VISIBLE_TIME] = state[FIRST_VISIBLE_TIME] || Date.now() - state[TIME_LOADED]; this.setState_(state, visible, 0); } listener['state'] = state; return ((triggerType && state[IN_VIEWPORT]) || !triggerType) && (config[TOTAL_TIME_MIN] === undefined || state[TOTAL_VISIBLE_TIME] >= config[TOTAL_TIME_MIN]) && (config[TOTAL_TIME_MAX] === undefined || state[TOTAL_VISIBLE_TIME] <= config[TOTAL_TIME_MAX]) && (config[CONTINUOUS_TIME_MIN] === undefined || (state[MAX_CONTINUOUS_TIME] || 0) >= config[CONTINUOUS_TIME_MIN]) && (config[CONTINUOUS_TIME_MAX] === undefined || (state[MAX_CONTINUOUS_TIME] || 0) <= config[CONTINUOUS_TIME_MAX]); } /** * @param {!Object} state * @param {!Object} config * @return {number} * @private */ computeTimeToWait_(state, config) { const waitForContinuousTime = config[CONTINUOUS_TIME_MIN] > state[CONTINUOUS_TIME] ? config[CONTINUOUS_TIME_MIN] - state[CONTINUOUS_TIME] : 0; const waitForTotalTime = config[TOTAL_TIME_MIN] > state[TOTAL_VISIBLE_TIME] ? config[TOTAL_TIME_MIN] - state[TOTAL_VISIBLE_TIME] : 0; // Wait for minimum of (previous timeToWait, positive values of // waitForContinuousTime and waitForTotalTime). this.timeToWait_ = Math.min(this.timeToWait_, waitForContinuousTime || Infinity, waitForTotalTime || Infinity); // Return a max of wait time (used by V2) return Math.max(waitForContinuousTime, waitForTotalTime); } /** * For the purposes of these calculations, a resource is in viewport if the * visibility conditions are satisfied or they are not defined. * @param {number} visible Percentage of element visible * @param {number} min Lower bound of visibility condition. Not inclusive * @param {number} max Upper bound of visibility condition. Inclusive. 
* @return {boolean} true if the conditions are satisfied. * @private */ isInViewport_(visible, min, max) { return !!(visible > (min || 0) && visible <= (max || 100)); } /** * @param {!Object} s State of the listener * @param {number} visible Percentage of element visible * @param {number} sinceLast Milliseconds since last update * @private */ setState_(s, visible, sinceLast) { s[LAST_UPDATE] = Date.now(); s[TOTAL_VISIBLE_TIME] = s[TOTAL_VISIBLE_TIME] !== undefined ? s[TOTAL_VISIBLE_TIME] + sinceLast : 0; s[CONTINUOUS_TIME] = s[CONTINUOUS_TIME] !== undefined ? s[CONTINUOUS_TIME] + sinceLast : 0; s[MAX_CONTINUOUS_TIME] = s[MAX_CONTINUOUS_TIME] !== undefined ? Math.max(s[MAX_CONTINUOUS_TIME], s[CONTINUOUS_TIME]) : 0; s[MIN_VISIBLE] = s[MIN_VISIBLE] ? Math.min(s[MIN_VISIBLE], visible) : visible; s[MAX_VISIBLE] = s[MAX_VISIBLE] ? Math.max(s[MAX_VISIBLE], visible) : visible; s[LAST_VISIBLE_TIME] = Date.now() - s[TIME_LOADED]; } /** * Sets variable values for callback. Cleans up existing values. * @param {Object<string, *>} state The state object to populate * @param {!../../../src/layout-rect.LayoutRectDef} layoutBox The bounding rectangle * for the element * @private */ prepareStateForCallback_(state, layoutBox) { state[ELEMENT_X] = layoutBox.left; state[ELEMENT_Y] = layoutBox.top; state[ELEMENT_WIDTH] = layoutBox.width; state[ELEMENT_HEIGHT] = layoutBox.height; state[TOTAL_TIME] = this.getTotalTime_() || ''; state[LOAD_TIME_VISIBILITY] = state[LOAD_TIME_VISIBILITY] || 0; if (state[MIN_VISIBLE] !== undefined) { state[MIN_VISIBLE] = Math.round(dev().assertNumber(state[MIN_VISIBLE]) * 100) / 100; } if (state[MAX_VISIBLE] !== undefined) { state[MAX_VISIBLE] = Math.round(dev().assertNumber(state[MAX_VISIBLE]) * 100) / 100; } state[BACKGROUNDED] = this.backgrounded_ ? '1' : '0'; state[BACKGROUNDED_AT_START] = this.backgroundedAtStart_ ? '1' : '0'; // Remove the state that need not be public and call callback. 
delete state[CONTINUOUS_TIME]; delete state[LAST_UPDATE]; delete state[IN_VIEWPORT]; delete state[TIME_LOADED]; delete state[SCHEDULED_RUN_ID]; delete state[LAST_CHANGE_ENTRY]; for (const k in state) { if (state.hasOwnProperty(k)) { state[k] = String(state[k]); } } } getTotalTime_() { const perf = this.ampdoc.win.performance; return perf && perf.timing && perf.timing.domInteractive ? Date.now() - perf.timing.domInteractive : null; } }
DistroScale/amphtml
extensions/amp-analytics/0.1/visibility-impl.js
JavaScript
apache-2.0
25,331
/*
 * Copyright (C) 2019 Square, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "JsMethodProxy.h"
#include <algorithm>
#include "Context.h"

// Builds a proxy for one reflected Java method: caches its jmethodID, a
// converter for the return type, and one Java->JS argument converter per
// declared parameter. Converter lookups are skipped once a JNI exception is
// pending, so a half-built proxy is possible when reflection fails.
JsMethodProxy::JsMethodProxy(Context* context, JNIEnv* env, const char* name, jobject method)
    : name(name), methodId(env->FromReflectedMethod(method)) {
  const jclass methodClass = env->GetObjectClass(method);

  // Resolve the declared return type and get a JS -> Java converter for it.
  const jmethodID getReturnType =
      env->GetMethodID(methodClass, "getReturnType", "()Ljava/lang/Class;");
  const auto returnedClass = static_cast<jclass>(env->CallObjectMethod(method, getReturnType));
  resultLoader = context->getJsToJavaConverter(env, returnedClass, true);
  env->DeleteLocalRef(returnedClass);

  if (!env->ExceptionCheck()) {
    // Remember whether the last parameter is a varargs array; call() flattens
    // varargs into individual JS arguments.
    const jmethodID isVarArgsMethod = env->GetMethodID(methodClass, "isVarArgs", "()Z");
    isVarArgs = env->CallBooleanMethod(method, isVarArgsMethod);

    // One Java -> JS converter per declared parameter type. The loop bails
    // out early if any converter lookup raises a JNI exception.
    const jmethodID getParameterTypes =
        env->GetMethodID(methodClass, "getParameterTypes", "()[Ljava/lang/Class;");
    jobjectArray parameterTypes =
        static_cast<jobjectArray>(env->CallObjectMethod(method, getParameterTypes));
    const jsize numArgs = env->GetArrayLength(parameterTypes);
    for (jsize i = 0; i < numArgs && !env->ExceptionCheck(); ++i) {
      auto parameterType = env->GetObjectArrayElement(parameterTypes, i);
      argumentLoaders.push_back(
          context->getJavaToJsConverter(env, static_cast<jclass>(parameterType), true));
      env->DeleteLocalRef(parameterType);
    }
    env->DeleteLocalRef(parameterTypes);
  }
  env->DeleteLocalRef(methodClass);
}

// Invokes the proxied method as a property of `thisPointer` in the QuickJS
// context, converting each Java argument to a JSValue first. For a varargs
// method, the final Java array is expanded into individual JS arguments.
// Returns a (possibly null) local reference to the converted result; returns
// nullptr if a JNI exception was pending. All intermediate JSValues are freed
// before returning.
jobject JsMethodProxy::call(Context* context, JNIEnv* env, JSValue thisPointer,
                            jobjectArray args) const {
  // Clamp to the declared arity; extra caller-supplied args are ignored.
  const auto totalArgs = std::min<int>(argumentLoaders.size(), env->GetArrayLength(args));
  std::vector<JSValue> arguments;
  int numArgs;
  jvalue arg;
  for (numArgs = 0; numArgs < totalArgs && !env->ExceptionCheck(); numArgs++) {
    arg.l = env->GetObjectArrayElement(args, numArgs);
    if (!isVarArgs || numArgs < totalArgs - 1) {
      arguments.push_back(argumentLoaders[numArgs](context, env, arg));
    } else {
      // Last parameter of a varargs method: if it converted to a JS array,
      // splice its elements into the argument list instead of passing the
      // array itself.
      auto varArgs = argumentLoaders[numArgs](context, env, arg);
      if (JS_IsArray(context->jsContext, varArgs)) {
        auto len = JS_GetPropertyStr(context->jsContext, varArgs, "length");
        for (int i = 0, e = JS_VALUE_GET_INT(len); i < e; i++) {
          arguments.push_back(JS_GetPropertyUint32(context->jsContext, varArgs, i));
        }
        JS_FreeValue(context->jsContext, len);
        // The container array is no longer needed once its elements are
        // (ref-counted) members of `arguments`.
        JS_FreeValue(context->jsContext, varArgs);
      } else {
        arguments.push_back(varArgs);
      }
    }
    env->DeleteLocalRef(arg.l);
  }

  jobject result;
  if (!env->ExceptionCheck()) {
    auto property = JS_NewAtom(context->jsContext, name.c_str());
    JSValue callResult = JS_Invoke(context->jsContext, thisPointer, property, arguments.size(),
                                   arguments.data());
    JS_FreeAtom(context->jsContext, property);
    // Convert the JS result back to a Java object before freeing it.
    result = resultLoader(context, env, callResult).l;
    JS_FreeValue(context->jsContext, callResult);
  } else {
    result = nullptr;
  }
  for (JSValue argument : arguments) {
    JS_FreeValue(context->jsContext, argument);
  }
  return result;
}
square/duktape-android
quickjs/src/main/jni/JsMethodProxy.cpp
C++
apache-2.0
3,750
package rabbitmq_server /* * File Generated by enaml generator * !!! Please do not edit this file !!! */ type Broker struct { /*Username - Descr: RabbitMQ admin username for broker Default: <nil> */ Username interface{} `yaml:"username,omitempty"` /*Password - Descr: RabbitMQ admin password for broker Default: <nil> */ Password interface{} `yaml:"password,omitempty"` }
enaml-ops/omg-product-bundle
products/p-rabbitmq/enaml-gen/rabbitmq-server/broker.go
GO
apache-2.0
379
class ClientsController < ApplicationController before_filter :find_client, :except => [:index, :create, :new] def index @clients = Client.all end def create client = Client.create(params[:client]) if client.save redirect_to client else render "new" end end def new @client = Client.new end def show end def edit end def update if @client.update_attributes(params[:client]) redirect_to client_path else render "edit" end end def destroy @client.destroy redirect_to clients_path end private def find_client @client = Client.find(params[:id]) end end
angeleah/sam
app/controllers/clients_controller.rb
Ruby
apache-2.0
755
/*******************************************************************************
 * Copyright 2011-2014 Sergey Tarasevich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package com.nostra13.universalimageloader.cache.memory.impl;

import android.graphics.Bitmap;
import com.nostra13.universalimageloader.cache.memory.LimitedMemoryCache;

import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Limited {@link Bitmap bitmap} cache. Provides {@link Bitmap bitmaps} storing. Size of all stored bitmaps will not to
 * exceed size limit. When cache reaches limit size then cache clearing is processed by FIFO principle.<br />
 * <br />
 * <b>NOTE:</b> This cache uses strong and weak references for stored Bitmaps. Strong references - for limited count of
 * Bitmaps (depends on cache size), weak references - for all other cached Bitmaps.
 *
 * @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
 * @since 1.0.0
 */
// Algorithm: 1. Newly cached bitmaps are appended to the tail of the queue.
//            2. When the cache exceeds its size limit, bitmaps are evicted
//               in FIFO order (oldest entry first).
public class FIFOLimitedMemoryCache extends LimitedMemoryCache {

	// Insertion-ordered strong references; element 0 is the oldest entry and
	// therefore the next eviction candidate. Synchronized because the cache
	// may be touched from multiple threads.
	private final List<Bitmap> queue = Collections.synchronizedList(new LinkedList<Bitmap>());

	public FIFOLimitedMemoryCache(int sizeLimit) {
		super(sizeLimit);
	}

	@Override
	public boolean put(String key, Bitmap value) {
		// Only track the bitmap in the FIFO queue if the base class accepted it.
		if (super.put(key, value)) {
			queue.add(value);
			return true;
		} else {
			return false;
		}
	}

	@Override
	public Bitmap remove(String key) {
		// Look the bitmap up first so its strong reference can be dropped
		// from the queue before the base class forgets the key.
		Bitmap value = super.get(key);
		if (value != null) {
			queue.remove(value);
		}
		return super.remove(key);
	}

	@Override
	public void clear() {
		queue.clear();
		super.clear();
	}

	@Override
	protected int getSize(Bitmap value) {
		// Approximate byte size: bytes-per-row * rows.
		return value.getRowBytes() * value.getHeight();
	}

	@Override
	protected Bitmap removeNext() {
		// Evict element 0 — the bitmap that entered the queue first.
		return queue.remove(0);
	}

	@Override
	protected Reference<Bitmap> createReference(Bitmap value) {
		// Bitmaps beyond the strong-reference budget are held weakly, so the
		// GC may reclaim them under memory pressure.
		return new WeakReference<Bitmap>(value);
	}
}
droidranger/xygapp
xyg-library/src/main/java/com/nostra13/universalimageloader/cache/memory/impl/FIFOLimitedMemoryCache.java
Java
apache-2.0
2,778
#!/usr/bin/env python # # Copyright 2016 timercrack # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from abc import ABCMeta from functools import wraps def param_function(**out_kwargs): def _rest_handler(func): @wraps(func) def wrapper(self, *args, **kwargs): return func(self, *args, *kwargs) for key, value in out_kwargs.items(): setattr(wrapper, 'arg_{}'.format(key), value) setattr(wrapper, 'is_module_function', True) return wrapper return _rest_handler class ParamFunctionContainer(object, metaclass=ABCMeta): def __init__(self): self.module_arg_dict = dict() self._collect_all() def _collect_all(self): for fun_name in dir(self): fun = getattr(self, fun_name) if hasattr(fun, 'is_module_function'): params = dict() for arg in dir(fun): if arg.startswith('arg_'): params[arg[4:]] = getattr(fun, arg) self.module_arg_dict[fun_name] = params
timercrack/pydatacoll
pydatacoll/utils/func_container.py
Python
apache-2.0
1,572
/*
 * Copyright 2012-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package sample.actuator;

import java.util.Map;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.security.SecurityProperties;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.boot.test.TestRestTemplate;
import org.springframework.boot.test.WebIntegrationTest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for endpoints configuration.
 *
 * @author Dave Syer
 */
// Boots the sample app on a random port with the "endpoints" profile active,
// which remaps the error path and management context path (see the profile's
// property file — values asserted below come from that configuration).
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(SampleActuatorApplication.class)
@WebIntegrationTest(randomPort = true)
@DirtiesContext
@ActiveProfiles("endpoints")
public class EndpointsPropertiesSampleActuatorApplicationTests {

	@Autowired
	private SecurityProperties security;

	// Random HTTP port assigned at startup.
	@Value("${local.server.port}")
	private int port;

	@Test
	public void testCustomErrorPath() throws Exception {
		// The profile remaps the error page to /oops; the endpoint still
		// reports the underlying 500 with the app's custom error body
		// (status 999, error "None" — presumably set by the sample's error
		// attributes; confirm against the profile's property file).
		@SuppressWarnings("rawtypes")
		ResponseEntity<Map> entity = new TestRestTemplate("user", getPassword())
				.getForEntity("http://localhost:" + this.port + "/oops", Map.class);
		assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.INTERNAL_SERVER_ERROR);
		@SuppressWarnings("unchecked")
		Map<String, Object> body = entity.getBody();
		assertThat(body.get("error")).isEqualTo("None");
		assertThat(body.get("status")).isEqualTo(999);
	}

	@Test
	public void testCustomContextPath() throws Exception {
		// Management endpoints are moved under /admin by the profile; the
		// health endpoint should expose both overall status and the sample's
		// custom "hello" detail.
		ResponseEntity<String> entity = new TestRestTemplate("user", getPassword())
				.getForEntity("http://localhost:" + this.port + "/admin/health", String.class);
		assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
		assertThat(entity.getBody()).contains("\"status\":\"UP\"");
		assertThat(entity.getBody()).contains("\"hello\":\"world\"");
	}

	// Basic-auth password for the default "user" account, resolved from the
	// auto-configured security properties.
	private String getPassword() {
		return this.security.getUser().getPassword();
	}

}
joansmith/spring-boot
spring-boot-samples/spring-boot-sample-actuator/src/test/java/sample/actuator/EndpointsPropertiesSampleActuatorApplicationTests.java
Java
apache-2.0
2,902
<?php /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ namespace App\DTO\UploadDTO; /** * Description of BillEntryDto * used for bill entry in mysql table * @author niteen */ class BillEntryDto { public $billNo; public $netAmt; public $totalTaxAmt; public $totalPayAmt; public $userId; public $restaurantId; public $custId; public $tableId; public $takeawayNo; public $deliveryNo; public function __construct($billNo = null, $netAmt = null, $totalTaxAmt = null, $totalPayAmt = null, $userId = null, $restaurantId = null, $custId = null, $tableId = null, $takeawayNo = null, $deliveryNo = null) { $this->billNo = $billNo; $this->netAmt = $netAmt; $this->totalTaxAmt = $totalTaxAmt; $this->totalPayAmt = $totalPayAmt; $this->userId = $userId; $this->restaurantId = $restaurantId; $this->custId = $custId; $this->tableId = $tableId; $this->takeawayNo = $takeawayNo; $this->deliveryNo = $deliveryNo; } }
Vibeosys/RorderWeb
src/DTO/UploadDTO/BillEntryDto.php
PHP
apache-2.0
1,205
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.demo.springmvc.tests; import org.apache.servicecomb.core.SCBEngine; import org.junit.AfterClass; import org.junit.BeforeClass; public class RawSpringMvcIntegrationTest extends SpringMvcIntegrationTestBase { @BeforeClass public static void init() throws Exception { System.setProperty("servicecomb.uploads.directory", "/tmp"); setUpLocalRegistry(); SpringMvcTestMain.main(new String[0]); } @AfterClass public static void shutdown() { SCBEngine.getInstance().destroy(); } }
acsukesh/java-chassis
integration-tests/springmvc-tests/springmvc-tests-general/src/test/java/org/apache/servicecomb/demo/springmvc/tests/RawSpringMvcIntegrationTest.java
Java
apache-2.0
1,338
import { toLength, toBoolean } from "../utils/native"; import { UNDEFINED } from "../types/primitive-type"; import { executeCallback } from "./array-helpers"; import { assertIsNotNullOrUndefined, assertIsFunction } from "../utils/contracts"; export default function ($target, env, factory) { $target.define("findIndex", factory.createBuiltInFunction(function* (predicate, thisArg) { assertIsNotNullOrUndefined(this.object, "Array.prototype.findIndex"); let length = yield toLength(this.object); assertIsFunction(predicate, "predicate"); let i = 0; while (i < length) { let propInfo = this.object.getProperty(i); let value = propInfo ? propInfo.getValue() : UNDEFINED; let passed = toBoolean(yield executeCallback(env, predicate, { key: i, value }, thisArg, this.object)); if (passed) { return factory.createPrimitive(i); } i++; } return factory.createPrimitive(-1); }, 1, "Array.prototype.findIndex")); }
jrsearles/SandBoxr
src/es6/array.find-index.js
JavaScript
apache-2.0
986
package ru.stqa.pft.mantis.model;/* Created by Vrstiv on 23.04.2016 */ import com.google.common.collect.ForwardingSet; import java.util.Collection; import java.util.HashSet; import java.util.Set; public class Users extends ForwardingSet<UserData> { private Set<UserData> delegate; public Users(Collection<UserData> users) { this.delegate = new HashSet<UserData>(users); } @Override protected Set<UserData> delegate() { return delegate; } }
VrStiv/javaForQA_1
mantis-tests/src/test/java/ru/stqa/pft/mantis/model/Users.java
Java
apache-2.0
470
function getTableRpj(){ return 'tb_rpj'; } function dbLoad(){ var db = Ti.Database.open(Ti.App.Properties.getString(DATABASE_FILE)); return db; } function createRpj() { db = dbLoad(); db.execute('CREATE TABLE IF NOT EXISTS ' + getTableRpj() + '( id INTEGER PRIMARY KEY, rpj_id INTEGER, rpj_razao TEXT, rpj_cnpj TEXT, rpj_fantasia TEXT, rpj_ie TEXT, rpj_im TEXT, fk_rp INTEGER, ep_id INTEGER );'); db.close(); } function dropRpj() { db = dbLoad(); db.execute('DROP TABLE IF EXISTS ' + getTableRpj()); db.close(); } function insertRpj(rpj_id, rpj_razao, rpj_cnpj, rpj_fantasia, rpj_ie, rpj_im, fk_rp, ep_id) { db = dbLoad(); db.execute('INSERT INTO ' + getTableRpj() + ' ( rpj_id, rpj_razao, rpj_cnpj, rpj_fantasia, rpj_ie, rpj_im, fk_rp, ep_id) VALUES (?,?,?,?,?,?,?)', rpj_id, rpj_razao, rpj_cnpj, rpj_fantasia, rpj_ie, rpj_im, fk_rp, ep_id); db.close(); } function selectallRpj() { db = dbLoad(); var rpj = db.execute('SELECT * FROM ' + getTableRpj()); if (Ti.Platform.osname == "android") { db.close(); } return rpj; } function processRpj(jsonTxt) { dropRpj(); createRpj(); var jsonObject = JSON.parse(jsonTxt); for (var j = 0; j < jsonObject.length; j++) { var rpj_id = jsonObject[j].rpj_id; var rpj_razao = jsonObject[j].rpj_razao; var rpj_cnpj = jsonObject[j].rpj_cnpj; var rpj_fantasia = jsonObject[j].rpj_fantasia; var rpj_ie = jsonObject[j].rpj_ie; var rpj_im = jsonObject[j].rpj_im; var fk_rp = jsonObject[j].fk_rp; var ep_id = jsonObject[j].ep_id; insertRpj(rpj_id, rpj_razao, rpj_cnpj, rpj_fantasia, rpj_ie, rpj_im, fk_rp, ep_id); } }
lukas-conka/mobile-app
app/assets/database/rpj.js
JavaScript
apache-2.0
1,662
package com.google.api.ads.adwords.jaxws.v201601.express; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for getResponse element declaration. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;element name="getResponse"> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="rval" type="{https://adwords.google.com/api/adwords/express/v201601}ExpressBusinessPage" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * &lt;/element> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "rval" }) @XmlRootElement(name = "getResponse") public class ExpressBusinessServiceInterfacegetResponse { protected ExpressBusinessPage rval; /** * Gets the value of the rval property. * * @return * possible object is * {@link ExpressBusinessPage } * */ public ExpressBusinessPage getRval() { return rval; } /** * Sets the value of the rval property. * * @param value * allowed object is * {@link ExpressBusinessPage } * */ public void setRval(ExpressBusinessPage value) { this.rval = value; } }
gawkermedia/googleads-java-lib
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201601/express/ExpressBusinessServiceInterfacegetResponse.java
Java
apache-2.0
1,603
/*
 * Copyright to the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.rioproject.impl.costmodel;

import org.rioproject.costmodel.ResourceCostModel;

import java.util.Collections;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;

/**
 * The GenericCostModel provides a straightforward implementation of a
 * {@link org.rioproject.costmodel.ResourceCostModel}
 *
 * @author Dennis Reedy
 */
public class GenericCostModel implements ResourceCostModel {
    @SuppressWarnings("unused")
    static final long serialVersionUID = 1L;
    /** Default description */
    private static final String DEFAULT_DESCRIPTION = "Generic Cost Model";
    /** The cost per unit */
    private double costPerUnit;
    /** A description */
    private String description = DEFAULT_DESCRIPTION;
    /** TimeBoundary objects kept sorted by boundary so getCostPerUnit can
     * scan from the largest boundary downward */
    private final Set<TimeBoundary> timeBoundaries =
        new TreeSet<TimeBoundary>(new Comparator<TimeBoundary>() {
            public int compare(TimeBoundary timeBoundary, TimeBoundary timeBoundary1) {
                return (timeBoundary.compareTo(timeBoundary1));
            }
        });

    /**
     * Create a GenericCostModel
     *
     * @param value The cost per unit, must be non-negative and not NaN
     */
    public GenericCostModel(double value) {
        this(value, null, DEFAULT_DESCRIPTION);
    }

    /**
     * Create a GenericCostModel
     *
     * @param value The cost per unit, must be non-negative and not NaN
     * @param timeBoundaries An Array of TimeBoundary classes
     */
    public GenericCostModel(double value, TimeBoundary[] timeBoundaries) {
        this(value, timeBoundaries, DEFAULT_DESCRIPTION);
    }

    /**
     * Create a GenericCostModel
     *
     * @param value The cost per unit, must be non-negative and not NaN
     * @param timeBoundaries An Array of TimeBoundary classes
     * @param description A Description for the GenericCostModel
     *
     * @throws IllegalArgumentException if value is NaN or negative
     */
    public GenericCostModel(double value, TimeBoundary[] timeBoundaries, String description) {
        /* Bug fix: the original checked `value == Double.NaN`, which is
         * always false because NaN never compares equal to anything
         * (including itself); Double.isNaN is the correct test. */
        if(Double.isNaN(value))
            throw new IllegalArgumentException("value cannot be NaN");
        if(value < 0)
            throw new IllegalArgumentException("value must be non-negative");
        costPerUnit = value;
        if(timeBoundaries!=null) {
            Collections.addAll(this.timeBoundaries, timeBoundaries);
        }
        if(description!=null)
            this.description = description;
    }

    /**
     * @see ResourceCostModel#getCostPerUnit
     */
    public double getCostPerUnit(long duration) {
        /* Walk boundaries from largest to smallest; the first boundary the
         * duration reaches determines the multiplier. */
        TimeBoundary[] tBoundaries = getTimeBoundaries();
        for(int i = (tBoundaries.length - 1); i >= 0; i--) {
            if(duration >= tBoundaries[i].getBoundary()) {
                return (costPerUnit * tBoundaries[i].getMultiplier());
            }
        }
        /* No boundary reached: base rate applies. */
        return (costPerUnit);
    }

    /**
     * @see ResourceCostModel#addTimeBoundary
     */
    public void addTimeBoundary(TimeBoundary timeBoundary) {
        synchronized(timeBoundaries) {
            timeBoundaries.add(timeBoundary);
        }
    }

    /**
     * @see ResourceCostModel#getDescription
     */
    public String getDescription() {
        return (description);
    }

    /*
     * Get all TimeBoundary instances from the Collection, sorted by boundary
     */
    private TimeBoundary[] getTimeBoundaries() {
        TimeBoundary[] boundaries;
        synchronized(timeBoundaries) {
            boundaries = timeBoundaries.toArray(
                new TimeBoundary[timeBoundaries.size()]);
        }
        return (boundaries);
    }
}
dreedyman/Rio
rio-lib/src/main/java/org/rioproject/impl/costmodel/GenericCostModel.java
Java
apache-2.0
4,322
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>

#include "sca/GLSLToolsLite.h"

#include <filamat/MaterialBuilder.h>
#include <filamat/Enums.h>

using namespace filamat;

// Test suite for GLSLToolsLite, the lightweight (non-compiling) static
// analyzer that scans material shader source for writes to MaterialInputs
// properties. Each test feeds a shader snippet to findProperties() and
// compares the detected property set against an expected PropertyList.

// Compares two property lists element-by-element and reports the first
// mismatching property by name for a readable gtest failure message.
static ::testing::AssertionResult PropertyListsMatch(const MaterialBuilder::PropertyList& expected,
        const MaterialBuilder::PropertyList& actual) {
    for (size_t i = 0; i < MaterialBuilder::MATERIAL_PROPERTIES_COUNT; i++) {
        if (expected[i] != actual[i]) {
            const auto& propString = Enums::toString<Property>(Property(i));
            return ::testing::AssertionFailure()
                    << "actual[" << propString << "] (" << actual[i] << ") != expected["
                    << propString << "] (" << expected[i] << ")";
        }
    }
    return ::testing::AssertionSuccess();
}

// Fixture only needs to initialize the MaterialBuilder global state once per
// test; there is no per-test teardown.
class FilamatLite : public ::testing::Test {
protected:
    FilamatLite() {
    }

    virtual ~FilamatLite() {
    }

    virtual void SetUp() {
        MaterialBuilder::init();
    }
};

// A fragment shader that writes no properties must detect nothing.
TEST_F(FilamatLite, StaticCodeAnalyzerNothingDetected) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Same for an empty vertex-stage material function.
TEST_F(FilamatLite, StaticCodeAnalyzerNothingDetectedinVertex) {
    utils::CString shaderCode(R"(
        void materialVertex(inout MaterialVertexInputs material) {
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::VERTEX, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// A plain `material.baseColor = ...` assignment is detected.
TEST_F(FilamatLite, StaticCodeAnalyzerDirectAssign) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material.baseColor = vec4(0.8);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::BASE_COLOR)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Vertex-stage properties (clipSpaceTransform) are detected too.
TEST_F(FilamatLite, StaticCodeAnalyzerDirectAssignVertex) {
    utils::CString shaderCode(R"(
        void materialVertex(inout MaterialVertexInputs material) {
            material.clipSpaceTransform = mat4(2.0);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::VERTEX, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::CLIP_SPACE_TRANSFORM)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Multiple assignments, including ones before prepareMaterial, are all found.
TEST_F(FilamatLite, StaticCodeAnalyzerAssignMultiple) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            material.clearCoat = 1.0;
            prepareMaterial(material);
            material.baseColor = vec4(0.8);
            material.metallic = 1.0;
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::CLEAR_COAT)] = true;
    expected[size_t(MaterialBuilder::Property::BASE_COLOR)] = true;
    expected[size_t(MaterialBuilder::Property::METALLIC)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Swizzled writes (material.prop.rgb = ...) still count as writes.
TEST_F(FilamatLite, StaticCodeAnalyzerDirectAssignWithSwizzling) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material.subsurfaceColor.rgb = vec3(1.0, 0.4, 0.8);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::SUBSURFACE_COLOR)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// No whitespace around '=' must not defeat detection.
TEST_F(FilamatLite, StaticCodeAnalyzerNoSpace) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material.ambientOcclusion=vec3(1.0);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::AMBIENT_OCCLUSION)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Arbitrary whitespace/newlines between "material", '.', and the property
// name must still be recognized.
TEST_F(FilamatLite, StaticCodeAnalyzerWhitespace) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material
            .subsurfaceColor = vec3(1.0);
            material .
            ambientOcclusion = vec3(1.0);
            material
            .
            baseColor = vec3(1.0);
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::SUBSURFACE_COLOR)] = true;
    expected[size_t(MaterialBuilder::Property::AMBIENT_OCCLUSION)] = true;
    expected[size_t(MaterialBuilder::Property::BASE_COLOR)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Truncated input ("material." at EOF) must not crash or false-positive.
TEST_F(FilamatLite, StaticCodeAnalyzerEndOfShader) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            material.)");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Assignments inside // line comments are ignored.
TEST_F(FilamatLite, StaticCodeAnalyzerSlashComments) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material.metallic = 1.0;
            // material.baseColor = vec4(1.0);
            // material.baseColor = vec4(1.0);
            // material.ambientOcclusion = vec3(1.0);
            material.clearCoat = 0.5;
            material.anisotropy = -1.0;
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::METALLIC)] = true;
    expected[size_t(MaterialBuilder::Property::CLEAR_COAT)] = true;
    expected[size_t(MaterialBuilder::Property::ANISOTROPY)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Assignments inside /* block comments */ are ignored as well.
TEST_F(FilamatLite, StaticCodeAnalyzerMultilineComments) {
    utils::CString shaderCode(R"(
        void material(inout MaterialInputs material) {
            prepareMaterial(material);
            material.metallic = 1.0;
            /*
            material.baseColor = vec4(1.0);
            // material.baseColor = vec4(1.0);
            material.ambientOcclusion = vec3(1.0);
            */
            material.clearCoat = 0.5;
        }
    )");

    GLSLToolsLite glslTools;
    MaterialBuilder::PropertyList properties {false};
    glslTools.findProperties(filament::backend::FRAGMENT, shaderCode, properties);
    MaterialBuilder::PropertyList expected {false};
    expected[size_t(MaterialBuilder::Property::METALLIC)] = true;
    expected[size_t(MaterialBuilder::Property::CLEAR_COAT)] = true;
    EXPECT_TRUE(PropertyListsMatch(expected, properties));
}

// Google-style #line directives (with a quoted filename) are stripped;
// plain numeric #line directives are preserved.
TEST_F(FilamatLite, RemoveLineDirectivesOneLine) {
    {
        std::string shaderCode("#line 10 \"foobar\"");
        GLSLToolsLite glslTools;
        glslTools.removeGoogleLineDirectives(shaderCode);
        EXPECT_STREQ("", shaderCode.c_str());
    }
    {
        // Ignore non-Google extension line directives
        std::string shaderCode("#line 100");
        GLSLToolsLite glslTools;
        glslTools.removeGoogleLineDirectives(shaderCode);
        EXPECT_STREQ("#line 100", shaderCode.c_str());
    }
}

// Mixed content: only the quoted-filename directive line is removed; the
// surrounding source and the plain "#line 100" line survive intact.
TEST_F(FilamatLite, RemoveLineDirectives) {
    std::string shaderCode(R"(
aaa
#line 10 "foobar"
bbb
ccc
#line 100
)");
    std::string expected(R"(
aaa
bbb
ccc
#line 100
)");
    GLSLToolsLite glslTools;
    glslTools.removeGoogleLineDirectives(shaderCode);
    EXPECT_STREQ(expected.c_str(), shaderCode.c_str());
}

int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
google/filament
libs/filamat/tests/test_filamat_lite.cpp
C++
apache-2.0
9,812
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.waf.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/PutPermissionPolicy" target="_top">AWS
 *      API Documentation</a>
 */
// NOTE(review): code-generator output — the PutPermissionPolicy operation
// returns an empty payload, so this result type intentionally has no fields;
// equals/hashCode/toString are the generator's standard no-field stubs.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PutPermissionPolicyResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof PutPermissionPolicyResult == false)
            return false;
        PutPermissionPolicyResult other = (PutPermissionPolicyResult) obj;
        // No fields to compare: any two instances are equal.
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        return hashCode;
    }

    @Override
    public PutPermissionPolicyResult clone() {
        try {
            return (PutPermissionPolicyResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
aws/aws-sdk-java
aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/PutPermissionPolicyResult.java
Java
apache-2.0
2,365
var ANIMATION_SPEED = 1000, snap = Snap(), els = {}, container, els_map = {}; Snap.load('face.svg', add); function add(fragment) { snap.append(fragment); container = snap.select('g'); container.drag(); setElements(); animateAll(); } function setElements() { els = { lefteye: snap.select('#moon #left-eye'), righteye: snap.select('#moon #right-eye'), mouth: snap.select('#moon #mouth'), nose: snap.select('#moon #nose'), body: snap.select('#moon #body') }; els_map = { lefteye: { d: 'M182.665,140.929c0,0,1.334-10-44.666-10s-46,10-46,10s9.731-39.428,45.041-39.428 C172.351,101.501,182.665,140.929,182.665,140.929z' }, righteye: { d: 'M375.329,140.929c0,0,1.334-10-44.666-10s-46.002,10-46.002,10 s9.732-39.428,45.043-39.428S375.329,140.929,375.329,140.929z' }, mouth: { d: 'M51.999,212.167h363.333c0,0-60.832,118-180.5,118 C115.166,330.167,51.999,212.167,51.999,212.167z' }, nose: { d: 'M232.01,100c0,0,9.832,68.999,9.666,77s-18.334,13-18.334,13s32.5,3.999,36.334-1.667 S232.01,100,232.01,100z' }, body: { d: 'M588.167,292.167H951.5c0,0-60.833,118-180.5,118S588.167,292.167,588.167,292.167z', fill: '#EDD64F' } }; } function animateAll() { // set slight timeout so it doesn't // animate as soon as the page loads. setTimeout(function(){ for(var el in els) { animate(el, els_map[el]); } }, 1000); } function animate(elem, attrs) { els[elem].animate(attrs, ANIMATION_SPEED); }
christabor/etude
11-07-2013/animatey.js
JavaScript
apache-2.0
1,447
import logging from coursedashboards.util.retry import retry from uw_sws.registration import get_active_registrations_by_section from restclients_core.exceptions import DataFailureException from urllib3.exceptions import MaxRetryError logger = logging.getLogger(__name__) @retry(MaxRetryError, tries=5, delay=3, logger=logger) def get_active_registrations_for_section(section): try: return get_active_registrations_by_section(section) except DataFailureException as ex: if ex.status == 404: logger.info(" {}".format(ex)) else: logger.error(" {}".format(ex)) return []
uw-it-aca/course-dashboards
coursedashboards/dao/registration.py
Python
apache-2.0
633
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * @author max */ package com.intellij.psi.impl.source; import com.intellij.lang.*; import com.intellij.lang.java.JavaParserDefinition; import com.intellij.lang.java.parser.FileParser; import com.intellij.lang.java.parser.JavaParserUtil; import com.intellij.lexer.JavaLexer; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.PsiManager; import com.intellij.psi.StubBuilder; import com.intellij.psi.impl.java.stubs.PsiJavaFileStub; import com.intellij.psi.impl.java.stubs.impl.PsiJavaFileStubImpl; import com.intellij.psi.impl.source.parsing.FileTextParsing; import com.intellij.psi.impl.source.tree.FileElement; import com.intellij.psi.impl.source.tree.java.JavaFileElement; import com.intellij.psi.stubs.IndexSink; import com.intellij.psi.stubs.StubElement; import com.intellij.psi.stubs.StubInputStream; import com.intellij.psi.stubs.StubOutputStream; import com.intellij.psi.tree.ILightStubFileElementType; import com.intellij.psi.util.PsiUtil; import com.intellij.util.diff.FlyweightCapableTreeStructure; import com.intellij.util.io.StringRef; import java.io.IOException; public class JavaFileElementType extends ILightStubFileElementType<PsiJavaFileStub> { public static boolean USE_NEW_STUB_BUILDER = true & JavaParserDefinition.USE_NEW_PARSER; public static final int STUB_VERSION = JavaParserDefinition.USE_NEW_PARSER ? 
USE_NEW_STUB_BUILDER ? 6 : 5 : 4; public JavaFileElementType() { super("java.FILE", StdLanguages.JAVA); } @Override public StubBuilder getBuilder() { return USE_NEW_STUB_BUILDER ? new JavaLightStubBuilder() : new JavaFileStubBuilder(); } @Override public int getStubVersion() { return STUB_VERSION; } @Override public boolean shouldBuildStubFor(final VirtualFile file) { final VirtualFile dir = file.getParent(); return dir == null || dir.getUserData(LanguageLevel.KEY) != null; } @Override public ASTNode createNode(final CharSequence text) { return new JavaFileElement(text); } @Override public FlyweightCapableTreeStructure<LighterASTNode> parseContentsLight(final ASTNode chameleon) { final PsiBuilder builder = JavaParserUtil.createBuilder(chameleon); doParse(builder); return builder.getLightTree(); } @Override public ASTNode parseContents(final ASTNode chameleon) { if (JavaParserDefinition.USE_NEW_PARSER) { final PsiBuilder builder = JavaParserUtil.createBuilder(chameleon); doParse(builder); return builder.getTreeBuilt().getFirstChildNode(); } FileElement node = (FileElement)chameleon; final CharSequence seq = node.getChars(); final PsiManager manager = node.getManager(); final JavaLexer lexer = new JavaLexer(PsiUtil.getLanguageLevel(node.getPsi())); return FileTextParsing.parseFileText(manager, lexer, seq, 0, seq.length(), node.getCharTable()); } private void doParse(final PsiBuilder builder) { final PsiBuilder.Marker root = builder.mark(); FileParser.parse(builder); root.done(this); } @Override public String getExternalId() { return "java.FILE"; } @Override public void serialize(final PsiJavaFileStub stub, final StubOutputStream dataStream) throws IOException { dataStream.writeBoolean(stub.isCompiled()); dataStream.writeName(stub.getPackageName()); } @Override public PsiJavaFileStub deserialize(final StubInputStream dataStream, final StubElement parentStub) throws IOException { boolean compiled = dataStream.readBoolean(); StringRef packName = dataStream.readName(); 
return new PsiJavaFileStubImpl(null, packName, compiled); } @Override public void indexStub(final PsiJavaFileStub stub, final IndexSink sink) { } }
joewalnes/idea-community
java/java-impl/src/com/intellij/psi/impl/source/JavaFileElementType.java
Java
apache-2.0
4,357
# # Cookbook Name:: nginx # Recipes:: devel # # Author:: Arthur Freyman (<afreyman@riotgames.com>) # # Copyright 2013, Riot Games # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # devel_src_filename = ::File.basename(node['nginx']['devel']['url']) devel_src_filepath = "#{Chef::Config['file_cache_path']}/#{devel_src_filename}" devel_extract_path = "#{Chef::Config['file_cache_path']}/nginx-devel-#{node['nginx']['devel']['version']}" remote_file devel_src_filepath do source node['nginx']['devel']['url'] checksum node['nginx']['devel']['checksum'] owner 'root' group node['root_group'] mode '0644' end bash 'extract_devel_module' do cwd ::File.dirname(devel_src_filepath) code <<-EOH mkdir -p #{devel_extract_path} tar xzf #{devel_src_filename} --no-same-owner -C #{devel_extract_path} EOH not_if { ::File.exist?(devel_extract_path) } end node.run_state['nginx_configure_flags'] = node.run_state['nginx_configure_flags'] | ["--add-module=#{devel_extract_path}/ngx_devel_kit-#{node['nginx']['devel']['version']}"]
ffuenf/nginx
recipes/ngx_devel_module.rb
Ruby
apache-2.0
1,558