code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE23_Relative_Path_Traversal__char_file_ifstream_08.cpp Label Definition File: CWE23_Relative_Path_Traversal.label.xml Template File: sources-sink-08.tmpl.cpp */ /* * @description * CWE: 23 Relative Path Traversal * BadSource: file Read input from a file * GoodSource: Use a fixed file name * Sink: ifstream * BadSink : Open the file named in data using ifstream::open() * Flow Variant: 08 Control flow: if(staticReturnsTrue()) and if(staticReturnsFalse()) * * */ #include "std_testcase.h" #ifdef _WIN32 #define BASEPATH "c:\\temp\\" #else #include <wchar.h> #define BASEPATH "/tmp/" #endif #ifdef _WIN32 #define FILENAME "C:\\temp\\file.txt" #else #define FILENAME "/tmp/file.txt" #endif #include <fstream> using namespace std; /* The two function below always return the same value, so a tool should be able to identify that calls to the functions will always return a fixed value. */ static int staticReturnsTrue() { return 1; } static int staticReturnsFalse() { return 0; } namespace CWE23_Relative_Path_Traversal__char_file_ifstream_08 { #ifndef OMITBAD void bad() { char * data; char dataBuffer[FILENAME_MAX] = BASEPATH; data = dataBuffer; if(staticReturnsTrue()) { { /* Read input from a file */ size_t dataLen = strlen(data); FILE * pFile; /* if there is room in data, attempt to read the input from a file */ if (FILENAME_MAX-dataLen > 1) { pFile = fopen(FILENAME, "r"); if (pFile != NULL) { /* POTENTIAL FLAW: Read data from a file */ if (fgets(data+dataLen, (int)(FILENAME_MAX-dataLen), pFile) == NULL) { printLine("fgets() failed"); /* Restore NUL terminator if fgets fails */ data[dataLen] = '\0'; } fclose(pFile); } } } } { ifstream inputFile; /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */ inputFile.open((char *)data); inputFile.close(); } } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B1() - use goodsource and badsink by changing the staticReturnsTrue() to staticReturnsFalse() */ static 
void goodG2B1() { char * data; char dataBuffer[FILENAME_MAX] = BASEPATH; data = dataBuffer; if(staticReturnsFalse()) { /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */ printLine("Benign, fixed string"); } else { /* FIX: Use a fixed file name */ strcat(data, "file.txt"); } { ifstream inputFile; /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */ inputFile.open((char *)data); inputFile.close(); } } /* goodG2B2() - use goodsource and badsink by reversing the blocks in the if statement */ static void goodG2B2() { char * data; char dataBuffer[FILENAME_MAX] = BASEPATH; data = dataBuffer; if(staticReturnsTrue()) { /* FIX: Use a fixed file name */ strcat(data, "file.txt"); } { ifstream inputFile; /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */ inputFile.open((char *)data); inputFile.close(); } } void good() { goodG2B1(); goodG2B2(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN using namespace CWE23_Relative_Path_Traversal__char_file_ifstream_08; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE23_Relative_Path_Traversal/s02/CWE23_Relative_Path_Traversal__char_file_ifstream_08.cpp
C++
bsd-3-clause
4,569
package org.hisp.dhis.sms.listener; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ import java.util.*; import org.apache.commons.lang3.StringUtils; import org.hisp.dhis.organisationunit.OrganisationUnit; import org.hisp.dhis.program.Program; import org.hisp.dhis.program.ProgramInstanceService; import org.hisp.dhis.sms.command.SMSCommand; import org.hisp.dhis.sms.command.SMSCommandService; import org.hisp.dhis.sms.command.code.SMSCode; import org.hisp.dhis.sms.incoming.IncomingSms; import org.hisp.dhis.sms.incoming.SmsMessageStatus; import org.hisp.dhis.sms.parse.ParserType; import org.hisp.dhis.sms.parse.SMSParserException; import org.hisp.dhis.system.util.SmsUtils; import org.hisp.dhis.trackedentity.TrackedEntityAttribute; import org.hisp.dhis.trackedentity.TrackedEntityInstance; import org.hisp.dhis.trackedentity.TrackedEntityInstanceService; import org.hisp.dhis.trackedentity.TrackedEntityTypeService; import org.hisp.dhis.trackedentityattributevalue.TrackedEntityAttributeValue; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; @Transactional public class TrackedEntityRegistrationSMSListener extends BaseSMSListener { private static final String SUCCESS_MESSAGE = "Tracked Entity Registered Successfully with uid. 
"; // ------------------------------------------------------------------------- // Dependencies // ------------------------------------------------------------------------- @Autowired private SMSCommandService smsCommandService; @Autowired private TrackedEntityTypeService trackedEntityTypeService; @Autowired private TrackedEntityInstanceService trackedEntityInstanceService; @Autowired private ProgramInstanceService programInstanceService; // ------------------------------------------------------------------------- // IncomingSmsListener implementation // ------------------------------------------------------------------------- @Override protected void postProcess( IncomingSms sms, SMSCommand smsCommand, Map<String, String> parsedMessage ) { String message = sms.getText(); Date date = SmsUtils.lookForDate( message ); String senderPhoneNumber = StringUtils.replace( sms.getOriginator(), "+", "" ); Collection<OrganisationUnit> orgUnits = getOrganisationUnits( sms ); Program program = smsCommand.getProgram(); OrganisationUnit orgUnit = SmsUtils.selectOrganisationUnit( orgUnits, parsedMessage, smsCommand ); if ( !program.hasOrganisationUnit( orgUnit ) ) { sendFeedback( SMSCommand.NO_OU_FOR_PROGRAM, senderPhoneNumber, WARNING ); throw new SMSParserException( SMSCommand.NO_OU_FOR_PROGRAM ); } TrackedEntityInstance trackedEntityInstance = new TrackedEntityInstance(); trackedEntityInstance.setOrganisationUnit( orgUnit ); trackedEntityInstance.setTrackedEntityType( trackedEntityTypeService.getTrackedEntityByName( smsCommand.getProgram().getTrackedEntityType().getName() ) ); Set<TrackedEntityAttributeValue> patientAttributeValues = new HashSet<>(); smsCommand.getCodes().stream() .filter( code -> parsedMessage.containsKey( code.getCode() ) ) .forEach( code -> { TrackedEntityAttributeValue trackedEntityAttributeValue = this.createTrackedEntityAttributeValue( parsedMessage, code, trackedEntityInstance) ; patientAttributeValues.add( trackedEntityAttributeValue ); }); int 
trackedEntityInstanceId = 0; if ( patientAttributeValues.size() > 0 ) { trackedEntityInstanceId = trackedEntityInstanceService.createTrackedEntityInstance( trackedEntityInstance, null, null, patientAttributeValues ); } else { sendFeedback( "No TrackedEntityAttribute found", senderPhoneNumber, WARNING ); } TrackedEntityInstance tei = trackedEntityInstanceService.getTrackedEntityInstance( trackedEntityInstanceId ); programInstanceService.enrollTrackedEntityInstance( tei, smsCommand.getProgram(), new Date(), date, orgUnit ); sendFeedback( StringUtils.defaultIfBlank( smsCommand.getSuccessMessage(), SUCCESS_MESSAGE + tei.getUid() ), senderPhoneNumber, INFO ); update( sms, SmsMessageStatus.PROCESSED, true ); } @Override protected SMSCommand getSMSCommand( IncomingSms sms ) { return smsCommandService.getSMSCommand( SmsUtils.getCommandString( sms ), ParserType.TRACKED_ENTITY_REGISTRATION_PARSER ); } private TrackedEntityAttributeValue createTrackedEntityAttributeValue( Map<String, String> parsedMessage, SMSCode code, TrackedEntityInstance trackedEntityInstance ) { String value = parsedMessage.get( code.getCode() ); TrackedEntityAttribute trackedEntityAttribute = code.getTrackedEntityAttribute(); TrackedEntityAttributeValue trackedEntityAttributeValue = new TrackedEntityAttributeValue(); trackedEntityAttributeValue.setAttribute( trackedEntityAttribute ); trackedEntityAttributeValue.setEntityInstance( trackedEntityInstance ); trackedEntityAttributeValue.setValue( value ); return trackedEntityAttributeValue; } }
msf-oca-his/dhis-core
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/sms/listener/TrackedEntityRegistrationSMSListener.java
Java
bsd-3-clause
7,054
//===--- CodeGenAction.cpp - LLVM Code Generation Frontend Action ---------===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #include "clang/CodeGen/CodeGenAction.h" #include "CodeGenModule.h" #include "CoverageMappingGen.h" #include "MacroPPCallbacks.h" #include "clang/AST/ASTConsumer.h" #include "clang/AST/ASTContext.h" #include "clang/AST/DeclCXX.h" #include "clang/AST/DeclGroup.h" #include "clang/Basic/FileManager.h" #include "clang/Basic/SourceManager.h" #include "clang/Basic/TargetInfo.h" #include "clang/CodeGen/BackendUtil.h" #include "clang/CodeGen/ModuleBuilder.h" #include "clang/Frontend/CompilerInstance.h" #include "clang/Frontend/FrontendDiagnostic.h" #include "clang/Lex/Preprocessor.h" #include "llvm/Bitcode/BitcodeReader.h" #include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h" #include "llvm/IR/DebugInfo.h" #include "llvm/IR/DiagnosticInfo.h" #include "llvm/IR/DiagnosticPrinter.h" #include "llvm/IR/GlobalValue.h" #include "llvm/IR/LLVMContext.h" #include "llvm/IR/Module.h" #include "llvm/IRReader/IRReader.h" #include "llvm/Linker/Linker.h" #include "llvm/Pass.h" #include "llvm/Support/MemoryBuffer.h" #include "llvm/Support/SourceMgr.h" #include "llvm/Support/Timer.h" #include "llvm/Support/ToolOutputFile.h" #include "llvm/Support/YAMLTraits.h" #include "llvm/Transforms/IPO/Internalize.h" #include <memory> using namespace clang; using namespace llvm; namespace clang { class BackendConsumer; class ClangDiagnosticHandler final : public DiagnosticHandler { public: ClangDiagnosticHandler(const CodeGenOptions &CGOpts, BackendConsumer *BCon) : CodeGenOpts(CGOpts), BackendCon(BCon) {} bool handleDiagnostics(const DiagnosticInfo &DI) override; bool isAnalysisRemarkEnabled(StringRef PassName) const override { return (CodeGenOpts.OptimizationRemarkAnalysisPattern 
&& CodeGenOpts.OptimizationRemarkAnalysisPattern->match(PassName)); } bool isMissedOptRemarkEnabled(StringRef PassName) const override { return (CodeGenOpts.OptimizationRemarkMissedPattern && CodeGenOpts.OptimizationRemarkMissedPattern->match(PassName)); } bool isPassedOptRemarkEnabled(StringRef PassName) const override { return (CodeGenOpts.OptimizationRemarkPattern && CodeGenOpts.OptimizationRemarkPattern->match(PassName)); } bool isAnyRemarkEnabled() const override { return (CodeGenOpts.OptimizationRemarkAnalysisPattern || CodeGenOpts.OptimizationRemarkMissedPattern || CodeGenOpts.OptimizationRemarkPattern); } private: const CodeGenOptions &CodeGenOpts; BackendConsumer *BackendCon; }; class BackendConsumer : public ASTConsumer { using LinkModule = CodeGenAction::LinkModule; virtual void anchor(); DiagnosticsEngine &Diags; BackendAction Action; const HeaderSearchOptions &HeaderSearchOpts; const CodeGenOptions &CodeGenOpts; const TargetOptions &TargetOpts; const LangOptions &LangOpts; std::unique_ptr<raw_pwrite_stream> AsmOutStream; ASTContext *Context; Timer LLVMIRGeneration; unsigned LLVMIRGenerationRefCount; /// True if we've finished generating IR. This prevents us from generating /// additional LLVM IR after emitting output in HandleTranslationUnit. This /// can happen when Clang plugins trigger additional AST deserialization. bool IRGenFinished = false; std::unique_ptr<CodeGenerator> Gen; SmallVector<LinkModule, 4> LinkModules; // This is here so that the diagnostic printer knows the module a diagnostic // refers to. 
llvm::Module *CurLinkModule = nullptr; public: BackendConsumer(BackendAction Action, DiagnosticsEngine &Diags, const HeaderSearchOptions &HeaderSearchOpts, const PreprocessorOptions &PPOpts, const CodeGenOptions &CodeGenOpts, const TargetOptions &TargetOpts, const LangOptions &LangOpts, bool TimePasses, const std::string &InFile, SmallVector<LinkModule, 4> LinkModules, std::unique_ptr<raw_pwrite_stream> OS, LLVMContext &C, CoverageSourceInfo *CoverageInfo = nullptr) : Diags(Diags), Action(Action), HeaderSearchOpts(HeaderSearchOpts), CodeGenOpts(CodeGenOpts), TargetOpts(TargetOpts), LangOpts(LangOpts), AsmOutStream(std::move(OS)), Context(nullptr), LLVMIRGeneration("irgen", "LLVM IR Generation Time"), LLVMIRGenerationRefCount(0), Gen(CreateLLVMCodeGen(Diags, InFile, HeaderSearchOpts, PPOpts, CodeGenOpts, C, CoverageInfo)), LinkModules(std::move(LinkModules)) { FrontendTimesIsEnabled = TimePasses; llvm::TimePassesIsEnabled = TimePasses; } llvm::Module *getModule() const { return Gen->GetModule(); } std::unique_ptr<llvm::Module> takeModule() { return std::unique_ptr<llvm::Module>(Gen->ReleaseModule()); } CodeGenerator *getCodeGenerator() { return Gen.get(); } void HandleCXXStaticMemberVarInstantiation(VarDecl *VD) override { Gen->HandleCXXStaticMemberVarInstantiation(VD); } void Initialize(ASTContext &Ctx) override { assert(!Context && "initialized multiple times"); Context = &Ctx; if (FrontendTimesIsEnabled) LLVMIRGeneration.startTimer(); Gen->Initialize(Ctx); if (FrontendTimesIsEnabled) LLVMIRGeneration.stopTimer(); } bool HandleTopLevelDecl(DeclGroupRef D) override { PrettyStackTraceDecl CrashInfo(*D.begin(), SourceLocation(), Context->getSourceManager(), "LLVM IR generation of declaration"); // Recurse. 
if (FrontendTimesIsEnabled) { LLVMIRGenerationRefCount += 1; if (LLVMIRGenerationRefCount == 1) LLVMIRGeneration.startTimer(); } Gen->HandleTopLevelDecl(D); if (FrontendTimesIsEnabled) { LLVMIRGenerationRefCount -= 1; if (LLVMIRGenerationRefCount == 0) LLVMIRGeneration.stopTimer(); } return true; } void HandleInlineFunctionDefinition(FunctionDecl *D) override { PrettyStackTraceDecl CrashInfo(D, SourceLocation(), Context->getSourceManager(), "LLVM IR generation of inline function"); if (FrontendTimesIsEnabled) LLVMIRGeneration.startTimer(); Gen->HandleInlineFunctionDefinition(D); if (FrontendTimesIsEnabled) LLVMIRGeneration.stopTimer(); } void HandleInterestingDecl(DeclGroupRef D) override { // Ignore interesting decls from the AST reader after IRGen is finished. if (!IRGenFinished) HandleTopLevelDecl(D); } // Links each entry in LinkModules into our module. Returns true on error. bool LinkInModules() { for (auto &LM : LinkModules) { if (LM.PropagateAttrs) for (Function &F : *LM.Module) Gen->CGM().AddDefaultFnAttrs(F); CurLinkModule = LM.Module.get(); bool Err; if (LM.Internalize) { Err = Linker::linkModules( *getModule(), std::move(LM.Module), LM.LinkFlags, [](llvm::Module &M, const llvm::StringSet<> &GVS) { internalizeModule(M, [&GVS](const llvm::GlobalValue &GV) { return !GV.hasName() || (GVS.count(GV.getName()) == 0); }); }); } else { Err = Linker::linkModules(*getModule(), std::move(LM.Module), LM.LinkFlags); } if (Err) return true; } return false; // success } void HandleTranslationUnit(ASTContext &C) override { { PrettyStackTraceString CrashInfo("Per-file LLVM IR generation"); if (FrontendTimesIsEnabled) { LLVMIRGenerationRefCount += 1; if (LLVMIRGenerationRefCount == 1) LLVMIRGeneration.startTimer(); } Gen->HandleTranslationUnit(C); if (FrontendTimesIsEnabled) { LLVMIRGenerationRefCount -= 1; if (LLVMIRGenerationRefCount == 0) LLVMIRGeneration.stopTimer(); } IRGenFinished = true; } // Silently ignore if we weren't initialized for some reason. 
if (!getModule()) return; // Install an inline asm handler so that diagnostics get printed through // our diagnostics hooks. LLVMContext &Ctx = getModule()->getContext(); LLVMContext::InlineAsmDiagHandlerTy OldHandler = Ctx.getInlineAsmDiagnosticHandler(); void *OldContext = Ctx.getInlineAsmDiagnosticContext(); Ctx.setInlineAsmDiagnosticHandler(InlineAsmDiagHandler, this); std::unique_ptr<DiagnosticHandler> OldDiagnosticHandler = Ctx.getDiagnosticHandler(); Ctx.setDiagnosticHandler(llvm::make_unique<ClangDiagnosticHandler>( CodeGenOpts, this)); Ctx.setDiagnosticsHotnessRequested(CodeGenOpts.DiagnosticsWithHotness); if (CodeGenOpts.DiagnosticsHotnessThreshold != 0) Ctx.setDiagnosticsHotnessThreshold( CodeGenOpts.DiagnosticsHotnessThreshold); std::unique_ptr<llvm::ToolOutputFile> OptRecordFile; if (!CodeGenOpts.OptRecordFile.empty()) { std::error_code EC; OptRecordFile = llvm::make_unique<llvm::ToolOutputFile>( CodeGenOpts.OptRecordFile, EC, sys::fs::F_None); if (EC) { Diags.Report(diag::err_cannot_open_file) << CodeGenOpts.OptRecordFile << EC.message(); return; } Ctx.setDiagnosticsOutputFile( llvm::make_unique<yaml::Output>(OptRecordFile->os())); if (CodeGenOpts.getProfileUse() != CodeGenOptions::ProfileNone) Ctx.setDiagnosticsHotnessRequested(true); } // Link each LinkModule into our module. 
if (LinkInModules()) return; EmbedBitcode(getModule(), CodeGenOpts, llvm::MemoryBufferRef()); EmitBackendOutput(Diags, HeaderSearchOpts, CodeGenOpts, TargetOpts, LangOpts, C.getTargetInfo().getDataLayout(), getModule(), Action, std::move(AsmOutStream)); Ctx.setInlineAsmDiagnosticHandler(OldHandler, OldContext); Ctx.setDiagnosticHandler(std::move(OldDiagnosticHandler)); if (OptRecordFile) OptRecordFile->keep(); } void HandleTagDeclDefinition(TagDecl *D) override { PrettyStackTraceDecl CrashInfo(D, SourceLocation(), Context->getSourceManager(), "LLVM IR generation of declaration"); Gen->HandleTagDeclDefinition(D); } void HandleTagDeclRequiredDefinition(const TagDecl *D) override { Gen->HandleTagDeclRequiredDefinition(D); } void CompleteTentativeDefinition(VarDecl *D) override { Gen->CompleteTentativeDefinition(D); } void AssignInheritanceModel(CXXRecordDecl *RD) override { Gen->AssignInheritanceModel(RD); } void HandleVTable(CXXRecordDecl *RD) override { Gen->HandleVTable(RD); } static void InlineAsmDiagHandler(const llvm::SMDiagnostic &SM,void *Context, unsigned LocCookie) { SourceLocation Loc = SourceLocation::getFromRawEncoding(LocCookie); ((BackendConsumer*)Context)->InlineAsmDiagHandler2(SM, Loc); } /// Get the best possible source location to represent a diagnostic that /// may have associated debug info. const FullSourceLoc getBestLocationFromDebugLoc(const llvm::DiagnosticInfoWithLocationBase &D, bool &BadDebugInfo, StringRef &Filename, unsigned &Line, unsigned &Column) const; void InlineAsmDiagHandler2(const llvm::SMDiagnostic &, SourceLocation LocCookie); void DiagnosticHandlerImpl(const llvm::DiagnosticInfo &DI); /// Specialized handler for InlineAsm diagnostic. /// \return True if the diagnostic has been successfully reported, false /// otherwise. bool InlineAsmDiagHandler(const llvm::DiagnosticInfoInlineAsm &D); /// Specialized handler for StackSize diagnostic. /// \return True if the diagnostic has been successfully reported, false /// otherwise. 
bool StackSizeDiagHandler(const llvm::DiagnosticInfoStackSize &D); /// Specialized handler for unsupported backend feature diagnostic. void UnsupportedDiagHandler(const llvm::DiagnosticInfoUnsupported &D); /// Specialized handlers for optimization remarks. /// Note that these handlers only accept remarks and they always handle /// them. void EmitOptimizationMessage(const llvm::DiagnosticInfoOptimizationBase &D, unsigned DiagID); void OptimizationRemarkHandler(const llvm::DiagnosticInfoOptimizationBase &D); void OptimizationRemarkHandler( const llvm::OptimizationRemarkAnalysisFPCommute &D); void OptimizationRemarkHandler( const llvm::OptimizationRemarkAnalysisAliasing &D); void OptimizationFailureHandler( const llvm::DiagnosticInfoOptimizationFailure &D); }; void BackendConsumer::anchor() {} } bool ClangDiagnosticHandler::handleDiagnostics(const DiagnosticInfo &DI) { BackendCon->DiagnosticHandlerImpl(DI); return true; } /// ConvertBackendLocation - Convert a location in a temporary llvm::SourceMgr /// buffer to be a valid FullSourceLoc. static FullSourceLoc ConvertBackendLocation(const llvm::SMDiagnostic &D, SourceManager &CSM) { // Get both the clang and llvm source managers. The location is relative to // a memory buffer that the LLVM Source Manager is handling, we need to add // a copy to the Clang source manager. const llvm::SourceMgr &LSM = *D.getSourceMgr(); // We need to copy the underlying LLVM memory buffer because llvm::SourceMgr // already owns its one and clang::SourceManager wants to own its one. const MemoryBuffer *LBuf = LSM.getMemoryBuffer(LSM.FindBufferContainingLoc(D.getLoc())); // Create the copy and transfer ownership to clang::SourceManager. // TODO: Avoid copying files into memory. std::unique_ptr<llvm::MemoryBuffer> CBuf = llvm::MemoryBuffer::getMemBufferCopy(LBuf->getBuffer(), LBuf->getBufferIdentifier()); // FIXME: Keep a file ID map instead of creating new IDs for each location. 
FileID FID = CSM.createFileID(std::move(CBuf)); // Translate the offset into the file. unsigned Offset = D.getLoc().getPointer() - LBuf->getBufferStart(); SourceLocation NewLoc = CSM.getLocForStartOfFile(FID).getLocWithOffset(Offset); return FullSourceLoc(NewLoc, CSM); } /// InlineAsmDiagHandler2 - This function is invoked when the backend hits an /// error parsing inline asm. The SMDiagnostic indicates the error relative to /// the temporary memory buffer that the inline asm parser has set up. void BackendConsumer::InlineAsmDiagHandler2(const llvm::SMDiagnostic &D, SourceLocation LocCookie) { // There are a couple of different kinds of errors we could get here. First, // we re-format the SMDiagnostic in terms of a clang diagnostic. // Strip "error: " off the start of the message string. StringRef Message = D.getMessage(); if (Message.startswith("error: ")) Message = Message.substr(7); // If the SMDiagnostic has an inline asm source location, translate it. FullSourceLoc Loc; if (D.getLoc() != SMLoc()) Loc = ConvertBackendLocation(D, Context->getSourceManager()); unsigned DiagID; switch (D.getKind()) { case llvm::SourceMgr::DK_Error: DiagID = diag::err_fe_inline_asm; break; case llvm::SourceMgr::DK_Warning: DiagID = diag::warn_fe_inline_asm; break; case llvm::SourceMgr::DK_Note: DiagID = diag::note_fe_inline_asm; break; case llvm::SourceMgr::DK_Remark: llvm_unreachable("remarks unexpected"); } // If this problem has clang-level source location information, report the // issue in the source with a note showing the instantiated // code. if (LocCookie.isValid()) { Diags.Report(LocCookie, DiagID).AddString(Message); if (D.getLoc().isValid()) { DiagnosticBuilder B = Diags.Report(Loc, diag::note_fe_inline_asm_here); // Convert the SMDiagnostic ranges into SourceRange and attach them // to the diagnostic. 
for (const std::pair<unsigned, unsigned> &Range : D.getRanges()) { unsigned Column = D.getColumnNo(); B << SourceRange(Loc.getLocWithOffset(Range.first - Column), Loc.getLocWithOffset(Range.second - Column)); } } return; } // Otherwise, report the backend issue as occurring in the generated .s file. // If Loc is invalid, we still need to report the issue, it just gets no // location info. Diags.Report(Loc, DiagID).AddString(Message); } #define ComputeDiagID(Severity, GroupName, DiagID) \ do { \ switch (Severity) { \ case llvm::DS_Error: \ DiagID = diag::err_fe_##GroupName; \ break; \ case llvm::DS_Warning: \ DiagID = diag::warn_fe_##GroupName; \ break; \ case llvm::DS_Remark: \ llvm_unreachable("'remark' severity not expected"); \ break; \ case llvm::DS_Note: \ DiagID = diag::note_fe_##GroupName; \ break; \ } \ } while (false) #define ComputeDiagRemarkID(Severity, GroupName, DiagID) \ do { \ switch (Severity) { \ case llvm::DS_Error: \ DiagID = diag::err_fe_##GroupName; \ break; \ case llvm::DS_Warning: \ DiagID = diag::warn_fe_##GroupName; \ break; \ case llvm::DS_Remark: \ DiagID = diag::remark_fe_##GroupName; \ break; \ case llvm::DS_Note: \ DiagID = diag::note_fe_##GroupName; \ break; \ } \ } while (false) bool BackendConsumer::InlineAsmDiagHandler(const llvm::DiagnosticInfoInlineAsm &D) { unsigned DiagID; ComputeDiagID(D.getSeverity(), inline_asm, DiagID); std::string Message = D.getMsgStr().str(); // If this problem has clang-level source location information, report the // issue as being a problem in the source with a note showing the instantiated // code. SourceLocation LocCookie = SourceLocation::getFromRawEncoding(D.getLocCookie()); if (LocCookie.isValid()) Diags.Report(LocCookie, DiagID).AddString(Message); else { // Otherwise, report the backend diagnostic as occurring in the generated // .s file. // If Loc is invalid, we still need to report the diagnostic, it just gets // no location info. 
FullSourceLoc Loc; Diags.Report(Loc, DiagID).AddString(Message); } // We handled all the possible severities. return true; } bool BackendConsumer::StackSizeDiagHandler(const llvm::DiagnosticInfoStackSize &D) { if (D.getSeverity() != llvm::DS_Warning) // For now, the only support we have for StackSize diagnostic is warning. // We do not know how to format other severities. return false; if (const Decl *ND = Gen->GetDeclForMangledName(D.getFunction().getName())) { // FIXME: Shouldn't need to truncate to uint32_t Diags.Report(ND->getASTContext().getFullLoc(ND->getLocation()), diag::warn_fe_frame_larger_than) << static_cast<uint32_t>(D.getStackSize()) << Decl::castToDeclContext(ND); return true; } return false; } const FullSourceLoc BackendConsumer::getBestLocationFromDebugLoc( const llvm::DiagnosticInfoWithLocationBase &D, bool &BadDebugInfo, StringRef &Filename, unsigned &Line, unsigned &Column) const { SourceManager &SourceMgr = Context->getSourceManager(); FileManager &FileMgr = SourceMgr.getFileManager(); SourceLocation DILoc; if (D.isLocationAvailable()) { D.getLocation(&Filename, &Line, &Column); const FileEntry *FE = FileMgr.getFile(Filename); if (FE && Line > 0) { // If -gcolumn-info was not used, Column will be 0. This upsets the // source manager, so pass 1 if Column is not set. DILoc = SourceMgr.translateFileLineCol(FE, Line, Column ? Column : 1); } BadDebugInfo = DILoc.isInvalid(); } // If a location isn't available, try to approximate it using the associated // function definition. We use the definition's right brace to differentiate // from diagnostics that genuinely relate to the function itself. 
FullSourceLoc Loc(DILoc, SourceMgr); if (Loc.isInvalid()) if (const Decl *FD = Gen->GetDeclForMangledName(D.getFunction().getName())) Loc = FD->getASTContext().getFullLoc(FD->getLocation()); if (DILoc.isInvalid() && D.isLocationAvailable()) // If we were not able to translate the file:line:col information // back to a SourceLocation, at least emit a note stating that // we could not translate this location. This can happen in the // case of #line directives. Diags.Report(Loc, diag::note_fe_backend_invalid_loc) << Filename << Line << Column; return Loc; } void BackendConsumer::UnsupportedDiagHandler( const llvm::DiagnosticInfoUnsupported &D) { // We only support errors. assert(D.getSeverity() == llvm::DS_Error); StringRef Filename; unsigned Line, Column; bool BadDebugInfo = false; FullSourceLoc Loc = getBestLocationFromDebugLoc(D, BadDebugInfo, Filename, Line, Column); Diags.Report(Loc, diag::err_fe_backend_unsupported) << D.getMessage().str(); if (BadDebugInfo) // If we were not able to translate the file:line:col information // back to a SourceLocation, at least emit a note stating that // we could not translate this location. This can happen in the // case of #line directives. Diags.Report(Loc, diag::note_fe_backend_invalid_loc) << Filename << Line << Column; } void BackendConsumer::EmitOptimizationMessage( const llvm::DiagnosticInfoOptimizationBase &D, unsigned DiagID) { // We only support warnings and remarks. 
assert(D.getSeverity() == llvm::DS_Remark || D.getSeverity() == llvm::DS_Warning); StringRef Filename; unsigned Line, Column; bool BadDebugInfo = false; FullSourceLoc Loc = getBestLocationFromDebugLoc(D, BadDebugInfo, Filename, Line, Column); std::string Msg; raw_string_ostream MsgStream(Msg); MsgStream << D.getMsg(); if (D.getHotness()) MsgStream << " (hotness: " << *D.getHotness() << ")"; Diags.Report(Loc, DiagID) << AddFlagValue(D.getPassName()) << MsgStream.str(); if (BadDebugInfo) // If we were not able to translate the file:line:col information // back to a SourceLocation, at least emit a note stating that // we could not translate this location. This can happen in the // case of #line directives. Diags.Report(Loc, diag::note_fe_backend_invalid_loc) << Filename << Line << Column; } void BackendConsumer::OptimizationRemarkHandler( const llvm::DiagnosticInfoOptimizationBase &D) { // Without hotness information, don't show noisy remarks. if (D.isVerbose() && !D.getHotness()) return; if (D.isPassed()) { // Optimization remarks are active only if the -Rpass flag has a regular // expression that matches the name of the pass name in \p D. if (CodeGenOpts.OptimizationRemarkPattern && CodeGenOpts.OptimizationRemarkPattern->match(D.getPassName())) EmitOptimizationMessage(D, diag::remark_fe_backend_optimization_remark); } else if (D.isMissed()) { // Missed optimization remarks are active only if the -Rpass-missed // flag has a regular expression that matches the name of the pass // name in \p D. 
if (CodeGenOpts.OptimizationRemarkMissedPattern && CodeGenOpts.OptimizationRemarkMissedPattern->match(D.getPassName())) EmitOptimizationMessage( D, diag::remark_fe_backend_optimization_remark_missed); } else { assert(D.isAnalysis() && "Unknown remark type"); bool ShouldAlwaysPrint = false; if (auto *ORA = dyn_cast<llvm::OptimizationRemarkAnalysis>(&D)) ShouldAlwaysPrint = ORA->shouldAlwaysPrint(); if (ShouldAlwaysPrint || (CodeGenOpts.OptimizationRemarkAnalysisPattern && CodeGenOpts.OptimizationRemarkAnalysisPattern->match(D.getPassName()))) EmitOptimizationMessage( D, diag::remark_fe_backend_optimization_remark_analysis); } } void BackendConsumer::OptimizationRemarkHandler( const llvm::OptimizationRemarkAnalysisFPCommute &D) { // Optimization analysis remarks are active if the pass name is set to // llvm::DiagnosticInfo::AlwasyPrint or if the -Rpass-analysis flag has a // regular expression that matches the name of the pass name in \p D. if (D.shouldAlwaysPrint() || (CodeGenOpts.OptimizationRemarkAnalysisPattern && CodeGenOpts.OptimizationRemarkAnalysisPattern->match(D.getPassName()))) EmitOptimizationMessage( D, diag::remark_fe_backend_optimization_remark_analysis_fpcommute); } void BackendConsumer::OptimizationRemarkHandler( const llvm::OptimizationRemarkAnalysisAliasing &D) { // Optimization analysis remarks are active if the pass name is set to // llvm::DiagnosticInfo::AlwasyPrint or if the -Rpass-analysis flag has a // regular expression that matches the name of the pass name in \p D. 
if (D.shouldAlwaysPrint() || (CodeGenOpts.OptimizationRemarkAnalysisPattern && CodeGenOpts.OptimizationRemarkAnalysisPattern->match(D.getPassName()))) EmitOptimizationMessage( D, diag::remark_fe_backend_optimization_remark_analysis_aliasing); } void BackendConsumer::OptimizationFailureHandler( const llvm::DiagnosticInfoOptimizationFailure &D) { EmitOptimizationMessage(D, diag::warn_fe_backend_optimization_failure); } /// This function is invoked when the backend needs /// to report something to the user. void BackendConsumer::DiagnosticHandlerImpl(const DiagnosticInfo &DI) { unsigned DiagID = diag::err_fe_inline_asm; llvm::DiagnosticSeverity Severity = DI.getSeverity(); // Get the diagnostic ID based. switch (DI.getKind()) { case llvm::DK_InlineAsm: if (InlineAsmDiagHandler(cast<DiagnosticInfoInlineAsm>(DI))) return; ComputeDiagID(Severity, inline_asm, DiagID); break; case llvm::DK_StackSize: if (StackSizeDiagHandler(cast<DiagnosticInfoStackSize>(DI))) return; ComputeDiagID(Severity, backend_frame_larger_than, DiagID); break; case DK_Linker: assert(CurLinkModule); // FIXME: stop eating the warnings and notes. if (Severity != DS_Error) return; DiagID = diag::err_fe_cannot_link_module; break; case llvm::DK_OptimizationRemark: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<OptimizationRemark>(DI)); return; case llvm::DK_OptimizationRemarkMissed: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<OptimizationRemarkMissed>(DI)); return; case llvm::DK_OptimizationRemarkAnalysis: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. 
OptimizationRemarkHandler(cast<OptimizationRemarkAnalysis>(DI)); return; case llvm::DK_OptimizationRemarkAnalysisFPCommute: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<OptimizationRemarkAnalysisFPCommute>(DI)); return; case llvm::DK_OptimizationRemarkAnalysisAliasing: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<OptimizationRemarkAnalysisAliasing>(DI)); return; case llvm::DK_MachineOptimizationRemark: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<MachineOptimizationRemark>(DI)); return; case llvm::DK_MachineOptimizationRemarkMissed: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<MachineOptimizationRemarkMissed>(DI)); return; case llvm::DK_MachineOptimizationRemarkAnalysis: // Optimization remarks are always handled completely by this // handler. There is no generic way of emitting them. OptimizationRemarkHandler(cast<MachineOptimizationRemarkAnalysis>(DI)); return; case llvm::DK_OptimizationFailure: // Optimization failures are always handled completely by this // handler. OptimizationFailureHandler(cast<DiagnosticInfoOptimizationFailure>(DI)); return; case llvm::DK_Unsupported: UnsupportedDiagHandler(cast<DiagnosticInfoUnsupported>(DI)); return; default: // Plugin IDs are not bound to any value as they are set dynamically. 
ComputeDiagRemarkID(Severity, backend_plugin, DiagID); break; } std::string MsgStorage; { raw_string_ostream Stream(MsgStorage); DiagnosticPrinterRawOStream DP(Stream); DI.print(DP); } if (DiagID == diag::err_fe_cannot_link_module) { Diags.Report(diag::err_fe_cannot_link_module) << CurLinkModule->getModuleIdentifier() << MsgStorage; return; } // Report the backend message using the usual diagnostic mechanism. FullSourceLoc Loc; Diags.Report(Loc, DiagID).AddString(MsgStorage); } #undef ComputeDiagID CodeGenAction::CodeGenAction(unsigned _Act, LLVMContext *_VMContext) : Act(_Act), VMContext(_VMContext ? _VMContext : new LLVMContext), OwnsVMContext(!_VMContext) {} CodeGenAction::~CodeGenAction() { TheModule.reset(); if (OwnsVMContext) delete VMContext; } bool CodeGenAction::hasIRSupport() const { return true; } void CodeGenAction::EndSourceFileAction() { // If the consumer creation failed, do nothing. if (!getCompilerInstance().hasASTConsumer()) return; // Steal the module from the consumer. 
TheModule = BEConsumer->takeModule(); } std::unique_ptr<llvm::Module> CodeGenAction::takeModule() { return std::move(TheModule); } llvm::LLVMContext *CodeGenAction::takeLLVMContext() { OwnsVMContext = false; return VMContext; } static std::unique_ptr<raw_pwrite_stream> GetOutputStream(CompilerInstance &CI, StringRef InFile, BackendAction Action) { switch (Action) { case Backend_EmitAssembly: return CI.createDefaultOutputFile(false, InFile, "s"); case Backend_EmitLL: return CI.createDefaultOutputFile(false, InFile, "ll"); case Backend_EmitBC: return CI.createDefaultOutputFile(true, InFile, "bc"); case Backend_EmitNothing: return nullptr; case Backend_EmitMCNull: return CI.createNullOutputFile(); case Backend_EmitObj: return CI.createDefaultOutputFile(true, InFile, "o"); } llvm_unreachable("Invalid action!"); } std::unique_ptr<ASTConsumer> CodeGenAction::CreateASTConsumer(CompilerInstance &CI, StringRef InFile) { BackendAction BA = static_cast<BackendAction>(Act); std::unique_ptr<raw_pwrite_stream> OS = CI.takeOutputStream(); if (!OS) OS = GetOutputStream(CI, InFile, BA); if (BA != Backend_EmitNothing && !OS) return nullptr; // Load bitcode modules to link with, if we need to. 
if (LinkModules.empty()) for (const CodeGenOptions::BitcodeFileToLink &F : CI.getCodeGenOpts().LinkBitcodeFiles) { auto BCBuf = CI.getFileManager().getBufferForFile(F.Filename); if (!BCBuf) { CI.getDiagnostics().Report(diag::err_cannot_open_file) << F.Filename << BCBuf.getError().message(); LinkModules.clear(); return nullptr; } Expected<std::unique_ptr<llvm::Module>> ModuleOrErr = getOwningLazyBitcodeModule(std::move(*BCBuf), *VMContext); if (!ModuleOrErr) { handleAllErrors(ModuleOrErr.takeError(), [&](ErrorInfoBase &EIB) { CI.getDiagnostics().Report(diag::err_cannot_open_file) << F.Filename << EIB.message(); }); LinkModules.clear(); return nullptr; } LinkModules.push_back({std::move(ModuleOrErr.get()), F.PropagateAttrs, F.Internalize, F.LinkFlags}); } CoverageSourceInfo *CoverageInfo = nullptr; // Add the preprocessor callback only when the coverage mapping is generated. if (CI.getCodeGenOpts().CoverageMapping) { CoverageInfo = new CoverageSourceInfo; CI.getPreprocessor().addPPCallbacks( std::unique_ptr<PPCallbacks>(CoverageInfo)); } std::unique_ptr<BackendConsumer> Result(new BackendConsumer( BA, CI.getDiagnostics(), CI.getHeaderSearchOpts(), CI.getPreprocessorOpts(), CI.getCodeGenOpts(), CI.getTargetOpts(), CI.getLangOpts(), CI.getFrontendOpts().ShowTimers, InFile, std::move(LinkModules), std::move(OS), *VMContext, CoverageInfo)); BEConsumer = Result.get(); // Enable generating macro debug info only when debug info is not disabled and // also macro debug info is enabled. 
if (CI.getCodeGenOpts().getDebugInfo() != codegenoptions::NoDebugInfo && CI.getCodeGenOpts().MacroDebugInfo) { std::unique_ptr<PPCallbacks> Callbacks = llvm::make_unique<MacroPPCallbacks>(BEConsumer->getCodeGenerator(), CI.getPreprocessor()); CI.getPreprocessor().addPPCallbacks(std::move(Callbacks)); } return std::move(Result); } static void BitcodeInlineAsmDiagHandler(const llvm::SMDiagnostic &SM, void *Context, unsigned LocCookie) { SM.print(nullptr, llvm::errs()); auto Diags = static_cast<DiagnosticsEngine *>(Context); unsigned DiagID; switch (SM.getKind()) { case llvm::SourceMgr::DK_Error: DiagID = diag::err_fe_inline_asm; break; case llvm::SourceMgr::DK_Warning: DiagID = diag::warn_fe_inline_asm; break; case llvm::SourceMgr::DK_Note: DiagID = diag::note_fe_inline_asm; break; case llvm::SourceMgr::DK_Remark: llvm_unreachable("remarks unexpected"); } Diags->Report(DiagID).AddString("cannot compile inline asm"); } std::unique_ptr<llvm::Module> CodeGenAction::loadModule(MemoryBufferRef MBRef) { CompilerInstance &CI = getCompilerInstance(); SourceManager &SM = CI.getSourceManager(); // For ThinLTO backend invocations, ensure that the context // merges types based on ODR identifiers. We also need to read // the correct module out of a multi-module bitcode file. if (!CI.getCodeGenOpts().ThinLTOIndexFile.empty()) { VMContext->enableDebugTypeODRUniquing(); auto DiagErrors = [&](Error E) -> std::unique_ptr<llvm::Module> { unsigned DiagID = CI.getDiagnostics().getCustomDiagID(DiagnosticsEngine::Error, "%0"); handleAllErrors(std::move(E), [&](ErrorInfoBase &EIB) { CI.getDiagnostics().Report(DiagID) << EIB.message(); }); return {}; }; Expected<std::vector<BitcodeModule>> BMsOrErr = getBitcodeModuleList(MBRef); if (!BMsOrErr) return DiagErrors(BMsOrErr.takeError()); BitcodeModule *Bm = FindThinLTOModule(*BMsOrErr); // We have nothing to do if the file contains no ThinLTO module. This is // possible if ThinLTO compilation was not able to split module. 
Content of // the file was already processed by indexing and will be passed to the // linker using merged object file. if (!Bm) { auto M = llvm::make_unique<llvm::Module>("empty", *VMContext); M->setTargetTriple(CI.getTargetOpts().Triple); return M; } Expected<std::unique_ptr<llvm::Module>> MOrErr = Bm->parseModule(*VMContext); if (!MOrErr) return DiagErrors(MOrErr.takeError()); return std::move(*MOrErr); } llvm::SMDiagnostic Err; if (std::unique_ptr<llvm::Module> M = parseIR(MBRef, Err, *VMContext)) return M; // Translate from the diagnostic info to the SourceManager location if // available. // TODO: Unify this with ConvertBackendLocation() SourceLocation Loc; if (Err.getLineNo() > 0) { assert(Err.getColumnNo() >= 0); Loc = SM.translateFileLineCol(SM.getFileEntryForID(SM.getMainFileID()), Err.getLineNo(), Err.getColumnNo() + 1); } // Strip off a leading diagnostic code if there is one. StringRef Msg = Err.getMessage(); if (Msg.startswith("error: ")) Msg = Msg.substr(7); unsigned DiagID = CI.getDiagnostics().getCustomDiagID(DiagnosticsEngine::Error, "%0"); CI.getDiagnostics().Report(Loc, DiagID) << Msg; return {}; } void CodeGenAction::ExecuteAction() { // If this is an IR file, we have to treat it specially. 
if (getCurrentFileKind().getLanguage() == InputKind::LLVM_IR) { BackendAction BA = static_cast<BackendAction>(Act); CompilerInstance &CI = getCompilerInstance(); std::unique_ptr<raw_pwrite_stream> OS = GetOutputStream(CI, getCurrentFile(), BA); if (BA != Backend_EmitNothing && !OS) return; bool Invalid; SourceManager &SM = CI.getSourceManager(); FileID FID = SM.getMainFileID(); llvm::MemoryBuffer *MainFile = SM.getBuffer(FID, &Invalid); if (Invalid) return; TheModule = loadModule(*MainFile); if (!TheModule) return; const TargetOptions &TargetOpts = CI.getTargetOpts(); if (TheModule->getTargetTriple() != TargetOpts.Triple) { CI.getDiagnostics().Report(SourceLocation(), diag::warn_fe_override_module) << TargetOpts.Triple; TheModule->setTargetTriple(TargetOpts.Triple); } EmbedBitcode(TheModule.get(), CI.getCodeGenOpts(), MainFile->getMemBufferRef()); LLVMContext &Ctx = TheModule->getContext(); Ctx.setInlineAsmDiagnosticHandler(BitcodeInlineAsmDiagHandler, &CI.getDiagnostics()); EmitBackendOutput(CI.getDiagnostics(), CI.getHeaderSearchOpts(), CI.getCodeGenOpts(), TargetOpts, CI.getLangOpts(), CI.getTarget().getDataLayout(), TheModule.get(), BA, std::move(OS)); return; } // Otherwise follow the normal AST path. 
this->ASTFrontendAction::ExecuteAction(); } // void EmitAssemblyAction::anchor() { } EmitAssemblyAction::EmitAssemblyAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitAssembly, _VMContext) {} void EmitBCAction::anchor() { } EmitBCAction::EmitBCAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitBC, _VMContext) {} void EmitLLVMAction::anchor() { } EmitLLVMAction::EmitLLVMAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitLL, _VMContext) {} void EmitLLVMOnlyAction::anchor() { } EmitLLVMOnlyAction::EmitLLVMOnlyAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitNothing, _VMContext) {} void EmitCodeGenOnlyAction::anchor() { } EmitCodeGenOnlyAction::EmitCodeGenOnlyAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitMCNull, _VMContext) {} void EmitObjAction::anchor() { } EmitObjAction::EmitObjAction(llvm::LLVMContext *_VMContext) : CodeGenAction(Backend_EmitObj, _VMContext) {}
youtube/cobalt
third_party/llvm-project/clang/lib/CodeGen/CodeGenAction.cpp
C++
bsd-3-clause
41,495
# frozen_string_literal: true

module Stupidedi
  using Refinements

  module Schema
    #
    # Models the 5010 X12 "data segment requirement designator":
    #   M - Mandatory
    #   O - Optional
    #
    # @see X222 B.1.3.12.6 Data Segment Requirement Designators
    #
    # The HIPAA implementation guides use an "industry usage" instead:
    #   SITUATIONAL
    #   REQUIRED
    #
    class SegmentReq
      def initialize(required, forbidden, to_s)
        @required  = required
        @forbidden = forbidden
        @to_s      = to_s
      end

      # True when the segment must occur
      def required?
        @required
      end

      # True when the segment must not occur
      def forbidden?
        @forbidden
      end

      # True when the segment is neither required nor forbidden
      def optional?
        !(@required || @forbidden)
      end

      # @return [void]
      def pretty_print(q)
        q.text(@to_s)
      end

      # @return [String]
      def inspect
        @to_s
      end
    end
  end
end
kputnam/stupidedi
lib/stupidedi/schema/segment_req.rb
Ruby
bsd-3-clause
871
// Copyright 2010-2021, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "prediction/dictionary_predictor.h" #include <algorithm> #include <cctype> #include <climits> // INT_MAX #include <cmath> #include <cstdint> #include <list> #include <map> #include <set> #include <string> #include <utility> #include <vector> #include "base/japanese_util.h" #include "base/logging.h" #include "base/number_util.h" #include "base/util.h" #include "composer/composer.h" #include "converter/connector.h" #include "converter/converter_interface.h" #include "converter/immutable_converter_interface.h" #include "converter/node_list_builder.h" #include "converter/segmenter.h" #include "converter/segments.h" #include "dictionary/dictionary_interface.h" #include "dictionary/pos_matcher.h" #include "prediction/predictor_interface.h" #include "prediction/suggestion_filter.h" #include "prediction/zero_query_dict.h" #include "protocol/commands.pb.h" #include "protocol/config.pb.h" #include "request/conversion_request.h" #include "usage_stats/usage_stats.h" #include "absl/container/flat_hash_map.h" #include "absl/flags/flag.h" #include "absl/strings/match.h" #include "absl/strings/string_view.h" #ifndef NDEBUG #define MOZC_DEBUG #define MOZC_WORD_LOG_MESSAGE(message) \ absl::StrCat(__FILE__, ":", __LINE__, " ", message, "\n") #define MOZC_WORD_LOG(result, message) \ (result).log.append(MOZC_WORD_LOG_MESSAGE(message)) #else // NDEBUG #define MOZC_WORD_LOG(result, message) \ {} #endif // NDEBUG namespace mozc { namespace { using ::mozc::commands::Request; using ::mozc::dictionary::DictionaryInterface; using ::mozc::dictionary::PosMatcher; using ::mozc::dictionary::Token; using ::mozc::usage_stats::UsageStats; // Used to emulate positive infinity for cost. This value is set for those // candidates that are thought to be aggressive; thus we can eliminate such // candidates from suggestion or prediction. 
Note that for this purpose we don't // want to use INT_MAX because someone might further add penalty after cost is // set to INT_MAX, which leads to overflow and consequently aggressive // candidates would appear in the top results. constexpr int kInfinity = (2 << 20); // Note that PREDICTION mode is much slower than SUGGESTION. // Number of prediction calls should be minimized. constexpr size_t kSuggestionMaxResultsSize = 256; constexpr size_t kPredictionMaxResultsSize = 100000; bool IsEnableNewSpatialScoring(const ConversionRequest &request) { return request.request() .decoder_experiment_params() .enable_new_spatial_scoring(); } // Returns true if the |target| may be reduncant result. bool MaybeRedundant(const std::string &reference, const std::string &target) { return absl::StartsWith(target, reference); } bool IsLatinInputMode(const ConversionRequest &request) { return (request.has_composer() && (request.composer().GetInputMode() == transliteration::HALF_ASCII || request.composer().GetInputMode() == transliteration::FULL_ASCII)); } bool IsQwertyMobileTable(const ConversionRequest &request) { const auto table = request.request().special_romanji_table(); return (table == commands::Request::QWERTY_MOBILE_TO_HIRAGANA || table == commands::Request::QWERTY_MOBILE_TO_HALFWIDTHASCII); } bool IsLanguageAwareInputEnabled(const ConversionRequest &request) { const auto lang_aware = request.request().language_aware_input(); return lang_aware == commands::Request::LANGUAGE_AWARE_SUGGESTION; } // Returns true if |segments| contains number history. // Normalized number will be set to |number_key| // Note: // Now this function supports arabic number candidates only and // we don't support kanji number candidates for now. // This is because We have several kanji number styles, for example, // "一二", "十二", "壱拾弐", etc for 12. // TODO(toshiyuki): Define the spec and support Kanji. 
bool GetNumberHistory(const Segments &segments, std::string *number_key) {
  DCHECK(number_key);
  const size_t history_size = segments.history_segments_size();
  if (history_size <= 0) {
    return false;
  }

  // Only the top candidate of the last history segment is inspected.
  const Segment &last_segment = segments.history_segment(history_size - 1);
  DCHECK_GT(last_segment.candidates_size(), 0);
  const std::string &history_value = last_segment.candidate(0).value;
  if (!NumberUtil::IsArabicNumber(history_value)) {
    return false;
  }

  // Normalize full-width digits to half-width before returning.
  japanese_util::FullWidthToHalfWidth(history_value, number_key);
  return true;
}

// Returns true when prediction and conversion results are mixed (mobile mode).
bool IsMixedConversionEnabled(const commands::Request &request) {
  return request.mixed_conversion();
}

// Returns true when the user config enables typing correction.
bool IsTypingCorrectionEnabled(const ConversionRequest &request) {
  return request.config().use_typing_correction();
}

// Returns true when the key of the top candidate of the last history segment
// is at least |utf8_len| characters long (measured in Unicode characters).
bool HasHistoryKeyLongerThanOrEqualTo(const Segments &segments,
                                      size_t utf8_len) {
  const size_t history_segments_size = segments.history_segments_size();
  if (history_segments_size == 0) {
    return false;
  }
  const Segment &history_segment =
      segments.history_segment(history_segments_size - 1);
  if (history_segment.candidates_size() == 0) {
    return false;
  }
  return Util::CharsLen(history_segment.candidate(0).key) >= utf8_len;
}

// A key with 8 or more characters is considered "long"; fewer realtime
// candidates are generated for long keys.
bool IsLongKeyForRealtimeCandidates(const Segments &segments) {
  constexpr int kFewResultThreshold = 8;
  return (segments.segments_size() > 0 &&
          Util::CharsLen(segments.segment(0).key()) >= kFewResultThreshold);
}

// Returns the remaining candidate budget for realtime conversion results,
// capped at 8 for long keys.
// NOTE(review): assumes max_dictionary_prediction_candidates_size() >=
// candidates_size(); otherwise the unsigned subtraction would wrap — confirm
// with callers.
size_t GetMaxSizeForRealtimeCandidates(const ConversionRequest &request,
                                       const Segments &segments,
                                       bool is_long_key) {
  const auto &segment = segments.conversion_segment(0);
  const size_t size = (request.max_dictionary_prediction_candidates_size() -
                       segment.candidates_size());
  return is_long_key ? std::min<size_t>(size, 8) : size;
}

// Default realtime candidate count when no explicit budget applies.
size_t GetDefaultSizeForRealtimeCandidates(bool is_long_key) {
  return is_long_key ? 5 : 10;
}

// Returns a copy of |request| whose max conversion candidate size is raised so
// that |realtime_candidates_size| additional candidates can be produced on top
// of the |current_candidates_size| already present.
ConversionRequest GetConversionRequestForRealtimeCandidates(
    const ConversionRequest &request, size_t realtime_candidates_size,
    size_t current_candidates_size) {
  ConversionRequest ret = request;
  ret.set_max_conversion_candidates_size(current_candidates_size +
                                         realtime_candidates_size);
  return ret;
}

}  // namespace

// Dictionary traversal callback that collects predictive lookup results into
// |results|, applying an optional spatial (key-expansion) penalty and an
// optional filter on the characters that may follow the original key.
class DictionaryPredictor::PredictiveLookupCallback
    : public DictionaryInterface::Callback {
 public:
  PredictiveLookupCallback(DictionaryPredictor::PredictionTypes types,
                           size_t limit, size_t original_key_len,
                           const std::set<std::string> *subsequent_chars,
                           Segment::Candidate::SourceInfo source_info,
                           int unknown_id,
                           absl::string_view non_expanded_original_key,
                           const SpatialCostParams &spatial_cost_params,
                           std::vector<DictionaryPredictor::Result> *results)
      : penalty_(0),
        types_(types),
        limit_(limit),
        original_key_len_(original_key_len),
        subsequent_chars_(subsequent_chars),
        source_info_(source_info),
        unknown_id_(unknown_id),
        non_expanded_original_key_(non_expanded_original_key),
        spatial_cost_params_(spatial_cost_params),
        results_(results) {}

  PredictiveLookupCallback(const PredictiveLookupCallback &) = delete;
  PredictiveLookupCallback &operator=(const PredictiveLookupCallback &) =
      delete;

  ResultType OnKey(absl::string_view key) override {
    if (subsequent_chars_ == nullptr) {
      return TRAVERSE_CONTINUE;
    }
    // If |subsequent_chars_| was provided, check if the substring of |key|
    // obtained by removing the original lookup key starts with a string in the
    // set.  For example, if original key is "he" and "hello" was found,
    // continue traversing only when one of "l", "ll", or "llo" is in
    // |subsequent_chars_|.
    // Implementation note: Although absl::StartsWith is called at most N times
    // where N = subsequent_chars_.size(), N is very small in practice, less
    // than 10.  Thus, this linear order algorithm is fast enough.
    // Theoretically, we can construct a trie of strings in |subsequent_chars_|
    // to get more performance but it's overkill here.
    // TODO(noriyukit): std::vector<string> would be better than set<string>.
    // To this end, we need to fix Composer as well.
    const absl::string_view rest = absl::ClippedSubstr(key, original_key_len_);
    for (const std::string &chr : *subsequent_chars_) {
      if (absl::StartsWith(rest, chr)) {
        return TRAVERSE_CONTINUE;
      }
    }
    return TRAVERSE_NEXT_KEY;
  }

  ResultType OnActualKey(absl::string_view key, absl::string_view actual_key,
                         int num_expanded) override {
    // Remember a spatial penalty for the current key; it is applied to every
    // token reported for this key in OnToken() below.
    penalty_ = 0;
    if (num_expanded > 0 ||
        (!non_expanded_original_key_.empty() &&
         !absl::StartsWith(actual_key, non_expanded_original_key_))) {
      penalty_ = spatial_cost_params_.GetPenalty(key);
    }
    return TRAVERSE_CONTINUE;
  }

  ResultType OnToken(absl::string_view key, absl::string_view actual_key,
                     const Token &token) override {
    // If the token is from user dictionary and its POS is unknown, it is
    // suggest-only words.  Such words are looked up only when their keys
    // exactly match |key|.  Otherwise, unigram suggestion can be annoying.
    // For example, suppose a user registers their email address as める.
    // Then, we don't want to show the email address from め but exactly
    // from める.
    if ((token.attributes & Token::USER_DICTIONARY) != 0 &&
        token.lid == unknown_id_) {
      const auto orig_key = absl::ClippedSubstr(key, 0, original_key_len_);
      if (token.key != orig_key) {
        return TRAVERSE_CONTINUE;
      }
    }
    results_->push_back(Result());
    results_->back().InitializeByTokenAndTypes(token, types_);
    results_->back().wcost += penalty_;
    results_->back().source_info |= source_info_;
    // Stop the traversal once |limit_| results have been collected.
    return (results_->size() < limit_) ? TRAVERSE_CONTINUE : TRAVERSE_DONE;
  }

 protected:
  int32_t penalty_;
  const DictionaryPredictor::PredictionTypes types_;
  const size_t limit_;
  const size_t original_key_len_;
  const std::set<std::string> *subsequent_chars_;
  const Segment::Candidate::SourceInfo source_info_;
  const int unknown_id_;
  absl::string_view non_expanded_original_key_;
  const SpatialCostParams spatial_cost_params_;
  std::vector<DictionaryPredictor::Result> *results_;
};

// Variant of PredictiveLookupCallback used for bigram lookup: only tokens
// whose value strictly extends the previous user input |history_value_| are
// collected.
class DictionaryPredictor::PredictiveBigramLookupCallback
    : public PredictiveLookupCallback {
 public:
  PredictiveBigramLookupCallback(
      DictionaryPredictor::PredictionTypes types, size_t limit,
      size_t original_key_len, const std::set<std::string> *subsequent_chars,
      absl::string_view history_value,
      Segment::Candidate::SourceInfo source_info, int unknown_id,
      absl::string_view non_expanded_original_key,
      const SpatialCostParams spatial_cost_params,
      std::vector<DictionaryPredictor::Result> *results)
      : PredictiveLookupCallback(types, limit, original_key_len,
                                 subsequent_chars, source_info, unknown_id,
                                 non_expanded_original_key,
                                 spatial_cost_params, results),
        history_value_(history_value) {}

  PredictiveBigramLookupCallback(const PredictiveBigramLookupCallback &) =
      delete;
  PredictiveBigramLookupCallback &operator=(
      const PredictiveBigramLookupCallback &) = delete;

  ResultType OnToken(absl::string_view key, absl::string_view expanded_key,
                     const Token &token) override {
    // Skip the token if its value doesn't start with the previous user input,
    // |history_value_|, or does not extend beyond it.
    if (!absl::StartsWith(token.value, history_value_) ||
        token.value.size() <= history_value_.size()) {
      return TRAVERSE_CONTINUE;
    }
    ResultType result_type =
        PredictiveLookupCallback::OnToken(key, expanded_key, token);
    return result_type;
  }

 private:
  absl::string_view history_value_;
};

// Comparator for sorting prediction candidates.
// If we have words A and AB, for example "六本木" and "六本木ヒルズ",
// assume that cost(A) < cost(AB).
// Orders results by word cost (wcost) ascending.
class DictionaryPredictor::ResultWCostLess {
 public:
  bool operator()(const DictionaryPredictor::Result &lhs,
                  const DictionaryPredictor::Result &rhs) const {
    return lhs.wcost < rhs.wcost;
  }
};

// Orders results so that the *smallest* total cost is popped first when used
// with std::make_heap/std::pop_heap (hence the ">" comparison).
class DictionaryPredictor::ResultCostLess {
 public:
  bool operator()(const DictionaryPredictor::Result &lhs,
                  const DictionaryPredictor::Result &rhs) const {
    return lhs.cost > rhs.cost;
  }
};

DictionaryPredictor::DictionaryPredictor(
    const DataManagerInterface &data_manager,
    const ConverterInterface *converter,
    const ImmutableConverterInterface *immutable_converter,
    const DictionaryInterface *dictionary,
    const DictionaryInterface *suffix_dictionary, const Connector *connector,
    const Segmenter *segmenter, const PosMatcher *pos_matcher,
    const SuggestionFilter *suggestion_filter)
    : converter_(converter),
      immutable_converter_(immutable_converter),
      dictionary_(dictionary),
      suffix_dictionary_(suffix_dictionary),
      connector_(connector),
      segmenter_(segmenter),
      suggestion_filter_(suggestion_filter),
      counter_suffix_word_id_(pos_matcher->GetCounterSuffixWordId()),
      general_symbol_id_(pos_matcher->GetGeneralSymbolId()),
      unknown_id_(pos_matcher->GetUnknownId()),
      predictor_name_("DictionaryPredictor") {
  // Load the zero-query dictionaries (normal and number variants) from the
  // data manager.
  absl::string_view zero_query_token_array_data;
  absl::string_view zero_query_string_array_data;
  absl::string_view zero_query_number_token_array_data;
  absl::string_view zero_query_number_string_array_data;
  data_manager.GetZeroQueryData(&zero_query_token_array_data,
                                &zero_query_string_array_data,
                                &zero_query_number_token_array_data,
                                &zero_query_number_string_array_data);
  zero_query_dict_.Init(zero_query_token_array_data,
                        zero_query_string_array_data);
  zero_query_number_dict_.Init(zero_query_number_token_array_data,
                               zero_query_number_string_array_data);
}

DictionaryPredictor::~DictionaryPredictor() {}

// Called when a conversion is committed; records usage stats for committed
// zero-query candidates.
void DictionaryPredictor::Finish(const ConversionRequest &request,
                                 Segments *segments) {
  if (segments->request_type() == Segments::REVERSE_CONVERSION) {
    // Do nothing for REVERSE_CONVERSION.
    return;
  }

  const Segment &segment = segments->conversion_segment(0);
  if (segment.candidates_size() < 1) {
    VLOG(2) << "candidates size < 1";
    return;
  }

  const Segment::Candidate &candidate = segment.candidate(0);
  if (segment.segment_type() != Segment::FIXED_VALUE) {
    VLOG(2) << "segment is not FIXED_VALUE" << candidate.value;
    return;
  }

  MaybeRecordUsageStats(candidate);
}

// Increments one usage-stats counter per zero-query source-info bit set on
// |candidate|.
void DictionaryPredictor::MaybeRecordUsageStats(
    const Segment::Candidate &candidate) const {
  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NONE) {
    UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeNone");
  }

  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NUMBER_SUFFIX) {
    UsageStats::IncrementCount(
        "CommitDictionaryPredictorZeroQueryTypeNumberSuffix");
  }

  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOTICON) {
    UsageStats::IncrementCount(
        "CommitDictionaryPredictorZeroQueryTypeEmoticon");
  }

  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOJI) {
    UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeEmoji");
  }

  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM) {
    UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeBigram");
  }

  if (candidate.source_info &
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX) {
    UsageStats::IncrementCount("CommitDictionaryPredictorZeroQueryTypeSuffix");
  }
}

// Main entry point: aggregates prediction results for |request|, scores them,
// and appends them to the first conversion segment.  Returns true when at
// least one candidate was added.
bool DictionaryPredictor::PredictForRequest(const ConversionRequest &request,
                                            Segments *segments) const {
  if (segments == nullptr) {
    return false;
  }
  if (segments->request_type() == Segments::CONVERSION) {
    VLOG(2) << "request type is CONVERSION";
    return false;
  }
  if (segments->conversion_segments_size() < 1) {
    VLOG(2) << "segment size < 1";
    return false;
  }

  std::vector<Result> results;
  // Mixed conversion is the feature that mixes prediction and
  // conversion, meaning that results may include the candidates whose
  // key is exactly the same as the composition.  This mode is used in mobile.
  const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
  AggregatePredictionForRequest(request, segments, &results);
  if (results.empty()) {
    return false;
  }

  if (is_mixed_conversion) {
    SetPredictionCostForMixedConversion(*segments, &results);
    if (!IsEnableNewSpatialScoring(request)) {
      ApplyPenaltyForKeyExpansion(*segments, &results);
    }
    // Currently, we don't have spelling correction feature when in
    // the mixed conversion mode, so RemoveMissSpelledCandidates() is
    // not called.
    return AddPredictionToCandidates(
        request,
        true,  // Include exact key result even if it's a bad suggestion.
        segments, &results);
  }

  // Normal prediction.
  SetPredictionCost(*segments, &results);
  if (!IsEnableNewSpatialScoring(request)) {
    ApplyPenaltyForKeyExpansion(*segments, &results);
  }
  const std::string &input_key = segments->conversion_segment(0).key();
  const size_t input_key_len = Util::CharsLen(input_key);
  RemoveMissSpelledCandidates(input_key_len, &results);
  return AddPredictionToCandidates(request,
                                   false,  // Remove exact key result.
                                   segments, &results);
}

// Decides the realtime-candidate budget and the unigram aggregation strategy,
// then delegates to AggregatePrediction().
DictionaryPredictor::PredictionTypes
DictionaryPredictor::AggregatePredictionForRequest(
    const ConversionRequest &request, Segments *segments,
    std::vector<Result> *results) const {
  const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
  // In mixed conversion mode, the number of real time candidates is increased.
  const size_t realtime_max_size =
      GetRealtimeCandidateMaxSize(request, *segments, is_mixed_conversion);
  const auto &unigram_config = GetUnigramConfig(request, *segments);

  return AggregatePrediction(request, realtime_max_size, unigram_config,
                             segments, results);
}

// Selects the unigram aggregation function and the minimum key length that
// triggers it, depending on input mode and request type.
DictionaryPredictor::UnigramConfig DictionaryPredictor::GetUnigramConfig(
    const ConversionRequest &request, const Segments &segments) const {
  const bool is_mixed_conversion = IsMixedConversionEnabled(request.request());
  if (IsLatinInputMode(request)) {
    // For SUGGESTION request in Desktop, We don't look up English words when
    // key length is one.
    const size_t min_key_len_for_latin_input =
        (is_mixed_conversion ||
         segments.request_type() == Segments::PREDICTION)
            ? 1
            : 2;
    return {&DictionaryPredictor::AggregateUnigramCandidateForLatinInput,
            min_key_len_for_latin_input};
  }

  if (is_mixed_conversion) {
    // In mixed conversion mode, we want to show unigram candidates even for
    // short keys to emulate PREDICTION mode.
    constexpr size_t kMinUnigramKeyLen = 1;
    return {&DictionaryPredictor::AggregateUnigramCandidateForMixedConversion,
            kMinUnigramKeyLen};
  }

  // Normal prediction.
  const size_t min_unigram_key_len =
      (segments.request_type() == Segments::PREDICTION) ? 1 : 3;
  return {&DictionaryPredictor::AggregateUnigramCandidate,
          min_unigram_key_len};
}

// Runs every applicable aggregation stage (realtime, unigram, bigram,
// English, typing correction) and returns the bitwise OR of the types that
// actually produced candidates.
DictionaryPredictor::PredictionTypes DictionaryPredictor::AggregatePrediction(
    const ConversionRequest &request, size_t realtime_max_size,
    const UnigramConfig &unigram_config, Segments *segments,
    std::vector<Result> *results) const {
  DCHECK(segments);
  DCHECK(results);

  // Zero query prediction.
  if (segments->conversion_segment(0).key().empty()) {
    return AggregatePredictionForZeroQuery(request, segments, results);
  }

  const std::string &key = segments->conversion_segment(0).key();
  const size_t key_len = Util::CharsLen(key);

  // TODO(toshiyuki): Check if we can remove this SUGGESTION check.
  // i.e. can we return NO_PREDICTION here for both of SUGGESTION and
  // PREDICTION?
  if (segments->request_type() == Segments::SUGGESTION) {
    if (!request.config().use_dictionary_suggest()) {
      VLOG(2) << "no_dictionary_suggest";
      return NO_PREDICTION;
    }
    // Never trigger prediction if the key looks like zip code.
    if (DictionaryPredictor::IsZipCodeRequest(key) && key_len < 6) {
      return NO_PREDICTION;
    }
  }

  PredictionTypes selected_types = NO_PREDICTION;
  if (ShouldAggregateRealTimeConversionResults(request, *segments)) {
    AggregateRealtimeConversion(request, realtime_max_size, segments, results);
    selected_types |= REALTIME;
  }

  // In partial suggestion or prediction, only realtime candidates are used.
  if (segments->request_type() == Segments::PARTIAL_SUGGESTION ||
      segments->request_type() == Segments::PARTIAL_PREDICTION) {
    return selected_types;
  }

  // Add unigram candidates.
  const size_t min_unigram_key_len = unigram_config.min_key_len;
  if (key_len >= min_unigram_key_len) {
    const auto &unigram_fn = unigram_config.unigram_fn;
    PredictionType type = (this->*unigram_fn)(request, *segments, results);
    selected_types |= type;
  }

  // Add bigram candidates.
  constexpr int kMinHistoryKeyLen = 3;
  if (HasHistoryKeyLongerThanOrEqualTo(*segments, kMinHistoryKeyLen)) {
    AggregateBigramPrediction(request, *segments,
                              Segment::Candidate::SOURCE_INFO_NONE, results);
    selected_types |= BIGRAM;
  }

  // Add english candidates.
  if (IsLanguageAwareInputEnabled(request) && IsQwertyMobileTable(request) &&
      key_len >= min_unigram_key_len) {
    AggregateEnglishPredictionUsingRawInput(request, *segments, results);
    selected_types |= ENGLISH;
  }

  // Add typing correction candidates.
  constexpr int kMinTypingCorrectionKeyLen = 3;
  if (IsTypingCorrectionEnabled(request) &&
      key_len >= kMinTypingCorrectionKeyLen) {
    AggregateTypeCorrectingPrediction(request, *segments, results);
    selected_types |= TYPING_CORRECTION;
  }

  return selected_types;
}

// Converts scored |results| into candidates on the first conversion segment,
// applying filtering (bad suggestions, duplicates, spelling corrections,
// suffix caps).  Returns true when at least one candidate was added.
bool DictionaryPredictor::AddPredictionToCandidates(
    const ConversionRequest &request, bool include_exact_key,
    Segments *segments, std::vector<Result> *results) const {
  DCHECK(segments);
  DCHECK(results);
  const std::string &input_key = segments->conversion_segment(0).key();
  const size_t input_key_len = Util::CharsLen(input_key);

  std::string history_key, history_value;
  GetHistoryKeyAndValue(*segments, &history_key, &history_value);

  // exact_bigram_key does not contain ambiguity expansion, because
  // this is used for exact matching for the key.
  const std::string exact_bigram_key = history_key + input_key;

  Segment *segment = segments->mutable_conversion_segment(0);
  DCHECK(segment);

  // Instead of sorting all the results, we construct a heap.
  // This is done in linear time and
  // we can pop as many results as we need efficiently.
  std::make_heap(results->begin(), results->end(), ResultCostLess());

  const size_t size = std::min(
      request.max_dictionary_prediction_candidates_size(), results->size());

  int added = 0;
  std::set<std::string> seen;

  int added_suffix = 0;
  bool cursor_at_tail =
      request.has_composer() &&
      request.composer().GetCursor() == request.composer().GetLength();

  absl::flat_hash_map<std::string, int32_t> merged_types;

#ifndef NDEBUG
  const bool is_debug = true;
#else   // NDEBUG
  // TODO(taku): Sets more advanced debug info depending on the verbose_level.
  const bool is_debug = request.config().verbose_level() >= 1;
#endif  // NDEBUG

  if (is_debug) {
    // Record, per value, the union of prediction-type bits across all live
    // results; used below for the debug description.
    for (const auto &result : *results) {
      if (!result.removed) {
        merged_types[result.value] |= result.types;
      }
    }
  }

  // Fills |candidate| from |result| with the given (possibly history-stripped)
  // |key|/|value| pair.
  auto add_candidate = [&](const Result &result, const std::string &key,
                           const std::string &value,
                           Segment::Candidate *candidate) {
    DCHECK(candidate);

    candidate->Init();
    candidate->content_key = key;
    candidate->content_value = value;
    candidate->key = key;
    candidate->value = value;
    candidate->lid = result.lid;
    candidate->rid = result.rid;
    candidate->wcost = result.wcost;
    candidate->cost = result.cost;
    candidate->attributes = result.candidate_attributes;
    if ((!(candidate->attributes & Segment::Candidate::SPELLING_CORRECTION) &&
         IsLatinInputMode(request)) ||
        (result.types & SUFFIX)) {
      candidate->attributes |= Segment::Candidate::NO_VARIANTS_EXPANSION;
      candidate->attributes |= Segment::Candidate::NO_EXTRA_DESCRIPTION;
    }
    if (candidate->attributes & Segment::Candidate::PARTIALLY_KEY_CONSUMED) {
      candidate->consumed_key_size = result.consumed_key_size;
      // There are two scenarios to reach here.
      // 1. Auto partial suggestion.
      //    e.g. composition わたしのなまえ| -> candidate 私の
      // 2. Partial suggestion.
      //    e.g. composition わたしの|なまえ -> candidate 私の
      // To distinguish auto partial suggestion from (non-auto) partial
      // suggestion, see the cursor position.  If the cursor is at the tail
      // of the composition, this is auto partial suggestion.
      if (cursor_at_tail) {
        candidate->attributes |= Segment::Candidate::AUTO_PARTIAL_SUGGESTION;
      }
    }
    candidate->source_info = result.source_info;
    if (result.types & REALTIME) {
      candidate->inner_segment_boundary = result.inner_segment_boundary;
    }
    if (result.types & TYPING_CORRECTION) {
      candidate->attributes |= Segment::Candidate::TYPING_CORRECTION;
    }

    SetDescription(result.types, candidate->attributes,
                   &candidate->description);
    if (is_debug) {
      SetDebugDescription(merged_types[result.value],
                          &candidate->description);
    }
#ifdef MOZC_DEBUG
    candidate->log += "\n" + result.log;
#endif  // MOZC_DEBUG
  };

#ifdef MOZC_DEBUG
  // In debug builds, filtered-out results are preserved (with the reason
  // appended to their log) for inspection.
  auto add_debug_candidate = [&](Result result, const std::string &log) {
    std::string key, value;
    if (result.types & BIGRAM) {
      // remove the prefix of history key and history value.
      key = result.key.substr(history_key.size(),
                              result.key.size() - history_key.size());
      value = result.value.substr(history_value.size(),
                                  result.value.size() - history_value.size());
    } else {
      key = result.key;
      value = result.value;
    }
    result.log.append(log);
    Segment::Candidate candidate;
    add_candidate(result, key, value, &candidate);
    segment->removed_candidates_for_debug_.push_back(std::move(candidate));
  };
#define MOZC_ADD_DEBUG_CANDIDATE(result, log) \
  add_debug_candidate(result, MOZC_WORD_LOG_MESSAGE(log))

#else  // MOZC_DEBUG
#define MOZC_ADD_DEBUG_CANDIDATE(result, log) \
  {}

#endif  // MOZC_DEBUG

  for (size_t i = 0; i < results->size(); ++i) {
    // Pop a result from a heap.  Please pay attention not to use
    // results->at(i).
    std::pop_heap(results->begin(), results->end() - i, ResultCostLess());
    const Result &result = results->at(results->size() - i - 1);

    // NOTE(review): |added| (int) is compared with |size| (size_t); safe here
    // because |added| is non-negative.
    if (added >= size || result.cost >= kInfinity) {
      break;
    }

    if (result.removed) {
      MOZC_ADD_DEBUG_CANDIDATE(result, "Removed flag is on");
      continue;
    }

    // When |include_exact_key| is true, we don't filter the results
    // which have the exactly same key as the input even if it's a bad
    // suggestion.
    if (!(include_exact_key && (result.key == input_key)) &&
        suggestion_filter_->IsBadSuggestion(result.value)) {
      MOZC_ADD_DEBUG_CANDIDATE(result, "Bad suggestion");
      continue;
    }

    // Don't suggest exactly the same candidate as key.
    // if |include_exact_key| is true, that's not the case.
    if (!include_exact_key && !(result.types & REALTIME) &&
        (((result.types & BIGRAM) && exact_bigram_key == result.value) ||
         (!(result.types & BIGRAM) && input_key == result.value))) {
      MOZC_ADD_DEBUG_CANDIDATE(result, "Key == candidate");
      continue;
    }

    std::string key, value;
    if (result.types & BIGRAM) {
      // remove the prefix of history key and history value.
      key = result.key.substr(history_key.size(),
                              result.key.size() - history_key.size());
      value = result.value.substr(history_value.size(),
                                  result.value.size() - history_value.size());
    } else {
      key = result.key;
      value = result.value;
    }

    if (!seen.insert(value).second) {
      MOZC_ADD_DEBUG_CANDIDATE(result, "Duplicated");
      continue;
    }

    // User input: "おーすとり" (len = 5)
    // key/value:  "おーすとりら" "オーストラリア" (miss match pos = 4)
    if ((result.candidate_attributes &
         Segment::Candidate::SPELLING_CORRECTION) &&
        key != input_key &&
        input_key_len <= GetMissSpelledPosition(key, value) + 1) {
      MOZC_ADD_DEBUG_CANDIDATE(result, "Spelling correction");
      continue;
    }

    if (result.types == SUFFIX && added_suffix++ >= 20) {
      // TODO(toshiyuki): Need refactoring for controlling suffix
      // prediction number after we will fix the appropriate number.
      MOZC_ADD_DEBUG_CANDIDATE(result, "Added suffix >= 20");
      continue;
    }

    Segment::Candidate *candidate = segment->push_back_candidate();
    add_candidate(result, key, value, candidate);
    ++added;
  }

  return added > 0;
#undef MOZC_ADD_DEBUG_CANDIDATE
}

// Aggregates candidates when the composition is empty (zero query): bigram
// continuation of the history and/or suffix prediction.
DictionaryPredictor::PredictionTypes
DictionaryPredictor::AggregatePredictionForZeroQuery(
    const ConversionRequest &request, Segments *segments,
    std::vector<Result> *results) const {
  DCHECK(segments);
  DCHECK(results);

  if (!request.request().zero_query_suggestion()) {
    // Zero query is disabled by request.
    return NO_PREDICTION;
  }

  PredictionTypes selected_types = NO_PREDICTION;
  constexpr int kMinHistoryKeyLenForZeroQuery = 2;
  if (HasHistoryKeyLongerThanOrEqualTo(*segments,
                                       kMinHistoryKeyLenForZeroQuery)) {
    AggregateBigramPrediction(
        request, *segments,
        Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM, results);
    selected_types |= BIGRAM;
  }
  if (segments->history_segments_size() > 0) {
    AggregateZeroQuerySuffixPrediction(request, *segments, results);
    selected_types |= SUFFIX;
  }
  return selected_types;
}

// In Latin input mode, unigram prediction is English prediction.
DictionaryPredictor::PredictionType
DictionaryPredictor::AggregateUnigramCandidateForLatinInput(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  AggregateEnglishPrediction(request, segments, results);
  return ENGLISH;
}

// Appends user-visible annotations ("補正" for typing correction, "部分" for
// auto partial suggestion) to |description|.
void DictionaryPredictor::SetDescription(PredictionTypes types,
                                         uint32_t attributes,
                                         std::string *description) {
  if (types & TYPING_CORRECTION) {
    Util::AppendStringWithDelimiter(" ", "補正", description);
  }
  if (attributes & Segment::Candidate::AUTO_PARTIAL_SUGGESTION) {
    Util::AppendStringWithDelimiter(" ", "部分", description);
  }
}

// Appends a compact debug tag (e.g. "UBR") indicating which prediction types
// produced this value.
void DictionaryPredictor::SetDebugDescription(PredictionTypes types,
                                              std::string *description) {
  std::string debug_desc;
  if (types & UNIGRAM) {
    debug_desc.append(1, 'U');
  }
  if (types & BIGRAM) {
    debug_desc.append(1, 'B');
  }
  if (types & REALTIME_TOP) {
    debug_desc.append("R1");
  } else if (types & REALTIME) {
    debug_desc.append(1, 'R');
  }
  if
      (types & SUFFIX) {
    debug_desc.append(1, 'S');
  }
  if (types & ENGLISH) {
    debug_desc.append(1, 'E');
  }
  // Note that description for TYPING_CORRECTION is omitted
  // because it is appended by SetDescription.
  if (!debug_desc.empty()) {
    Util::AppendStringWithDelimiter(" ", debug_desc, description);
  }
}

// Returns cost for |result| when it's transitioned from |rid|.  Suffix penalty
// is also added for non-realtime results.
int DictionaryPredictor::GetLMCost(const Result &result, int rid) const {
  const int cost_with_context = connector_->GetTransitionCost(rid, result.lid);

  int lm_cost = 0;

  if (result.types & SUFFIX) {
    // We always respect the previous context to calculate the cost of SUFFIX.
    // Otherwise, the suffix that doesn't match the context will be promoted.
    lm_cost = cost_with_context + result.wcost;
  } else {
    // Sometimes transition cost is too high and causes a bug like b/18112966.
    // For example, "接続詞 が" -> "始まる 動詞,五段活用,基本形" has very large
    // cost and "始まる" is demoted.  To prevent such cases, ImmutableConverter
    // computes transition from BOS/EOS too; see
    // ImmutableConverterImpl::MakeLatticeNodesForHistorySegments().
    // Here, taking the minimum of |cost1| and |cost2| has a similar effect.
    const int cost_without_context =
        connector_->GetTransitionCost(0, result.lid);
    lm_cost = std::min(cost_with_context, cost_without_context) + result.wcost;
  }

  if (!(result.types & REALTIME)) {
    // Realtime conversion already adds prefix/suffix penalties to the result.
    // Note that we don't add prefix penalty: the role of "bunsetsu" is
    // ambiguous on zero-query suggestion.
    lm_cost += segmenter_->GetSuffixPenalty(result.rid);
  }

  return lm_cost;
}

namespace {

// Dictionary traversal callback that stops as soon as a token whose value
// equals |target_value| is found, remembering that token.
class FindValueCallback : public DictionaryInterface::Callback {
 public:
  explicit FindValueCallback(absl::string_view target_value)
      : target_value_(target_value), found_(false) {}

  ResultType OnToken(absl::string_view,  // key
                     absl::string_view,  // actual_key
                     const Token &token) override {
    if (token.value != target_value_) {
      return TRAVERSE_CONTINUE;
    }
    found_ = true;
    token_ = token;
    return TRAVERSE_DONE;
  }

  bool found() const { return found_; }

  const Token &token() const { return token_; }

 private:
  absl::string_view target_value_;
  bool found_;   // true once a matching token has been seen
  Token token_;  // the matched token; valid only when found_ is true

  DISALLOW_COPY_AND_ASSIGN(FindValueCallback);
};

}  // namespace

// Copies the dictionary token's fields into this Result and derives the
// candidate attribute bits.
void DictionaryPredictor::Result::InitializeByTokenAndTypes(
    const Token &token, PredictionTypes types) {
  SetTypesAndTokenAttributes(types, token.attributes);
  key = token.key;
  value = token.value;
  wcost = token.cost;
  lid = token.lid;
  rid = token.rid;
}

// Maps prediction types and token attribute bits onto Segment::Candidate
// attribute bits.
void DictionaryPredictor::Result::SetTypesAndTokenAttributes(
    PredictionTypes prediction_types, Token::AttributesBitfield token_attr) {
  types = prediction_types;
  candidate_attributes = 0;
  if (types & TYPING_CORRECTION) {
    candidate_attributes |= Segment::Candidate::TYPING_CORRECTION;
  }
  if (types & (REALTIME | REALTIME_TOP)) {
    candidate_attributes |= Segment::Candidate::REALTIME_CONVERSION;
  }
  if (token_attr & Token::SPELLING_CORRECTION) {
    candidate_attributes |= Segment::Candidate::SPELLING_CORRECTION;
  }
  if (token_attr & Token::USER_DICTIONARY) {
    candidate_attributes |= (Segment::Candidate::USER_DICTIONARY |
                             Segment::Candidate::NO_VARIANTS_EXPANSION);
  }
}

// Sets the source-info bit corresponding to the zero-query candidate type.
void DictionaryPredictor::Result::SetSourceInfoForZeroQuery(
    ZeroQueryType type) {
  switch (type) {
    case ZERO_QUERY_NONE:
      source_info |= Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NONE;
      return;
    case ZERO_QUERY_NUMBER_SUFFIX:
      source_info |=
          Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_NUMBER_SUFFIX;
      return;
    case ZERO_QUERY_EMOTICON:
      source_info |=
          Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOTICON;
      return;
    case ZERO_QUERY_EMOJI:
      source_info |=
          Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_EMOJI;
      return;
    case ZERO_QUERY_BIGRAM:
      source_info |=
          Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_BIGRAM;
      return;
    case ZERO_QUERY_SUFFIX:
      source_info |=
          Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX;
      return;
    default:
      LOG(ERROR) << "Should not come here";
      return;
  }
}

bool DictionaryPredictor::Result::IsUserDictionaryResult() const {
  return (candidate_attributes & Segment::Candidate::USER_DICTIONARY) != 0;
}

// Here, we treat the word as English when its key consists of Latin
// characters.
bool DictionaryPredictor::Result::IsEnglishEntryResult() const {
  return Util::IsEnglishTransliteration(key);
}

// Copies the key/value of the top candidate of the last history segment into
// |key|/|value|.  Returns false when there is no usable history.
bool DictionaryPredictor::GetHistoryKeyAndValue(const Segments &segments,
                                                std::string *key,
                                                std::string *value) const {
  DCHECK(key);
  DCHECK(value);
  if (segments.history_segments_size() == 0) {
    return false;
  }

  const Segment &history_segment =
      segments.history_segment(segments.history_segments_size() - 1);
  if (history_segment.candidates_size() == 0) {
    return false;
  }
  key->assign(history_segment.candidate(0).key);
  value->assign(history_segment.candidate(0).value);
  return true;
}

// Scores |results| for normal (non-mixed) prediction: language-model cost
// minus a bonus for the number of keystrokes the candidate would save.
void DictionaryPredictor::SetPredictionCost(
    const Segments &segments, std::vector<Result> *results) const {
  DCHECK(results);

  int rid = 0;  // 0 (BOS) is default
  if (segments.history_segments_size() > 0) {
    const Segment &history_segment =
        segments.history_segment(segments.history_segments_size() - 1);
    if (history_segment.candidates_size() > 0) {
      rid = history_segment.candidate(0).rid;  // use history segment's id
    }
  }

  const std::string &input_key = segments.conversion_segment(0).key();
  std::string history_key, history_value;
  GetHistoryKeyAndValue(segments, &history_key, &history_value);
  const std::string bigram_key = history_key + input_key;
  const bool is_suggestion = (segments.request_type() == Segments::SUGGESTION);

  // use the same scoring function for both unigram/bigram.
  // Bigram will be boosted because we pass the previous
  // key as a context information.
  const size_t bigram_key_len = Util::CharsLen(bigram_key);
  const size_t unigram_key_len = Util::CharsLen(input_key);

  // In the loop below, we track the minimum cost among those REALTIME
  // candidates that have the same key length as |input_key| so that we can set
  // a slightly smaller cost to REALTIME_TOP than these.
  int realtime_cost_min = kInfinity;
  Result *realtime_top_result = nullptr;

  for (size_t i = 0; i < results->size(); ++i) {
    const Result &result = results->at(i);

    // The cost of REALTIME_TOP is determined after the loop based on the
    // minimum cost for REALTIME.  Just remember the pointer of result.
    if (result.types & REALTIME_TOP) {
      realtime_top_result = &results->at(i);
      continue;
    }

    const int cost = GetLMCost(result, rid);
    const size_t query_len =
        (result.types & BIGRAM) ? bigram_key_len : unigram_key_len;
    const size_t key_len = Util::CharsLen(result.key);

    if (IsAggressiveSuggestion(query_len, key_len, cost, is_suggestion,
                               results->size())) {
      results->at(i).cost = kInfinity;
      continue;
    }

    // cost = -500 * log(lang_prob(w) * (1 + remain_length))    -- (1)
    // where lang_prob(w) is a language model probability of the word "w", and
    // remain_length the length of key user must type to input "w".
    //
    // Example:
    // key/value = "とうきょう/東京"
    // user_input = "とう"
    // remain_length = len("とうきょう") - len("とう") = 3
    //
    // By taking the log of (1),
    // cost  = -500 [log(lang_prob(w)) + log(1 + remain_length)]
    //       = -500 * log(lang_prob(w)) + 500 * log(1 + remain_length)
    //       = cost - 500 * log(1 + remain_length)
    // Because 500 * log(lang_prob(w)) = -cost.
    //
    // lang_prob(w) * (1 + remain_length) represents how user can reduce
    // the total types by choosing this candidate.
    // Before this simple algorithm, we have been using an SVM-base scoring,
    // but we stopped using it with the following reasons.
    // 1) Hard to maintain the ranking.
    // 2) Hard to control the final results of SVM.
    // 3) Hard to debug.
    // 4) Since we used the log(remain_length) as a feature,
    //    the new ranking algorithm and SVM algorithm was essentially
    //    the same.
    // 5) Since we used the length of value as a feature, we find
    //    inconsistencies between the conversion and the prediction
    //    -- the results of top prediction and the top conversion
    //    (the candidate shown after the space key) may differ.
    //
    // The new function brings consistent results.  If two candidates
    // have the same reading (key), they should have the same cost bonus
    // from the length part.  This implies that the result is reranked by
    // the language model probability as long as the key part is the same.
    // This behavior is basically the same as the converter.
    //
    // TODO(team): want to find the best parameter instead of kCostFactor.
    // NOTE(review): key_len - query_len is an unsigned difference narrowed to
    // int; IsAggressiveSuggestion above appears to gate the pathological
    // cases — confirm key_len >= query_len is guaranteed here.
    constexpr int kCostFactor = 500;
    results->at(i).cost =
        cost - kCostFactor * log(1.0 + std::max<int>(0, key_len - query_len));

    // Update the minimum cost for REALTIME candidates that have the same key
    // length as input_key.
    if (result.types & REALTIME && result.cost < realtime_cost_min &&
        result.key.size() == input_key.size()) {
      realtime_cost_min = result.cost;
    }
  }

  // Ensure that the REALTIME_TOP candidate has relatively smaller cost than
  // those of REALTIME candidates.
  if (realtime_top_result != nullptr) {
    realtime_top_result->cost = std::max(0, realtime_cost_min - 10);
  }
}

// Scores |results| for mixed conversion (mobile) ranking.
void DictionaryPredictor::SetPredictionCostForMixedConversion(
    const Segments &segments, std::vector<Result> *results) const {
  DCHECK(results);

  // ranking for mobile
  int rid = 0;        // 0 (BOS) is default
  int prev_cost = 0;  // cost of the last history candidate.
  if (segments.history_segments_size() > 0) {
    const Segment &history_segment =
        segments.history_segment(segments.history_segments_size() - 1);
    if (history_segment.candidates_size() > 0) {
      rid = history_segment.candidate(0).rid;  // use history segment's id
      prev_cost = history_segment.candidate(0).cost;
      if (prev_cost == 0) {
        // if prev_cost is set to be 0 for some reason, use default cost.
        prev_cost = 5000;
      }
    }
  }
  const size_t input_key_len =
      Util::CharsLen(segments.conversion_segment(0).key());
  for (Result &result : *results) {
    int cost = GetLMCost(result, rid);
    MOZC_WORD_LOG(result, absl::StrCat("GetLMCost: ", cost));

    // Demote filtered word here, because they are not filtered for exact match.
    // Even for exact match, we don't want to show aggressive words with high
    // ranking.
    if (suggestion_filter_->IsBadSuggestion(result.value)) {
      // Cost penalty for bad suggestions.
      // 3453 = 500 * log(1000)
      constexpr int kBadSuggestionPenalty = 3453;
      cost += kBadSuggestionPenalty;
      MOZC_WORD_LOG(result, absl::StrCat("BadSuggestionPenalty: ", cost));
    }

    // Make exact candidates have higher ranking.
    // Because for mobile, suggestion is the main candidates and
    // users expect the candidates for the input key on the candidates.
    if (result.types & (UNIGRAM | TYPING_CORRECTION)) {
      const size_t key_len = Util::CharsLen(result.key);
      if (key_len > input_key_len) {
        // Cost penalty means that exact candidates are evaluated
        // 50 times bigger in frequency.
        // Note that the cost is calculated by cost = -500 * log(prob)
        // 1956 = 500 * log(50)
        constexpr int kNotExactPenalty = 1956;
        cost += kNotExactPenalty;
        MOZC_WORD_LOG(result,
                      absl::StrCat("Unigram | Typing correction: ", cost));
      }
    }
    if (result.types & BIGRAM) {
      // When user inputs "六本木" and there is an entry
      // "六本木ヒルズ" in the dictionary, we can suggest
      // "ヒルズ" as a ZeroQuery suggestion. In this case,
      // We can't calculate the transition cost between "六本木"
      // and "ヒルズ". If we ignore the transition cost,
      // bigram-based suggestion will be overestimated.
      // Here we use kDefaultTransitionCost as a
      // transition cost between "六本木" and "ヒルズ". Currently,
      // the cost is basically the same as the cost between
      // "名詞,一般" and "名詞,一般".
      // TODO(taku): Adjust these parameters.
      // Seems the bigram is overestimated.
      constexpr int kDefaultTransitionCost = 1347;
      // Promoting bigram candidates.
      constexpr int kBigramBonus = 800;  // ~= 500*ln(5)
      cost += (kDefaultTransitionCost - kBigramBonus - prev_cost);
      MOZC_WORD_LOG(result, absl::StrCat("Bigram: ", cost));
    }
    if (result.candidate_attributes & Segment::Candidate::USER_DICTIONARY &&
        result.lid != general_symbol_id_) {
      // Decrease cost for words from user dictionary in order to promote them,
      // provided that it is not a general symbol (Note: emoticons are mapped to
      // general symbol). Currently user dictionary words are evaluated 5 times
      // bigger in frequency, being capped by 1000 (this number is adhoc, so
      // feel free to adjust).
      constexpr int kUserDictionaryPromotionFactor = 804;  // 804 = 500 * log(5)
      constexpr int kUserDictionaryCostUpperLimit = 1000;
      cost = std::min(cost - kUserDictionaryPromotionFactor,
                      kUserDictionaryCostUpperLimit);
      MOZC_WORD_LOG(result, absl::StrCat("User dictionary: ", cost));
    }

    // Note that the cost is defined as -500 * log(prob).
    // Even after the ad hoc manipulations, cost must remain larger than 0.
    result.cost = std::max(1, cost);
    MOZC_WORD_LOG(result, absl::StrCat("SetLMCost: ", result.cost));
  }
}

// This method should be deprecated, as it is unintentionally adding extra
// spatial penalty to the candidate.
void DictionaryPredictor::ApplyPenaltyForKeyExpansion(
    const Segments &segments, std::vector<Result> *results) const {
  if (segments.conversion_segments_size() == 0) {
    return;
  }
  // Cost penalty 1151 means that expanded candidates are evaluated
  // 10 times smaller in frequency.
// Note that the cost is calcurated by cost = -500 * log(prob) // 1151 = 500 * log(10) constexpr int kKeyExpansionPenalty = 1151; const std::string &conversion_key = segments.conversion_segment(0).key(); for (size_t i = 0; i < results->size(); ++i) { Result &result = results->at(i); if (result.types & TYPING_CORRECTION) { continue; } if (!absl::StartsWith(result.key, conversion_key)) { result.cost += kKeyExpansionPenalty; MOZC_WORD_LOG(result, absl::StrCat("KeyExpansionPenalty: ", result.cost)); } } } size_t DictionaryPredictor::GetMissSpelledPosition( const std::string &key, const std::string &value) const { std::string hiragana_value; japanese_util::KatakanaToHiragana(value, &hiragana_value); // value is mixed type. return true if key == request_key. if (Util::GetScriptType(hiragana_value) != Util::HIRAGANA) { return Util::CharsLen(key); } // Find the first position of character where miss spell occurs. int position = 0; ConstChar32Iterator key_iter(key); for (ConstChar32Iterator hiragana_iter(hiragana_value); !hiragana_iter.Done() && !key_iter.Done(); hiragana_iter.Next(), key_iter.Next(), ++position) { if (hiragana_iter.Get() != key_iter.Get()) { return position; } } // not find. return the length of key. while (!key_iter.Done()) { ++position; key_iter.Next(); } return position; } void DictionaryPredictor::RemoveMissSpelledCandidates( size_t request_key_len, std::vector<Result> *results) const { DCHECK(results); if (results->size() <= 1) { return; } int spelling_correction_size = 5; for (size_t i = 0; i < results->size(); ++i) { const Result &result = (*results)[i]; if (!(result.candidate_attributes & Segment::Candidate::SPELLING_CORRECTION)) { continue; } // Only checks at most 5 spelling corrections to avoid the case // like all candidates have SPELLING_CORRECTION. 
if (--spelling_correction_size == 0) { return; } std::vector<size_t> same_key_index, same_value_index; for (size_t j = 0; j < results->size(); ++j) { if (i == j) { continue; } const Result &target_result = (*results)[j]; if (target_result.candidate_attributes & Segment::Candidate::SPELLING_CORRECTION) { continue; } if (target_result.key == result.key) { same_key_index.push_back(j); } if (target_result.value == result.value) { same_value_index.push_back(j); } } // delete same_key_index and same_value_index if (!same_key_index.empty() && !same_value_index.empty()) { results->at(i).removed = true; MOZC_WORD_LOG(results->at(i), "Removed. same_(key|value)_index."); for (size_t k = 0; k < same_key_index.size(); ++k) { results->at(same_key_index[k]).removed = true; MOZC_WORD_LOG(results->at(i), "Removed. same_(key|value)_index."); } } else if (same_key_index.empty() && !same_value_index.empty()) { results->at(i).removed = true; MOZC_WORD_LOG(results->at(i), "Removed. same_value_index."); } else if (!same_key_index.empty() && same_value_index.empty()) { for (size_t k = 0; k < same_key_index.size(); ++k) { results->at(same_key_index[k]).removed = true; MOZC_WORD_LOG(results->at(i), "Removed. same_key_index."); } if (request_key_len <= GetMissSpelledPosition(result.key, result.value)) { results->at(i).removed = true; MOZC_WORD_LOG(results->at(i), "Removed. Invalid MissSpelledPosition."); } } } } bool DictionaryPredictor::IsAggressiveSuggestion( size_t query_len, size_t key_len, int cost, bool is_suggestion, size_t total_candidates_size) const { // Temporal workaround for fixing the problem where longer sentence-like // suggestions are shown when user input is very short. // "ただしい" => "ただしいけめんにかぎる" // "それでもぼ" => "それでもぼくはやっていない". // If total_candidates_size is small enough, we don't perform // special filtering. e.g., "せんとち" has only two candidates, so // showing "千と千尋の神隠し" is OK. // Also, if the cost is too small (< 5000), we allow to display // long phrases. 
Examples include "よろしくおねがいします". if (is_suggestion && total_candidates_size >= 10 && key_len >= 8 && cost >= 5000 && query_len <= static_cast<size_t>(0.4 * key_len)) { return true; } return false; } size_t DictionaryPredictor::GetRealtimeCandidateMaxSize( const ConversionRequest &request, const Segments &segments, bool mixed_conversion) const { const Segments::RequestType request_type = segments.request_type(); DCHECK(request_type == Segments::PREDICTION || request_type == Segments::SUGGESTION || request_type == Segments::PARTIAL_PREDICTION || request_type == Segments::PARTIAL_SUGGESTION); if (segments.conversion_segments_size() == 0) { return 0; } const bool is_long_key = IsLongKeyForRealtimeCandidates(segments); const size_t max_size = GetMaxSizeForRealtimeCandidates(request, segments, is_long_key); const size_t default_size = GetDefaultSizeForRealtimeCandidates(is_long_key); size_t size = 0; switch (request_type) { case Segments::PREDICTION: size = mixed_conversion ? max_size : default_size; break; case Segments::SUGGESTION: // Fewer candidatats are needed basically. // But on mixed_conversion mode we should behave like as conversion mode. size = mixed_conversion ? default_size : 1; break; case Segments::PARTIAL_PREDICTION: // This is kind of prediction so richer result than PARTIAL_SUGGESTION // is needed. size = max_size; break; case Segments::PARTIAL_SUGGESTION: // PARTIAL_SUGGESTION works like as conversion mode so returning // some candidates is needed. 
size = default_size; break; default: size = 0; // Never reach here } return std::min(max_size, size); } bool DictionaryPredictor::PushBackTopConversionResult( const ConversionRequest &request, const Segments &segments, std::vector<Result> *results) const { DCHECK_EQ(1, segments.conversion_segments_size()); Segments tmp_segments = segments; ConversionRequest tmp_request = request; tmp_request.set_max_conversion_candidates_size(20); tmp_request.set_composer_key_selection(ConversionRequest::PREDICTION_KEY); // Some rewriters cause significant performance loss. So we skip them. tmp_request.set_skip_slow_rewriters(true); // This method emulates usual converter's behavior so here disable // partial candidates. tmp_request.set_create_partial_candidates(false); if (!converter_->StartConversionForRequest(tmp_request, &tmp_segments)) { return false; } results->push_back(Result()); Result *result = &results->back(); result->key = segments.conversion_segment(0).key(); result->lid = tmp_segments.conversion_segment(0).candidate(0).lid; result->rid = tmp_segments .conversion_segment(tmp_segments.conversion_segments_size() - 1) .candidate(0) .rid; result->SetTypesAndTokenAttributes(REALTIME | REALTIME_TOP, Token::NONE); result->candidate_attributes |= Segment::Candidate::NO_VARIANTS_EXPANSION; // Concatenate the top candidates. // Note that since StartConversionForRequest() runs in conversion mode, the // resulting |tmp_segments| doesn't have inner_segment_boundary. We need to // construct it manually here. // TODO(noriyukit): This is code duplicate in converter/nbest_generator.cc and // we should refactor code after finding more good design. 
bool inner_segment_boundary_success = true; for (size_t i = 0; i < tmp_segments.conversion_segments_size(); ++i) { const Segment &segment = tmp_segments.conversion_segment(i); const Segment::Candidate &candidate = segment.candidate(0); result->value.append(candidate.value); result->wcost += candidate.cost; uint32_t encoded_lengths; if (inner_segment_boundary_success && Segment::Candidate::EncodeLengths( candidate.key.size(), candidate.value.size(), candidate.content_key.size(), candidate.content_value.size(), &encoded_lengths)) { result->inner_segment_boundary.push_back(encoded_lengths); } else { inner_segment_boundary_success = false; } } if (!inner_segment_boundary_success) { LOG(WARNING) << "Failed to construct inner segment boundary"; result->inner_segment_boundary.clear(); } return true; } void DictionaryPredictor::AggregateRealtimeConversion( const ConversionRequest &request, size_t realtime_candidates_size, Segments *segments, std::vector<Result> *results) const { DCHECK(converter_); DCHECK(immutable_converter_); DCHECK(segments); DCHECK(results); // TODO(noriyukit): Currently, |segments| is abused as a temporary output from // the immutable converter. Therefore, the first segment needs to be mutable. // Fix this bad abuse. Segment *segment = segments->mutable_conversion_segment(0); DCHECK(!segment->key().empty()); // First insert a top conversion result. if (request.use_actual_converter_for_realtime_conversion()) { if (!PushBackTopConversionResult(request, *segments, results)) { LOG(WARNING) << "Realtime conversion with converter failed"; } } if (realtime_candidates_size == 0) { return; } // In what follows, add results from immutable converter. // TODO(noriyukit): The |immutable_converter_| used below can be replaced by // |converter_| in principle. There's a problem of ranking when we get // multiple segments, i.e., how to concatenate candidates in each segment. 
// Currently, immutable converter handles such ranking in prediction mode to // generate single segment results. So we want to share that code. // Preserve the current candidates_size to restore segments at the end of this // method. const size_t prev_candidates_size = segment->candidates_size(); const ConversionRequest request_for_realtime = GetConversionRequestForRealtimeCandidates( request, realtime_candidates_size, prev_candidates_size); if (!immutable_converter_->ConvertForRequest(request_for_realtime, segments) || prev_candidates_size >= segment->candidates_size()) { LOG(WARNING) << "Convert failed"; return; } // A little tricky treatment: // Since ImmutableConverter::Convert creates a set of new candidates, // copy them into the array of Results. for (size_t i = prev_candidates_size; i < segment->candidates_size(); ++i) { const Segment::Candidate &candidate = segment->candidate(i); results->push_back(Result()); Result *result = &results->back(); result->key = candidate.key; result->value = candidate.value; result->wcost = candidate.wcost; result->lid = candidate.lid; result->rid = candidate.rid; result->inner_segment_boundary = candidate.inner_segment_boundary; result->SetTypesAndTokenAttributes(REALTIME, Token::NONE); result->candidate_attributes |= candidate.attributes; result->consumed_key_size = candidate.consumed_key_size; } // Remove candidates created by ImmutableConverter. segment->erase_candidates(prev_candidates_size, segment->candidates_size() - prev_candidates_size); } size_t DictionaryPredictor::GetCandidateCutoffThreshold( const Segments &segments) const { DCHECK(segments.request_type() == Segments::PREDICTION || segments.request_type() == Segments::SUGGESTION); if (segments.request_type() == Segments::PREDICTION) { // If PREDICTION, many candidates are needed than SUGGESTION. 
return kPredictionMaxResultsSize; } return kSuggestionMaxResultsSize; } DictionaryPredictor::PredictionType DictionaryPredictor::AggregateUnigramCandidate( const ConversionRequest &request, const Segments &segments, std::vector<Result> *results) const { DCHECK(results); DCHECK(dictionary_); DCHECK(segments.request_type() == Segments::PREDICTION || segments.request_type() == Segments::SUGGESTION); const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments); const size_t prev_results_size = results->size(); GetPredictiveResults(*dictionary_, "", request, segments, UNIGRAM, cutoff_threshold, Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, results); const size_t unigram_results_size = results->size() - prev_results_size; // If size reaches max_results_size (== cutoff_threshold). // we don't show the candidates, since disambiguation from // 256 candidates is hard. (It may exceed max_results_size, because this is // just a limit for each backend, so total number may be larger) if (unigram_results_size >= cutoff_threshold) { results->resize(prev_results_size); } return UNIGRAM; } DictionaryPredictor::PredictionType DictionaryPredictor::AggregateUnigramCandidateForMixedConversion( const ConversionRequest &request, const Segments &segments, std::vector<Result> *results) const { DCHECK(segments.request_type() == Segments::PREDICTION || segments.request_type() == Segments::SUGGESTION); AggregateUnigramCandidateForMixedConversion(*dictionary_, request, segments, unknown_id_, results); return UNIGRAM; } void DictionaryPredictor::AggregateUnigramCandidateForMixedConversion( const dictionary::DictionaryInterface &dictionary, const ConversionRequest &request, const Segments &segments, int unknown_id, std::vector<Result> *results) { const size_t cutoff_threshold = kPredictionMaxResultsSize; std::vector<Result> raw_result; // No history key GetPredictiveResults(dictionary, "", request, segments, UNIGRAM, cutoff_threshold, Segment::Candidate::SOURCE_INFO_NONE, 
unknown_id, &raw_result); // Hereafter, we split "Needed Results" and "(maybe) Unneeded Results." // The algorithm is: // 1) Take the Result with minimum cost. // 2) Remove results which is "redundant" (defined by MaybeRedundant), // from remaining results. // 3) Repeat 1) and 2) five times. // Note: to reduce the number of memory allocation, we swap out the // "redundant" results to the end of the |results| vector. constexpr size_t kDeleteTrialNum = 5; // min_iter is the beginning of the remaining results (inclusive), and // max_iter is the end of the remaining results (exclusive). typedef std::vector<Result>::iterator Iter; Iter min_iter = raw_result.begin(); Iter max_iter = raw_result.end(); for (size_t i = 0; i < kDeleteTrialNum; ++i) { if (min_iter == max_iter) { break; } // Find the Result with minimum cost. Swap it with the beginning element. std::iter_swap(min_iter, std::min_element(min_iter, max_iter, ResultWCostLess())); const Result &reference_result = *min_iter; // Preserve the reference result. ++min_iter; // Traverse all remaining elements and check if each result is redundant. for (Iter iter = min_iter; iter != max_iter;) { // - We do not filter user dictionary word. const bool should_check_redundant = !iter->IsUserDictionaryResult(); if (should_check_redundant && MaybeRedundant(reference_result.value, iter->value)) { // Swap out the redundant result. --max_iter; std::iter_swap(iter, max_iter); } else { ++iter; } } } // Then the |raw_result| contains; // [begin, min_iter): reference results in the above loop. // [max_iter, end): (maybe) redundant results. // [min_iter, max_iter): remaining results. // Here, we revive the redundant results up to five in the result cost order. 
constexpr size_t kDoNotDeleteNum = 5; if (std::distance(max_iter, raw_result.end()) >= kDoNotDeleteNum) { std::partial_sort(max_iter, max_iter + kDoNotDeleteNum, raw_result.end(), ResultWCostLess()); max_iter += kDoNotDeleteNum; } else { max_iter = raw_result.end(); } // Finally output the result. results->insert(results->end(), raw_result.begin(), max_iter); } void DictionaryPredictor::AggregateBigramPrediction( const ConversionRequest &request, const Segments &segments, Segment::Candidate::SourceInfo source_info, std::vector<Result> *results) const { DCHECK(results); DCHECK(dictionary_); // TODO(toshiyuki): Support suggestion from the last 2 histories. // ex) "六本木"+"ヒルズ"->"レジデンス" std::string history_key, history_value; if (!GetHistoryKeyAndValue(segments, &history_key, &history_value)) { return; } AddBigramResultsFromHistory(history_key, history_value, request, segments, source_info, results); } void DictionaryPredictor::AddBigramResultsFromHistory( const std::string &history_key, const std::string &history_value, const ConversionRequest &request, const Segments &segments, Segment::Candidate::SourceInfo source_info, std::vector<Result> *results) const { // Check that history_key/history_value are in the dictionary. FindValueCallback find_history_callback(history_value); dictionary_->LookupPrefix(history_key, request, &find_history_callback); // History value is not found in the dictionary. // User may create this the history candidate from T13N or segment // expand/shrinkg operations. 
if (!find_history_callback.found()) { return; } const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments); const size_t prev_results_size = results->size(); GetPredictiveResultsForBigram(*dictionary_, history_key, history_value, request, segments, BIGRAM, cutoff_threshold, source_info, unknown_id_, results); const size_t bigram_results_size = results->size() - prev_results_size; // if size reaches max_results_size, // we don't show the candidates, since disambiguation from // 256 candidates is hard. (It may exceed max_results_size, because this is // just a limit for each backend, so total number may be larger) if (bigram_results_size >= cutoff_threshold) { results->resize(prev_results_size); return; } // Obtain the character type of the last history value. const size_t history_value_size = Util::CharsLen(history_value); if (history_value_size == 0) { return; } const Util::ScriptType history_ctype = Util::GetScriptType(history_value); const Util::ScriptType last_history_ctype = Util::GetScriptType( Util::Utf8SubString(history_value, history_value_size - 1, 1)); for (size_t i = prev_results_size; i < results->size(); ++i) { CheckBigramResult(find_history_callback.token(), history_ctype, last_history_ctype, request, &(*results)[i]); } } // Filter out irrelevant bigrams. For example, we don't want to // suggest "リカ" from the history "アメ". void DictionaryPredictor::CheckBigramResult( const Token &history_token, const Util::ScriptType history_ctype, const Util::ScriptType last_history_ctype, const ConversionRequest &request, Result *result) const { DCHECK(result); const std::string &history_key = history_token.key; const std::string &history_value = history_token.value; const std::string key(result->key, history_key.size(), result->key.size() - history_key.size()); const std::string value(result->value, history_value.size(), result->value.size() - history_value.size()); // Don't suggest 0-length key/value. 
if (key.empty() || value.empty()) { result->removed = true; MOZC_WORD_LOG(*result, "Removed. key, value or both are empty."); return; } const Util::ScriptType ctype = Util::GetScriptType(Util::Utf8SubString(value, 0, 1)); if (history_ctype == Util::KANJI && ctype == Util::KATAKANA) { // Do not filter "六本木ヒルズ" MOZC_WORD_LOG(*result, "Valid bigram. Kanji + Katakana pattern."); return; } // If freq("アメ") < freq("アメリカ"), we don't // need to suggest it. As "アメリカ" should already be // suggested when user type "アメ". // Note that wcost = -500 * log(prob). if (ctype != Util::KANJI && history_token.cost > result->wcost) { result->removed = true; MOZC_WORD_LOG(*result, "Removed. The prefix's score is lower than the whole."); return; } // If character type doesn't change, this boundary might NOT // be a word boundary. Only use iif the entire key is reasonably long. const size_t key_len = Util::CharsLen(result->key); if (ctype == last_history_ctype && ((ctype == Util::HIRAGANA && key_len <= 9) || (ctype == Util::KATAKANA && key_len <= 5))) { result->removed = true; MOZC_WORD_LOG(*result, "Removed. Short Hiragana (<= 9) or Katakana (<= 5)"); return; } // The suggested key/value pair must exist in the dictionary. // For example, we don't want to suggest "ターネット" from // the history "イン". // If character type is Kanji and the suggestion is not a // zero_query_suggestion, we relax this condition, as there are // many Kanji-compounds which may not in the dictionary. For example, // we want to suggest "霊長類研究所" from the history "京都大学". if (ctype == Util::KANJI && Util::CharsLen(value) >= 2) { // Do not filter this. // TODO(toshiyuki): one-length kanji prediciton may be annoying other than // some exceptions, "駅", "口", etc MOZC_WORD_LOG(*result, "Valid bigram. Kanji suffix (>= 2)."); return; } // Check if the word is in the dictionary or not. // For Hiragana words, check if that word is in a key of values. 
// This is for a situation that // ありがとうございました is not in the dictionary, but // ありがとう御座いました is in the dictionary. if (ctype == Util::HIRAGANA) { if (!dictionary_->HasKey(key)) { result->removed = true; MOZC_WORD_LOG(*result, "Removed. No keys are found."); return; } } else { FindValueCallback callback(value); dictionary_->LookupPrefix(key, request, &callback); if (!callback.found()) { result->removed = true; MOZC_WORD_LOG(*result, "Removed. No prefix found."); return; } } MOZC_WORD_LOG(*result, "Valid bigram."); } void DictionaryPredictor::GetPredictiveResults( const DictionaryInterface &dictionary, const std::string &history_key, const ConversionRequest &request, const Segments &segments, PredictionTypes types, size_t lookup_limit, Segment::Candidate::SourceInfo source_info, int unknown_id_, std::vector<Result> *results) { if (!request.has_composer()) { std::string input_key = history_key; input_key.append(segments.conversion_segment(0).key()); PredictiveLookupCallback callback(types, lookup_limit, input_key.size(), nullptr, source_info, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); return; } // If we have ambiguity for the input, get expanded key. // Example1 roman input: for "あk", we will get |base|, "あ" and |expanded|, // "か", "き", etc // Example2 kana input: for "あか", we will get |base|, "あ" and |expanded|, // "か", and "が". std::string base; std::set<std::string> expanded; request.composer().GetQueriesForPrediction(&base, &expanded); std::string input_key; if (expanded.empty()) { input_key.assign(history_key).append(base); PredictiveLookupCallback callback(types, lookup_limit, input_key.size(), nullptr, source_info, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); return; } // `non_expanded_original_key` keeps the original key request before // key expansions. 
This key is passed to the callback so that it can // identify whether the key is actually expanded or not. const std::string non_expanded_original_key = IsEnableNewSpatialScoring(request) ? history_key + segments.conversion_segment(0).key() : ""; // |expanded| is a very small set, so calling LookupPredictive multiple // times is not so expensive. Also, the number of lookup results is limited // by |lookup_limit|. for (const std::string &expanded_char : expanded) { input_key.assign(history_key).append(base).append(expanded_char); PredictiveLookupCallback callback(types, lookup_limit, input_key.size(), nullptr, source_info, unknown_id_, non_expanded_original_key, GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); } } void DictionaryPredictor::GetPredictiveResultsForBigram( const DictionaryInterface &dictionary, const std::string &history_key, const std::string &history_value, const ConversionRequest &request, const Segments &segments, PredictionTypes types, size_t lookup_limit, Segment::Candidate::SourceInfo source_info, int unknown_id_, std::vector<Result> *results) const { if (!request.has_composer()) { std::string input_key = history_key; input_key.append(segments.conversion_segment(0).key()); PredictiveBigramLookupCallback callback( types, lookup_limit, input_key.size(), nullptr, history_value, source_info, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); return; } // If we have ambiguity for the input, get expanded key. // Example1 roman input: for "あk", we will get |base|, "あ" and |expanded|, // "か", "き", etc // Example2 kana input: for "あか", we will get |base|, "あ" and |expanded|, // "か", and "が". std::string base; std::set<std::string> expanded; request.composer().GetQueriesForPrediction(&base, &expanded); const std::string input_key = history_key + base; const std::string non_expanded_original_key = IsEnableNewSpatialScoring(request) ? 
history_key + segments.conversion_segment(0).key() : ""; PredictiveBigramLookupCallback callback( types, lookup_limit, input_key.size(), expanded.empty() ? nullptr : &expanded, history_value, source_info, unknown_id_, non_expanded_original_key, GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); } void DictionaryPredictor::GetPredictiveResultsForEnglishKey( const DictionaryInterface &dictionary, const ConversionRequest &request, const std::string &input_key, PredictionTypes types, size_t lookup_limit, std::vector<Result> *results) const { const size_t prev_results_size = results->size(); if (Util::IsUpperAscii(input_key)) { // For upper case key, look up its lower case version and then transform // the results to upper case. std::string key(input_key); Util::LowerString(&key); PredictiveLookupCallback callback(types, lookup_limit, key.size(), nullptr, Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(key, request, &callback); for (size_t i = prev_results_size; i < results->size(); ++i) { Util::UpperString(&(*results)[i].value); } } else if (Util::IsCapitalizedAscii(input_key)) { // For capitalized key, look up its lower case version and then transform // the results to capital. std::string key(input_key); Util::LowerString(&key); PredictiveLookupCallback callback(types, lookup_limit, key.size(), nullptr, Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(key, request, &callback); for (size_t i = prev_results_size; i < results->size(); ++i) { Util::CapitalizeString(&(*results)[i].value); } } else { // For other cases (lower and as-is), just look up directly. 
PredictiveLookupCallback callback( types, lookup_limit, input_key.size(), nullptr, Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); } // If input mode is FULL_ASCII, then convert the results to full-width. if (request.has_composer() && request.composer().GetInputMode() == transliteration::FULL_ASCII) { std::string tmp; for (size_t i = prev_results_size; i < results->size(); ++i) { tmp.assign((*results)[i].value); japanese_util::HalfWidthAsciiToFullWidthAscii(tmp, &(*results)[i].value); } } } void DictionaryPredictor::GetPredictiveResultsUsingTypingCorrection( const DictionaryInterface &dictionary, const std::string &history_key, const ConversionRequest &request, const Segments &segments, PredictionTypes types, size_t lookup_limit, std::vector<Result> *results) const { if (!request.has_composer()) { return; } std::vector<composer::TypeCorrectedQuery> queries; request.composer().GetTypeCorrectedQueriesForPrediction(&queries); for (size_t query_index = 0; query_index < queries.size(); ++query_index) { const composer::TypeCorrectedQuery &query = queries[query_index]; const std::string input_key = history_key + query.base; const size_t previous_results_size = results->size(); PredictiveLookupCallback callback( types, lookup_limit, input_key.size(), query.expanded.empty() ? 
nullptr : &query.expanded, Segment::Candidate::SOURCE_INFO_NONE, unknown_id_, "", GetSpatialCostParams(request), results); dictionary.LookupPredictive(input_key, request, &callback); for (size_t i = previous_results_size; i < results->size(); ++i) { (*results)[i].wcost += query.cost; } lookup_limit -= results->size() - previous_results_size; if (lookup_limit <= 0) { break; } } } // static bool DictionaryPredictor::GetZeroQueryCandidatesForKey( const ConversionRequest &request, const std::string &key, const ZeroQueryDict &dict, std::vector<ZeroQueryResult> *results) { const int32_t available_emoji_carrier = request.request().available_emoji_carrier(); DCHECK(results); results->clear(); auto range = dict.equal_range(key); if (range.first == range.second) { return false; } for (; range.first != range.second; ++range.first) { const auto &entry = range.first; if (entry.type() != ZERO_QUERY_EMOJI) { results->push_back( std::make_pair(std::string(entry.value()), entry.type())); continue; } if (available_emoji_carrier & Request::UNICODE_EMOJI && entry.emoji_type() & EMOJI_UNICODE) { results->push_back( std::make_pair(std::string(entry.value()), entry.type())); continue; } if ((available_emoji_carrier & Request::DOCOMO_EMOJI && entry.emoji_type() & EMOJI_DOCOMO) || (available_emoji_carrier & Request::SOFTBANK_EMOJI && entry.emoji_type() & EMOJI_SOFTBANK) || (available_emoji_carrier & Request::KDDI_EMOJI && entry.emoji_type() & EMOJI_KDDI)) { std::string android_pua; Util::Ucs4ToUtf8(entry.emoji_android_pua(), &android_pua); results->push_back(std::make_pair(android_pua, entry.type())); } } return !results->empty(); } // static void DictionaryPredictor::AppendZeroQueryToResults( const std::vector<ZeroQueryResult> &candidates, uint16_t lid, uint16_t rid, std::vector<Result> *results) { int cost = 0; for (size_t i = 0; i < candidates.size(); ++i) { // Increment cost to show the candidates in order. 
constexpr int kSuffixPenalty = 10; results->push_back(Result()); Result *result = &results->back(); result->SetTypesAndTokenAttributes(SUFFIX, Token::NONE); result->SetSourceInfoForZeroQuery(candidates[i].second); result->key = candidates[i].first; result->value = candidates[i].first; result->wcost = cost; result->lid = lid; result->rid = rid; cost += kSuffixPenalty; } } // Returns true if we add zero query result. bool DictionaryPredictor::AggregateNumberZeroQueryPrediction( const ConversionRequest &request, const Segments &segments, std::vector<Result> *results) const { std::string number_key; if (!GetNumberHistory(segments, &number_key)) { return false; } std::vector<ZeroQueryResult> candidates_for_number_key; GetZeroQueryCandidatesForKey(request, number_key, zero_query_number_dict_, &candidates_for_number_key); std::vector<ZeroQueryResult> default_candidates_for_number; GetZeroQueryCandidatesForKey(request, "default", zero_query_number_dict_, &default_candidates_for_number); DCHECK(!default_candidates_for_number.empty()); AppendZeroQueryToResults(candidates_for_number_key, counter_suffix_word_id_, counter_suffix_word_id_, results); AppendZeroQueryToResults(default_candidates_for_number, counter_suffix_word_id_, counter_suffix_word_id_, results); return true; } // Returns true if we add zero query result. 
// Zero-query prediction: proposes candidates keyed solely on the top candidate
// of the most recent history segment (the user has typed nothing yet).
// Returns true when at least one candidate was appended to |results|.
bool DictionaryPredictor::AggregateZeroQueryPrediction(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  // Without any history segment there is nothing to key the lookup on.
  const size_t history_size = segments.history_segments_size();
  if (history_size <= 0) {
    return false;
  }
  const Segment &last_segment = segments.history_segment(history_size - 1);
  DCHECK_GT(last_segment.candidates_size(), 0);
  const std::string &history_value = last_segment.candidate(0).value;
  std::vector<ZeroQueryResult> candidates;
  if (!GetZeroQueryCandidatesForKey(request, history_value, zero_query_dict_,
                                    &candidates)) {
    return false;
  }
  const uint16_t kId = 0;  // EOS
  AppendZeroQueryToResults(candidates, kId, kId, results);
  return true;
}

// Aggregates predictive matches from the suffix dictionary for the current
// conversion key.  Precondition (DCHECKed): there is a conversion segment
// with a non-empty key, i.e. this is NOT the zero-query case.
void DictionaryPredictor::AggregateSuffixPrediction(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  DCHECK_GT(segments.conversion_segments_size(), 0);
  DCHECK(!segments.conversion_segment(0).key().empty());  // Not zero query

  // Uses larger cutoff (kPredictionMaxResultsSize) in order to consider
  // all suffix entries.
  const size_t cutoff_threshold = kPredictionMaxResultsSize;
  const std::string kEmptyHistoryKey = "";
  GetPredictiveResults(*suffix_dictionary_, kEmptyHistoryKey, request, segments,
                       SUFFIX, cutoff_threshold,
                       Segment::Candidate::SOURCE_INFO_NONE, unknown_id_,
                       results);
}

// Zero-query entry point (conversion key is empty, DCHECKed below).  Tries,
// in order: number-history zero query, generic zero query, and finally suffix
// dictionary entries looked up with an empty history key.
void DictionaryPredictor::AggregateZeroQuerySuffixPrediction(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  DCHECK_GT(segments.conversion_segments_size(), 0);
  DCHECK(segments.conversion_segment(0).key().empty());

  // Number-based zero-query results are treated as a complete answer: when
  // any were added, the remaining sources are not consulted.
  if (AggregateNumberZeroQueryPrediction(request, segments, results)) {
    return;
  }
  AggregateZeroQueryPrediction(request, segments, results);
  if (IsLatinInputMode(request)) {
    // We do not want zero query results from suffix dictionary for Latin
    // input mode. For example, we do not need "です", "。" just after "when".
    return;
  }
  // Uses larger cutoff (kPredictionMaxResultsSize) in order to consider
  // all suffix entries.
  const size_t cutoff_threshold = kPredictionMaxResultsSize;
  const std::string kEmptyHistoryKey = "";
  GetPredictiveResults(
      *suffix_dictionary_, kEmptyHistoryKey, request, segments, SUFFIX,
      cutoff_threshold,
      Segment::Candidate::DICTIONARY_PREDICTOR_ZERO_QUERY_SUFFIX, unknown_id_,
      results);
}

// Aggregates English word predictions for the conversion segment's key.
void DictionaryPredictor::AggregateEnglishPrediction(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  DCHECK(results);
  DCHECK(dictionary_);

  const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
  const size_t prev_results_size = results->size();
  const std::string &input_key = segments.conversion_segment(0).key();
  GetPredictiveResultsForEnglishKey(*dictionary_, request, input_key, ENGLISH,
                                    cutoff_threshold, results);
  size_t unigram_results_size = results->size() - prev_results_size;
  // When the lookup hits the cutoff, every result this call added is dropped.
  // NOTE(review): presumably a saturated lookup is considered too noisy to
  // keep — confirm against GetPredictiveResultsForEnglishKey's contract.
  if (unigram_results_size >= cutoff_threshold) {
    results->resize(prev_results_size);
    return;
  }
}

// Same as AggregateEnglishPrediction, but keys the lookup on the composer's
// raw (as-typed) string rather than the converted segment key.  No-op when
// the request carries no composer.
void DictionaryPredictor::AggregateEnglishPredictionUsingRawInput(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  DCHECK(results);
  DCHECK(dictionary_);

  if (!request.has_composer()) {
    return;
  }

  const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);
  const size_t prev_results_size = results->size();
  std::string input_key;
  request.composer().GetRawString(&input_key);
  GetPredictiveResultsForEnglishKey(*dictionary_, request, input_key, ENGLISH,
                                    cutoff_threshold, results);
  size_t unigram_results_size = results->size() - prev_results_size;
  // Same saturation rule as AggregateEnglishPrediction: discard the whole
  // batch once the cutoff is reached.
  if (unigram_results_size >= cutoff_threshold) {
    results->resize(prev_results_size);
    return;
  }
}

// Aggregates typing-correction candidates for the current input.
void DictionaryPredictor::AggregateTypeCorrectingPrediction(
    const ConversionRequest &request, const Segments &segments,
    std::vector<Result> *results) const {
  DCHECK(results);
  DCHECK(dictionary_);

  const size_t prev_results_size = results->size();
  // Bail out when a very large number of results has already been collected.
  // NOTE(review): looks like a performance guard on the correction lookup;
  // the 10000 threshold's origin is not visible here — confirm.
  if (prev_results_size > 10000) {
    return;
  }
  const size_t cutoff_threshold = GetCandidateCutoffThreshold(segments);

  // Currently, history key is never utilized.
  const std::string kEmptyHistoryKey = "";
  GetPredictiveResultsUsingTypingCorrection(
      *dictionary_, kEmptyHistoryKey, request, segments, TYPING_CORRECTION,
      cutoff_threshold, results);
  // As with the English aggregators, a lookup that saturates the cutoff is
  // discarded wholesale.
  if (results->size() - prev_results_size >= cutoff_threshold) {
    results->resize(prev_results_size);
    return;
  }
}

// Decides whether realtime (as-you-type) conversion results should be mixed
// into prediction for this request.
bool DictionaryPredictor::ShouldAggregateRealTimeConversionResults(
    const ConversionRequest &request, const Segments &segments) {
  constexpr size_t kMaxRealtimeKeySize = 300;  // 300 bytes in UTF8
  const std::string &key = segments.conversion_segment(0).key();
  if (key.empty() || key.size() >= kMaxRealtimeKeySize) {
    // 1) If key is empty, realtime conversion doesn't work.
    // 2) If the key is too long, we'll hit a performance issue.
    return false;
  }

  return (segments.request_type() == Segments::PARTIAL_SUGGESTION ||
          request.config().use_realtime_conversion() ||
          IsMixedConversionEnabled(request.request()));
}

// Returns true when |key| is non-empty and consists only of ASCII digits and
// '-', i.e. it looks like a zip-code query.
bool DictionaryPredictor::IsZipCodeRequest(const std::string &key) {
  if (key.empty()) {
    return false;
  }

  // Iterate over Unicode code points so a multi-byte character rejects the
  // key as a whole, not byte-by-byte.
  for (ConstChar32Iterator iter(key); !iter.Done(); iter.Next()) {
    const char32 c = iter.Get();
    if (!('0' <= c && c <= '9') && (c != '-')) {
      return false;
    }
  }
  return true;
}

}  // namespace mozc

#undef MOZC_WORD_LOG_MESSAGE
#undef MOZC_WORD_LOG
fcitx/mozc
src/prediction/dictionary_predictor.cc
C++
bsd-3-clause
88,483
'use strict';

// Registers the /checkout state on the shopnx application.  The state is
// flagged `authenticate: true`, so the app's auth interceptor can gate it.
angular.module('shopnxApp')
  .config(function ($stateProvider) {
    var checkoutState = {
      title: 'Checkout with the items you selected',
      url: '/checkout',
      templateUrl: 'app/checkout/checkout.html',
      controller: 'CheckoutCtrl',
      authenticate: true
    };
    $stateProvider.state('checkout', checkoutState);
  });
kinddevil/shop
client/app/checkout/checkout.js
JavaScript
bsd-3-clause
338
package org.hisp.dhis.email; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ /** * @author Zubair <rajazubair.asghar@gmail.com> */ public enum EmailResponse { SENT( "success" ), FAILED( "failed" ), ABORTED( "aborted" ), NOT_CONFIGURED( "no configuration found" ); private String responseMessage; EmailResponse( String responseMessage ) { this.responseMessage = responseMessage; } public String getResponseMessage() { return responseMessage; } public void setResponseMessage( String responseMessage ) { this.responseMessage = responseMessage; } }
vietnguyen/dhis2-core
dhis-2/dhis-api/src/main/java/org/hisp/dhis/email/EmailResponse.java
Java
bsd-3-clause
2,144
/** * SAHARA Scheduling Server * * Schedules and assigns local laboratory rigs. * * @license See LICENSE in the top level directory for complete license terms. * * Copyright (c) 2009, University of Technology, Sydney * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of Technology, Sydney nor the names * of its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * @author Tania Machet (tmachet) * @date 13 December 2010 */ package au.edu.uts.eng.remotelabs.schedserver.reports.intf.types; import java.io.Serializable; import java.util.ArrayList; import java.util.Calendar; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.apache.axiom.om.OMConstants; import org.apache.axiom.om.OMDataSource; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.impl.llom.OMSourcedElementImpl; import org.apache.axis2.databinding.ADBBean; import org.apache.axis2.databinding.ADBDataSource; import org.apache.axis2.databinding.ADBException; import org.apache.axis2.databinding.utils.BeanUtil; import org.apache.axis2.databinding.utils.ConverterUtil; import org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl; import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter; /** * QuerySessionReportType bean class. 
*/ public class QuerySessionReportType implements ADBBean { /* * This type was generated from the piece of schema that had * name = QuerySessionReportType * Namespace URI = http://remotelabs.eng.uts.edu.au/schedserver/reports * Namespace Prefix = ns1 */ private static final long serialVersionUID = -5121246029757741056L; private static String generatePrefix(final String namespace) { if (namespace.equals("http://remotelabs.eng.uts.edu.au/schedserver/reports")) { return "ns1"; } return BeanUtil.getUniquePrefix(); } protected RequestorType requestor; public RequestorType getRequestor() { return this.requestor; } public void setRequestor(final RequestorType param) { this.requestor = param; } protected QueryFilterType querySelect; public QueryFilterType getQuerySelect() { return this.querySelect; } public void setQuerySelect(final QueryFilterType param) { this.querySelect = param; } protected QueryFilterType queryConstraints; protected boolean queryConstraintsTracker = false; public QueryFilterType getQueryConstraints() { return this.queryConstraints; } public void setQueryConstraints(final QueryFilterType param) { this.queryConstraints = param; this.queryConstraintsTracker = param != null; } protected Calendar startTime; protected boolean startTimeTracker = false; public Calendar getStartTime() { return this.startTime; } public void setStartTime(final Calendar param) { this.startTime = param; this.startTimeTracker = param != null; } protected Calendar endTime; protected boolean endTimeTracker = false; public Calendar getEndTime() { return this.endTime; } public void setEndTime(final Calendar param) { this.endTime = param; this.endTimeTracker = param != null; } protected PaginationType pagination; protected boolean paginationTracker = false; public PaginationType getPagination() { return this.pagination; } public void setPagination(final PaginationType param) { this.pagination = param; this.paginationTracker = param != null; } public static boolean 
isReaderMTOMAware(final XMLStreamReader reader) { boolean isReaderMTOMAware = false; try { isReaderMTOMAware = Boolean.TRUE.equals(reader.getProperty(OMConstants.IS_DATA_HANDLERS_AWARE)); } catch (final IllegalArgumentException e) { isReaderMTOMAware = false; } return isReaderMTOMAware; } public OMElement getOMElement(final QName parentQName, final OMFactory factory) throws ADBException { final OMDataSource dataSource = new ADBDataSource(this, parentQName) { @Override public void serialize(final MTOMAwareXMLStreamWriter xmlWriter) throws XMLStreamException { QuerySessionReportType.this.serialize(this.parentQName, factory, xmlWriter); } }; return new OMSourcedElementImpl(parentQName, factory, dataSource); } @Override public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter) throws XMLStreamException, ADBException { this.serialize(parentQName, factory, xmlWriter, false); } @Override public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter, final boolean serializeType) throws XMLStreamException, ADBException { String prefix = parentQName.getPrefix(); String namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { final String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = QuerySessionReportType.generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType) { final String namespacePrefix = this.registerPrefix(xmlWriter, "http://remotelabs.eng.uts.edu.au/schedserver/reports"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) { 
this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix + ":QuerySessionReportType", xmlWriter); } else { this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "QuerySessionReportType", xmlWriter); } } if (this.requestor == null) { throw new ADBException("requestor cannot be null!!"); } this.requestor.serialize(new QName("", "requestor"), factory, xmlWriter); if (this.querySelect == null) { throw new ADBException("querySelect cannot be null!!"); } this.querySelect.serialize(new QName("", "querySelect"), factory, xmlWriter); if (this.queryConstraintsTracker) { if (this.queryConstraints == null) { throw new ADBException("queryConstraints cannot be null!!"); } this.queryConstraints.serialize(new QName("", "queryConstraints"), factory, xmlWriter); } if (this.startTimeTracker) { namespace = ""; if (!namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = QuerySessionReportType.generatePrefix(namespace); xmlWriter.writeStartElement(prefix, "startTime", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace, "startTime"); } } else { xmlWriter.writeStartElement("startTime"); } if (this.startTime == null) { throw new ADBException("startTime cannot be null!!"); } else { xmlWriter.writeCharacters(ConverterUtil.convertToString(this.startTime)); } xmlWriter.writeEndElement(); } if (this.endTimeTracker) { namespace = ""; if (!namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = QuerySessionReportType.generatePrefix(namespace); xmlWriter.writeStartElement(prefix, "endTime", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace, "endTime"); } } else { xmlWriter.writeStartElement("endTime"); } if (this.endTime == null) { throw new ADBException("endTime cannot 
be null!!"); } else { xmlWriter.writeCharacters(ConverterUtil.convertToString(this.endTime)); } xmlWriter.writeEndElement(); } if (this.paginationTracker) { if (this.pagination == null) { throw new ADBException("pagination cannot be null!!"); } this.pagination.serialize(new QName("", "pagination"), factory, xmlWriter); } xmlWriter.writeEndElement(); } private void writeAttribute(final String prefix, final String namespace, final String attName, final String attValue, final XMLStreamWriter xmlWriter) throws XMLStreamException { if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace, attName, attValue); } private String registerPrefix(final XMLStreamWriter xmlWriter, final String namespace) throws XMLStreamException { String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = QuerySessionReportType.generatePrefix(namespace); while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) { prefix = BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } @Override public XMLStreamReader getPullParser(final QName qName) throws ADBException { final ArrayList<Serializable> elementList = new ArrayList<Serializable>(); elementList.add(new QName("", "requestor")); if (this.requestor == null) { throw new ADBException("requestor cannot be null!!"); } elementList.add(this.requestor); elementList.add(new QName("", "querySelect")); if (this.querySelect == null) { throw new ADBException("querySelect cannot be null!!"); } elementList.add(this.querySelect); if (this.queryConstraintsTracker) { elementList.add(new QName("", "queryConstraints")); if (this.queryConstraints == null) { throw new ADBException("queryConstraints cannot be null!!"); } elementList.add(this.queryConstraints); } if (this.startTimeTracker) { elementList.add(new QName("", "startTime")); if 
(this.startTime != null) { elementList.add(ConverterUtil.convertToString(this.startTime)); } else { throw new ADBException("startTime cannot be null!!"); } } if (this.endTimeTracker) { elementList.add(new QName("", "endTime")); if (this.endTime != null) { elementList.add(ConverterUtil.convertToString(this.endTime)); } else { throw new ADBException("endTime cannot be null!!"); } } if (this.paginationTracker) { elementList.add(new QName("", "pagination")); if (this.pagination == null) { throw new ADBException("pagination cannot be null!!"); } elementList.add(this.pagination); } return new ADBXMLStreamReaderImpl(qName, elementList.toArray(), new Object[0]); } public static class Factory { public static QuerySessionReportType parse(final XMLStreamReader reader) throws Exception { final QuerySessionReportType object = new QuerySessionReportType(); try { while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) { final String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName != null) { String nsPrefix = null; if (fullTypeName.indexOf(":") > -1) { nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":")); } nsPrefix = nsPrefix == null ? 
"" : nsPrefix; final String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1); if (!"QuerySessionReportType".equals(type)) { final String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (QuerySessionReportType) ExtensionMapper.getTypeObject(nsUri, type, reader); } } } reader.next(); while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "requestor").equals(reader.getName())) { object.setRequestor(RequestorType.Factory.parse(reader)); reader.next(); } else { throw new ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "querySelect").equals(reader.getName())) { object.setQuerySelect(QueryFilterType.Factory.parse(reader)); reader.next(); } else { throw new ADBException("Unexpected subelement " + reader.getLocalName()); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "queryConstraints").equals(reader.getName())) { object.setQueryConstraints(QueryFilterType.Factory.parse(reader)); reader.next(); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "startTime").equals(reader.getName())) { final String content = reader.getElementText(); object.setStartTime(ConverterUtil.convertToDateTime(content)); reader.next(); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "endTime").equals(reader.getName())) { final String content = reader.getElementText(); object.setEndTime(ConverterUtil.convertToDateTime(content)); reader.next(); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement() && new QName("", "pagination").equals(reader.getName())) { 
object.setPagination(PaginationType.Factory.parse(reader)); reader.next(); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement()) { throw new ADBException("Unexpected subelement " + reader.getLocalName()); } } catch (final XMLStreamException e) { throw new Exception(e); } return object; } } }
jeking3/scheduling-server
Reports/src/au/edu/uts/eng/remotelabs/schedserver/reports/intf/types/QuerySessionReportType.java
Java
bsd-3-clause
20,256
/** * Copyright (c) 2016, The National Archives <pronom@nationalarchives.gsi.gov.uk> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following * conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of the The National Archives nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.1-b02-fcs // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. 
// Generated on: 2010.03.22 at 11:40:59 AM GMT // package uk.gov.nationalarchives.droid.report.planets.domain; import java.math.BigDecimal; import java.math.BigInteger; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.datatype.XMLGregorianCalendar; /** * <p>Java class for YearItemType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="YearItemType"&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="year" type="{http://www.w3.org/2001/XMLSchema}gYear"/&gt; * &lt;element name="numFiles" type="{http://www.w3.org/2001/XMLSchema}integer"/&gt; * &lt;element name="totalFileSize" type="{http://www.w3.org/2001/XMLSchema}decimal"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * @deprecated PLANETS XML is now generated using XSLT over normal report xml files. */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "YearItemType", propOrder = { "year", "numFiles", "totalFileSize" }) @Deprecated public class YearItemType { @XmlElement(required = true) @XmlSchemaType(name = "gYear") protected XMLGregorianCalendar year; @XmlElement(required = true) protected BigInteger numFiles; @XmlElement(required = true) protected BigDecimal totalFileSize; /** * Gets the value of the year property. * * @return * possible object is * {@link XMLGregorianCalendar } * */ public XMLGregorianCalendar getYear() { return year; } /** * Sets the value of the year property. * * @param value * allowed object is * {@link XMLGregorianCalendar } * */ public void setYear(XMLGregorianCalendar value) { this.year = value; } /** * Gets the value of the numFiles property. 
* * @return * possible object is * {@link BigInteger } * */ public BigInteger getNumFiles() { return numFiles; } /** * Sets the value of the numFiles property. * * @param value * allowed object is * {@link BigInteger } * */ public void setNumFiles(BigInteger value) { this.numFiles = value; } /** * Gets the value of the totalFileSize property. * * @return * possible object is * {@link BigDecimal } * */ public BigDecimal getTotalFileSize() { return totalFileSize; } /** * Sets the value of the totalFileSize property. * * @param value * allowed object is * {@link BigDecimal } * */ public void setTotalFileSize(BigDecimal value) { this.totalFileSize = value; } }
snail1966/droid
droid-report/src/main/java/uk/gov/nationalarchives/droid/report/planets/domain/YearItemType.java
Java
bsd-3-clause
5,288
/** * Copyright (c) 2016, The National Archives <pronom@nationalarchives.gsi.gov.uk> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following * conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of the The National Archives nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package uk.gov.nationalarchives.droid.command.action; import java.io.PrintWriter; import java.util.Map; import uk.gov.nationalarchives.droid.command.i18n.I18N; import uk.gov.nationalarchives.droid.core.interfaces.signature.SignatureFileException; import uk.gov.nationalarchives.droid.core.interfaces.signature.SignatureFileInfo; import uk.gov.nationalarchives.droid.core.interfaces.signature.SignatureManager; import uk.gov.nationalarchives.droid.core.interfaces.signature.SignatureType; /** * @author rflitcroft * */ public class DisplayDefaultSignatureFileVersionCommand implements DroidCommand { private PrintWriter printWriter; private SignatureManager signatureManager; /** * {@inheritDoc} */ @Override public void execute() throws CommandExecutionException { try { Map<SignatureType, SignatureFileInfo> sigFileInfos = signatureManager.getDefaultSignatures(); for (SignatureFileInfo info : sigFileInfos.values()) { printWriter.println(I18N.getResource(I18N.DEFAULT_SIGNATURE_VERSION, info.getType(), info.getVersion(), info.getFile().getName())); } } catch (SignatureFileException e) { throw new CommandExecutionException(e); } } /** * @param printWriter the printWriter to set */ public void setPrintWriter(PrintWriter printWriter) { this.printWriter = printWriter; } /** * @param signatureManager the signatureManager to set */ public void setSignatureManager(SignatureManager signatureManager) { this.signatureManager = signatureManager; } }
snail1966/droid
droid-command-line/src/main/java/uk/gov/nationalarchives/droid/command/action/DisplayDefaultSignatureFileVersionCommand.java
Java
bsd-3-clause
3,404
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_calloc_52c.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__delete.label.xml
Template File: sources-sinks-52c.tmpl.cpp
*/
/*
 * @description
 * CWE: 762 Mismatched Memory Management Routines
 * BadSource: calloc Allocate data using calloc()
 * GoodSource: Allocate data using new
 * Sinks:
 *    GoodSink: Deallocate data using free()
 *    BadSink : Deallocate data using delete
 * Flow Variant: 52 Data flow: data passed as an argument from one function to another to another in three different source files
 *
 * */
/* NOTE(review): Juliet test fixture — the delete/calloc mismatch below is the
 * vulnerability under test and is intentional; it must NOT be "fixed" here. */

#include "std_testcase.h"

namespace CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_calloc_52
{

#ifndef OMITBAD

/* Third hop of the flow-variant-52 chain: receives memory that the bad
 * source allocated with calloc() in another translation unit. */
void badSink_c(int64_t * data)
{
    /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
     * require a call to free() to deallocate the memory */
    delete data;
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink_c(int64_t * data)
{
    /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
     * require a call to free() to deallocate the memory */
    delete data;
}

/* goodB2G uses the BadSource with the GoodSink */
void goodB2GSink_c(int64_t * data)
{
    /* FIX: Deallocate the memory using free() */
    free(data);
}

#endif /* OMITGOOD */

} /* close namespace */
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE762_Mismatched_Memory_Management_Routines/s03/CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_calloc_52c.cpp
C++
bsd-3-clause
1,553
/* * Copyright (c) 2010-2011 Mark Allen. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.restfb; import static com.restfb.json.JsonObject.NULL; import static com.restfb.util.ReflectionUtils.findFieldsWithAnnotation; import static com.restfb.util.ReflectionUtils.getFirstParameterizedTypeArgument; import static com.restfb.util.ReflectionUtils.isPrimitive; import static com.restfb.util.StringUtils.isBlank; import static com.restfb.util.StringUtils.trimToEmpty; import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableSet; import static java.util.logging.Level.FINE; import static java.util.logging.Level.FINER; import static java.util.logging.Level.FINEST; import java.lang.reflect.Field; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.logging.Logger; import com.restfb.exception.FacebookJsonMappingException; import com.restfb.json.JsonArray; import com.restfb.json.JsonException; import com.restfb.json.JsonObject; import com.restfb.types.Post.Comments; import com.restfb.util.ReflectionUtils.FieldWithAnnotation; /** * Default implementation of a JSON-to-Java mapper. * * @author <a href="http://restfb.com">Mark Allen</a> */ public class DefaultJsonMapper implements JsonMapper { /** * Logger. 
*/ private static final Logger logger = Logger.getLogger(DefaultJsonMapper.class.getName()); /** * @see com.restfb.JsonMapper#toJavaList(String, Class) */ @Override public <T> List<T> toJavaList(String json, Class<T> type) { json = trimToEmpty(json); if (isBlank(json)) throw new FacebookJsonMappingException("JSON is an empty string - can't map it."); if (type == null) throw new FacebookJsonMappingException("You must specify the Java type to map to."); if (json.startsWith("{")) { // Sometimes Facebook returns the empty object {} when it really should be // returning an empty list [] (example: do an FQL query for a user's // affiliations - it's a list except when there are none, then it turns // into an object). Check for that special case here. if (isEmptyObject(json)) { if (logger.isLoggable(FINER)) logger.finer("Encountered {} when we should've seen []. " + "Mapping the {} as an empty list and moving on..."); return new ArrayList<T>(); } // Special case: if the only element of this object is an array called // "data", then treat it as a list. The Graph API uses this convention for // connections and in a few other places, e.g. comments on the Post // object. // Doing this simplifies mapping, so we don't have to worry about having a // little placeholder object that only has a "data" value. try { JsonObject jsonObject = new JsonObject(json); String[] fieldNames = JsonObject.getNames(jsonObject); if (fieldNames != null) { boolean hasSingleDataProperty = fieldNames.length == 1 && "data".equals(fieldNames[0]); Object jsonDataObject = jsonObject.get("data"); if (!hasSingleDataProperty && !(jsonDataObject instanceof JsonArray)) throw new FacebookJsonMappingException("JSON is an object but is being mapped as a list " + "instead. Offending JSON is '" + json + "'."); json = jsonDataObject.toString(); } } catch (JsonException e) { // Should never get here, but just in case... 
throw new FacebookJsonMappingException("Unable to convert Facebook response " + "JSON to a list of " + type.getName() + " instances. Offending JSON is " + json, e); } } try { List<T> list = new ArrayList<T>(); JsonArray jsonArray = new JsonArray(json); for (int i = 0; i < jsonArray.length(); i++) list.add(toJavaObject(jsonArray.get(i).toString(), type)); return unmodifiableList(list); } catch (FacebookJsonMappingException e) { throw e; } catch (Exception e) { throw new FacebookJsonMappingException("Unable to convert Facebook response " + "JSON to a list of " + type.getName() + " instances", e); } } /** * @see com.restfb.JsonMapper#toJavaObject(String, Class) */ @Override @SuppressWarnings("unchecked") public <T> T toJavaObject(String json, Class<T> type) { verifyThatJsonIsOfObjectType(json); try { // Are we asked to map to JsonObject? If so, short-circuit right away. if (type.equals(JsonObject.class)) return (T) new JsonObject(json); List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation = findFieldsWithAnnotation(type, Facebook.class); Set<String> facebookFieldNamesWithMultipleMappings = facebookFieldNamesWithMultipleMappings(fieldsWithAnnotation); // If there are no annotated fields, assume we're mapping to a built-in // type. If this is actually the empty object, just return a new instance // of the corresponding Java type. if (fieldsWithAnnotation.size() == 0) if (isEmptyObject(json)) return createInstance(type); else return toPrimitiveJavaType(json, type); // Facebook will sometimes return the string "null". // Check for that and bail early if we find it. if ("null".equals(json)) return null; // Facebook will sometimes return the string "false" to mean null. // Check for that and bail early if we find it. 
if ("false".equals(json)) { if (logger.isLoggable(FINE)) logger.fine("Encountered 'false' from Facebook when trying to map to " + type.getSimpleName() + " - mapping null instead."); return null; } JsonObject jsonObject = new JsonObject(json); T instance = createInstance(type); if (instance instanceof JsonObject) return (T) jsonObject; // For each Facebook-annotated field on the current Java object, pull data // out of the JSON object and put it in the Java object for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) { String facebookFieldName = getFacebookFieldName(fieldWithAnnotation); if (!jsonObject.has(facebookFieldName)) { if (logger.isLoggable(FINER)) logger.finer("No JSON value present for '" + facebookFieldName + "', skipping. JSON is '" + json + "'."); continue; } fieldWithAnnotation.getField().setAccessible(true); // Set the Java field's value. // // If we notice that this Facebook field name is mapped more than once, // go into a special mode where we swallow any exceptions that occur // when mapping to the Java field. This is because Facebook will // sometimes return data in different formats for the same field name. // See issues 56 and 90 for examples of this behavior and discussion. if (facebookFieldNamesWithMultipleMappings.contains(facebookFieldName)) { try { fieldWithAnnotation.getField() .set(instance, toJavaType(fieldWithAnnotation, jsonObject, facebookFieldName)); } catch (FacebookJsonMappingException e) { logMultipleMappingFailedForField(facebookFieldName, fieldWithAnnotation, json); } catch (JsonException e) { logMultipleMappingFailedForField(facebookFieldName, fieldWithAnnotation, json); } } else { fieldWithAnnotation.getField().set(instance, toJavaType(fieldWithAnnotation, jsonObject, facebookFieldName)); } } return instance; } catch (FacebookJsonMappingException e) { throw e; } catch (Exception e) { throw new FacebookJsonMappingException("Unable to map JSON to Java. 
Offending JSON is '" + json + "'.", e); } } /** * Dumps out a log message when one of a multiple-mapped Facebook field name * JSON-to-Java mapping operation fails. * * @param facebookFieldName * The Facebook field name. * @param fieldWithAnnotation * The Java field to map to and its annotation. * @param json * The JSON that failed to map to the Java field. */ protected void logMultipleMappingFailedForField(String facebookFieldName, FieldWithAnnotation<Facebook> fieldWithAnnotation, String json) { if (!logger.isLoggable(FINER)) return; Field field = fieldWithAnnotation.getField(); if (logger.isLoggable(FINER)) logger.finer("Could not map '" + facebookFieldName + "' to " + field.getDeclaringClass().getSimpleName() + "." + field.getName() + ", but continuing on because '" + facebookFieldName + "' is mapped to multiple fields in " + field.getDeclaringClass().getSimpleName() + ". JSON is " + json); } /** * For a Java field annotated with the {@code Facebook} annotation, figure out * what the corresponding Facebook JSON field name to map to it is. * * @param fieldWithAnnotation * A Java field annotated with the {@code Facebook} annotation. * @return The Facebook JSON field name that should be mapped to this Java * field. */ protected String getFacebookFieldName(FieldWithAnnotation<Facebook> fieldWithAnnotation) { String facebookFieldName = fieldWithAnnotation.getAnnotation().value(); Field field = fieldWithAnnotation.getField(); // If no Facebook field name was specified in the annotation, assume // it's the same name as the Java field if (isBlank(facebookFieldName)) { if (logger.isLoggable(FINEST)) logger.finest("No explicit Facebook field name found for " + field + ", so defaulting to the field name itself (" + field.getName() + ")"); facebookFieldName = field.getName(); } return facebookFieldName; } /** * Finds any Facebook JSON fields that are mapped to more than 1 Java field. 
* * @param fieldsWithAnnotation * Java fields annotated with the {@code Facebook} annotation. * @return Any Facebook JSON fields that are mapped to more than 1 Java field. */ protected Set<String> facebookFieldNamesWithMultipleMappings(List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation) { Map<String, Integer> facebookFieldsNamesWithOccurrenceCount = new HashMap<String, Integer>(); Set<String> facebookFieldNamesWithMultipleMappings = new HashSet<String>(); // Get a count of Facebook field name occurrences for each // @Facebook-annotated field for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) { String fieldName = getFacebookFieldName(fieldWithAnnotation); int occurrenceCount = facebookFieldsNamesWithOccurrenceCount.containsKey(fieldName) ? facebookFieldsNamesWithOccurrenceCount .get(fieldName) : 0; facebookFieldsNamesWithOccurrenceCount.put(fieldName, occurrenceCount + 1); } // Pull out only those field names with multiple mappings for (Entry<String, Integer> entry : facebookFieldsNamesWithOccurrenceCount.entrySet()) if (entry.getValue() > 1) facebookFieldNamesWithMultipleMappings.add(entry.getKey()); return unmodifiableSet(facebookFieldNamesWithMultipleMappings); } /** * @see com.restfb.JsonMapper#toJson(Object) */ @Override public String toJson(Object object) { // Delegate to recursive method return toJsonInternal(object).toString(); } /** * Is the given {@code json} a valid JSON object? * * @param json * The JSON to check. * @throws FacebookJsonMappingException * If {@code json} is not a valid JSON object. */ protected void verifyThatJsonIsOfObjectType(String json) { if (isBlank(json)) throw new FacebookJsonMappingException("JSON is an empty string - can't map it."); if (json.startsWith("[")) throw new FacebookJsonMappingException("JSON is an array but is being mapped as an object " + "- you should map it as a List instead. Offending JSON is '" + json + "'."); } /** * Recursively marshal the given {@code object} to JSON. 
* <p> * Used by {@link #toJson(Object)}. * * @param object * The object to marshal. * @return JSON representation of the given {@code object}. * @throws FacebookJsonMappingException * If an error occurs while marshaling to JSON. */ protected Object toJsonInternal(Object object) { if (object == null) return NULL; if (object instanceof List<?>) { JsonArray jsonArray = new JsonArray(); for (Object o : (List<?>) object) jsonArray.put(toJsonInternal(o)); return jsonArray; } if (object instanceof Map<?, ?>) { JsonObject jsonObject = new JsonObject(); for (Entry<?, ?> entry : ((Map<?, ?>) object).entrySet()) { if (!(entry.getKey() instanceof String)) throw new FacebookJsonMappingException("Your Map keys must be of type " + String.class + " in order to be converted to JSON. Offending map is " + object); try { jsonObject.put((String) entry.getKey(), toJsonInternal(entry.getValue())); } catch (JsonException e) { throw new FacebookJsonMappingException("Unable to process value '" + entry.getValue() + "' for key '" + entry.getKey() + "' in Map " + object, e); } } return jsonObject; } if (isPrimitive(object)) return object; if (object instanceof BigInteger) return ((BigInteger) object).longValue(); if (object instanceof BigDecimal) return ((BigDecimal) object).doubleValue(); // We've passed the special-case bits, so let's try to marshal this as a // plain old Javabean... 
List<FieldWithAnnotation<Facebook>> fieldsWithAnnotation = findFieldsWithAnnotation(object.getClass(), Facebook.class); JsonObject jsonObject = new JsonObject(); Set<String> facebookFieldNamesWithMultipleMappings = facebookFieldNamesWithMultipleMappings(fieldsWithAnnotation); if (facebookFieldNamesWithMultipleMappings.size() > 0) throw new FacebookJsonMappingException("Unable to convert to JSON because multiple @" + Facebook.class.getSimpleName() + " annotations for the same name are present: " + facebookFieldNamesWithMultipleMappings); for (FieldWithAnnotation<Facebook> fieldWithAnnotation : fieldsWithAnnotation) { String facebookFieldName = getFacebookFieldName(fieldWithAnnotation); fieldWithAnnotation.getField().setAccessible(true); try { jsonObject.put(facebookFieldName, toJsonInternal(fieldWithAnnotation.getField().get(object))); } catch (Exception e) { throw new FacebookJsonMappingException("Unable to process field '" + facebookFieldName + "' for " + object.getClass(), e); } } return jsonObject; } /** * Given a {@code json} value of something like {@code MyValue} or {@code 123} * , return a representation of that value of type {@code type}. * <p> * This is to support non-legal JSON served up by Facebook for API calls like * {@code Friends.get} (example result: {@code [222333,1240079]}). * * @param <T> * The Java type to map to. * @param json * The non-legal JSON to map to the Java type. * @param type * Type token. * @return Java representation of {@code json}. * @throws FacebookJsonMappingException * If an error occurs while mapping JSON to Java. */ @SuppressWarnings("unchecked") protected <T> T toPrimitiveJavaType(String json, Class<T> type) { if (String.class.equals(type)) { // If the string starts and ends with quotes, remove them, since Facebook // can serve up strings surrounded by quotes. 
if (json.length() > 1 && json.startsWith("\"") && json.endsWith("\"")) { json = json.replaceFirst("\"", ""); json = json.substring(0, json.length() - 1); } return (T) json; } if (Integer.class.equals(type) || Integer.TYPE.equals(type)) return (T) new Integer(json); if (Boolean.class.equals(type) || Boolean.TYPE.equals(type)) return (T) new Boolean(json); if (Long.class.equals(type) || Long.TYPE.equals(type)) return (T) new Long(json); if (Double.class.equals(type) || Double.TYPE.equals(type)) return (T) new Double(json); if (Float.class.equals(type) || Float.TYPE.equals(type)) return (T) new Float(json); if (BigInteger.class.equals(type)) return (T) new BigInteger(json); if (BigDecimal.class.equals(type)) return (T) new BigDecimal(json); throw new FacebookJsonMappingException("Don't know how to map JSON to " + type + ". Are you sure you're mapping to the right class? " + "Offending JSON is '" + json + "'."); } /** * Extracts JSON data for a field according to its {@code Facebook} annotation * and returns it converted to the proper Java type. * * @param fieldWithAnnotation * The field/annotation pair which specifies what Java type to * convert to. * @param jsonObject * "Raw" JSON object to pull data from. * @param facebookFieldName * Specifies what JSON field to pull "raw" data from. * @return A * @throws JsonException * If an error occurs while mapping JSON to Java. * @throws FacebookJsonMappingException * If an error occurs while mapping JSON to Java. */ protected Object toJavaType(FieldWithAnnotation<Facebook> fieldWithAnnotation, JsonObject jsonObject, String facebookFieldName) throws JsonException, FacebookJsonMappingException { Class<?> type = fieldWithAnnotation.getField().getType(); Object rawValue = jsonObject.get(facebookFieldName); // Short-circuit right off the bat if we've got a null value. if (NULL.equals(rawValue)) return null; if (String.class.equals(type)) { // Special handling here for better error checking. 
// Since JsonObject.getString() will return literal JSON text even if it's // _not_ a JSON string, we check the marshaled type and bail if needed. // For example, calling JsonObject.getString("results") on the below // JSON... // {"results":[{"name":"Mark Allen"}]} // ... would return the string "[{"name":"Mark Allen"}]" instead of // throwing an error. So we throw the error ourselves. // Per Antonello Naccarato, sometimes FB will return an empty JSON array // instead of an empty string. Look for that here. if (rawValue instanceof JsonArray) if (((JsonArray) rawValue).length() == 0) { if (logger.isLoggable(FINER)) logger.finer("Coercing an empty JSON array " + "to an empty string for " + fieldWithAnnotation); return ""; } // If the user wants a string, _always_ give her a string. // This is useful if, for example, you've got a @Facebook-annotated string // field that you'd like to have a numeric type shoved into. // User beware: this will turn *anything* into a string, which might lead // to results you don't expect. 
return rawValue.toString(); } if (Integer.class.equals(type) || Integer.TYPE.equals(type)) return new Integer(jsonObject.getInt(facebookFieldName)); if (Boolean.class.equals(type) || Boolean.TYPE.equals(type)) return new Boolean(jsonObject.getBoolean(facebookFieldName)); if (Long.class.equals(type) || Long.TYPE.equals(type)) return new Long(jsonObject.getLong(facebookFieldName)); if (Double.class.equals(type) || Double.TYPE.equals(type)) return new Double(jsonObject.getDouble(facebookFieldName)); if (Float.class.equals(type) || Float.TYPE.equals(type)) return new BigDecimal(jsonObject.getString(facebookFieldName)).floatValue(); if (BigInteger.class.equals(type)) return new BigInteger(jsonObject.getString(facebookFieldName)); if (BigDecimal.class.equals(type)) return new BigDecimal(jsonObject.getString(facebookFieldName)); if (List.class.equals(type)) return toJavaList(rawValue.toString(), getFirstParameterizedTypeArgument(fieldWithAnnotation.getField())); String rawValueAsString = rawValue.toString(); // Hack for issue 76 where FB will sometimes return a Post's Comments as // "[]" instead of an object type (wtf) if (Comments.class.isAssignableFrom(type) && rawValue instanceof JsonArray) { if (logger.isLoggable(FINE)) logger.fine("Encountered comment array '" + rawValueAsString + "' but expected a " + Comments.class.getSimpleName() + " object instead. Working around that " + "by coercing into an empty " + Comments.class.getSimpleName() + " instance..."); JsonObject workaroundJsonObject = new JsonObject(); workaroundJsonObject.put("count", 0); workaroundJsonObject.put("data", new JsonArray()); rawValueAsString = workaroundJsonObject.toString(); } // Some other type - recurse into it return toJavaObject(rawValueAsString, type); } /** * Creates a new instance of the given {@code type}. * * @param <T> * Java type to map to. * @param type * Type token. * @return A new instance of {@code type}. 
* @throws FacebookJsonMappingException * If an error occurs when creating a new instance ({@code type} is * inaccessible, doesn't have a public no-arg constructor, etc.) */ protected <T> T createInstance(Class<T> type) { String errorMessage = "Unable to create an instance of " + type + ". Please make sure that it's marked 'public' " + "and, if it's a nested class, is marked 'static'. " + "It should have a public, no-argument constructor."; try { return type.newInstance(); } catch (IllegalAccessException e) { throw new FacebookJsonMappingException(errorMessage, e); } catch (InstantiationException e) { throw new FacebookJsonMappingException(errorMessage, e); } } /** * Is the given JSON equivalent to the empty object (<code>{}</code>)? * * @param json * The JSON to check. * @return {@code true} if the JSON is equivalent to the empty object, * {@code false} otherwise. */ protected boolean isEmptyObject(String json) { return "{}".equals(json); } }
gooddata/GoodData-CL
connector/src/main/java/com/restfb/DefaultJsonMapper.java
Java
bsd-3-clause
26,108
// Copyright NVIDIA Corporation 2012 // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of NVIDIA CORPORATION nor the names of its // contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY // OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include <dp/ui/RenderTarget.h> namespace dp { namespace ui { RenderTarget::~RenderTarget() { } bool RenderTarget::beginRendering() { return true; } void RenderTarget::endRendering() { } bool RenderTarget::isStereoEnabled() const { return false; } bool RenderTarget::setStereoTarget( StereoTarget target ) { return target == LEFT; } RenderTarget::StereoTarget RenderTarget::getStereoTarget() const { return LEFT; } } // namespace ui } // namespace dp
swq0553/pipeline
dp/ui/src/RenderTarget.cpp
C++
bsd-3-clause
2,148
import PromiseRouter from '../PromiseRouter';
import * as middleware from "../middlewares";
import { Parse } from "parse/node";

/**
 * Router exposing the master-key-protected POST /push endpoint.
 */
export class PushRouter extends PromiseRouter {

  mountRoutes() {
    this.route("POST", "/push", middleware.promiseEnforceMasterKeyAccess, PushRouter.handlePOST);
  }

  /**
   * Handles a push request: validates configuration, derives the target
   * query from the body, and kicks off the send.
   * @param {Object} req The incoming request.
   * @returns {Promise} Resolves with a `{ response: { result: true } }` payload.
   */
  static handlePOST(req) {
    const { pushController } = req.config;
    if (!pushController) {
      throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED, 'Push controller is not set');
    }

    const where = PushRouter.getQueryCondition(req);
    pushController.sendPush(req.body, where, req.config, req.auth);

    return Promise.resolve({
      response: {
        'result': true
      }
    });
  }

  /**
   * Get query condition from the request body.
   * Exactly one of `where` / `channels` must be supplied.
   * @param {Object} req A request object
   * @returns {Object} The query condition, the where field in a query api call
   */
  static getQueryCondition(req) {
    const body = req.body || {};
    const suppliedWhere = typeof body.where !== 'undefined';
    const suppliedChannels = typeof body.channels !== 'undefined';

    if (suppliedWhere && suppliedChannels) {
      throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED, 'Channels and query can not be set at the same time.');
    }
    if (suppliedWhere) {
      return body.where;
    }
    if (suppliedChannels) {
      return {
        "channels": {
          "$in": body.channels
        }
      };
    }
    throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED, 'Channels and query should be set at least one.');
  }
}

export default PushRouter;
lucianmat/parse-server
src/Routers/PushRouter.js
JavaScript
bsd-3-clause
1,611
<?php
/**
 * @see       https://github.com/zendframework/zend-di for the canonical source repository
 * @copyright Copyright (c) 2017 Zend Technologies USA Inc. (https://www.zend.com)
 * @license   https://github.com/zendframework/zend-di/blob/master/LICENSE.md New BSD License
 */

namespace ZendTest\Di\TestAsset;

/**
 * Deliberately empty fixture class for the zend-di test suite: a dependency
 * target with no constructor and no members.
 */
class A
{
}
zendframework/zend-di
test/TestAsset/A.php
PHP
bsd-3-clause
329
//
// detail/pipe_select_interrupter.hpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2010 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//

#ifndef BOOST_ASIO_DETAIL_PIPE_SELECT_INTERRUPTER_HPP
#define BOOST_ASIO_DETAIL_PIPE_SELECT_INTERRUPTER_HPP

#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)

#include <boost/asio/detail/config.hpp>

// This pipe-based interrupter is only compiled on POSIX-like platforms
// (not Windows/Cygwin/Symbian) and only when the eventfd-based
// interrupter is unavailable (BOOST_ASIO_HAS_EVENTFD undefined).
#if !defined(BOOST_WINDOWS)
#if !defined(__CYGWIN__)
#if !defined(__SYMBIAN32__)
#if !defined(BOOST_ASIO_HAS_EVENTFD)

#include <boost/asio/detail/push_options.hpp>

namespace boost {
namespace asio {
namespace detail {

// Wakes up a blocking select() call by making one end of an internal
// connection readable. Definitions live in impl/pipe_select_interrupter.ipp.
class pipe_select_interrupter
{
public:
  // Constructor.
  BOOST_ASIO_DECL pipe_select_interrupter();

  // Destructor.
  BOOST_ASIO_DECL ~pipe_select_interrupter();

  // Interrupt the select call.
  BOOST_ASIO_DECL void interrupt();

  // Reset the select interrupt. Returns true if the call was interrupted.
  BOOST_ASIO_DECL bool reset();

  // Get the read descriptor to be passed to select.
  int read_descriptor() const
  {
    return read_descriptor_;
  }

private:
  // The read end of a connection used to interrupt the select call. This file
  // descriptor is passed to select such that when it is time to stop, a single
  // byte will be written on the other end of the connection and this
  // descriptor will become readable.
  int read_descriptor_;

  // The write end of a connection used to interrupt the select call. A single
  // byte may be written to this to wake up the select which is waiting for the
  // other end to become readable.
  int write_descriptor_;
};

} // namespace detail
} // namespace asio
} // namespace boost

#include <boost/asio/detail/pop_options.hpp>

#if defined(BOOST_ASIO_HEADER_ONLY)
# include <boost/asio/detail/impl/pipe_select_interrupter.ipp>
#endif // defined(BOOST_ASIO_HEADER_ONLY)

#endif // !defined(BOOST_ASIO_HAS_EVENTFD)
#endif // !defined(__SYMBIAN32__)
#endif // !defined(__CYGWIN__)
#endif // !defined(BOOST_WINDOWS)

#endif // BOOST_ASIO_DETAIL_PIPE_SELECT_INTERRUPTER_HPP
benkaraban/anima-games-engine
LibsExternes/Includes/boost/asio/detail/pipe_select_interrupter.hpp
C++
bsd-3-clause
2,322
from lib.common import helpers


class Module:
    """Empire module wrapper for Invoke-LockWorkStation.

    Builds a self-contained PowerShell script that dynamically defines a
    P/Invoke binding to User32!LockWorkStation and invokes it, locking the
    target workstation's display.
    """

    def __init__(self, mainMenu, params=[]):
        # Module metadata consumed by the Empire framework (display name,
        # author, OPSEC flags, minimum PowerShell language version, etc.).
        self.info = {
            'Name': 'Invoke-LockWorkStation',

            'Author': ['@harmj0y'],

            'Description': ("Locks the workstation's display."),

            'Background' : False,

            'OutputExtension' : None,

            'NeedsAdmin' : False,

            # Marked not OPSEC-safe: locking the screen is immediately
            # visible to the logged-on user.
            'OpsecSafe' : False,

            'Language' : 'powershell',

            'MinLanguageVersion' : '2',

            'Comments': [
                'http://poshcode.org/1640'
            ]
        }

        # any options needed by the module, settable during runtime
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Agent' : {
                'Description'   :   'Agent to run module on.',
                'Required'      :   True,
                'Value'         :   ''
            }
        }

        # save off a copy of the mainMenu object to access external functionality
        #   like listeners/agent handlers/etc.
        self.mainMenu = mainMenu

        # NOTE(review): params defaults to a mutable list; harmless here since
        # it is only read, never mutated.
        for param in params:
            # parameter format is [Name, Value]
            option, value = param
            if option in self.options:
                self.options[option]['Value'] = value

    def generate(self) -> str:
        """Return the PowerShell payload to be tasked to the agent."""
        script = """
Function Invoke-LockWorkStation {
    # region define P/Invoke types dynamically
    # stolen from PowerSploit https://github.com/mattifestation/PowerSploit/blob/master/Mayhem/Mayhem.psm1
    # thanks matt and chris :)
    $DynAssembly = New-Object System.Reflection.AssemblyName('Win32')
    $AssemblyBuilder = [AppDomain]::CurrentDomain.DefineDynamicAssembly($DynAssembly, [Reflection.Emit.AssemblyBuilderAccess]::Run)
    $ModuleBuilder = $AssemblyBuilder.DefineDynamicModule('Win32', $False)
    $TypeBuilder = $ModuleBuilder.DefineType('Win32.User32', 'Public, Class')
    $DllImportConstructor = [Runtime.InteropServices.DllImportAttribute].GetConstructor(@([String]))
    $SetLastError = [Runtime.InteropServices.DllImportAttribute].GetField('SetLastError')
    $SetLastErrorCustomAttribute = New-Object Reflection.Emit.CustomAttributeBuilder($DllImportConstructor, @('User32.dll'), [Reflection.FieldInfo[]]@($SetLastError), @($True))

    # Define [Win32.User32]::LockWorkStation()
    $PInvokeMethod = $TypeBuilder.DefinePInvokeMethod('LockWorkStation', 'User32.dll', ([Reflection.MethodAttributes]::Public -bor [Reflection.MethodAttributes]::Static), [Reflection.CallingConventions]::Standard, [Bool], [Type[]]@(), [Runtime.InteropServices.CallingConvention]::Winapi, [Runtime.InteropServices.CharSet]::Ansi)
    $PInvokeMethod.SetCustomAttribute($SetLastErrorCustomAttribute)

    $User32 = $TypeBuilder.CreateType()

    $Null = $User32::LockWorkStation()
}
Invoke-LockWorkStation; "Workstation locked."
"""
        return script
Hackplayers/Empire-mod-Hpys-tests
lib/modules/powershell/management/lock.py
Python
bsd-3-clause
3,056
/* ***** BEGIN LICENSE BLOCK ***** * Distributed under the BSD license: * * Copyright (c) 2010, Ajax.org B.V. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Ajax.org B.V. nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* * ***** END LICENSE BLOCK ***** */ define(function(require, exports, module) { "use strict"; var keys = require("./keys"); var useragent = require("./useragent"); exports.addListener = function(elem, type, callback) { if (elem.addEventListener) { return elem.addEventListener(type, callback, false); } if (elem.attachEvent) { var wrapper = function() { callback.call(elem, window.event); }; callback._wrapper = wrapper; elem.attachEvent("on" + type, wrapper); } }; exports.removeListener = function(elem, type, callback) { if (elem.removeEventListener) { return elem.removeEventListener(type, callback, false); } if (elem.detachEvent) { elem.detachEvent("on" + type, callback._wrapper || callback); } }; /* * Prevents propagation and clobbers the default action of the passed event */ exports.stopEvent = function(e) { exports.stopPropagation(e); exports.preventDefault(e); return false; }; exports.stopPropagation = function(e) { if (e.stopPropagation) e.stopPropagation(); else e.cancelBubble = true; }; exports.preventDefault = function(e) { if (e.preventDefault) e.preventDefault(); else e.returnValue = false; }; /* * @return {Number} 0 for left button, 1 for middle button, 2 for right button */ exports.getButton = function(e) { if (e.type == "dblclick") return 0; if (e.type == "contextmenu" || (useragent.isMac && (e.ctrlKey && !e.altKey && !e.shiftKey))) return 2; // DOM Event if (e.preventDefault) { return e.button; } // old IE else { return {1:0, 2:2, 4:1}[e.button]; } }; exports.capture = function(el, eventHandler, releaseCaptureHandler) { function onMouseUp(e) { eventHandler && eventHandler(e); releaseCaptureHandler && releaseCaptureHandler(e); exports.removeListener(document, "mousemove", eventHandler, true); exports.removeListener(document, "mouseup", onMouseUp, true); exports.removeListener(document, "dragstart", onMouseUp, true); } exports.addListener(document, "mousemove", eventHandler, true); exports.addListener(document, "mouseup", onMouseUp, true); 
exports.addListener(document, "dragstart", onMouseUp, true); return onMouseUp; }; exports.addMouseWheelListener = function(el, callback) { if ("onmousewheel" in el) { exports.addListener(el, "mousewheel", function(e) { var factor = 8; if (e.wheelDeltaX !== undefined) { e.wheelX = -e.wheelDeltaX / factor; e.wheelY = -e.wheelDeltaY / factor; } else { e.wheelX = 0; e.wheelY = -e.wheelDelta / factor; } callback(e); }); } else if ("onwheel" in el) { exports.addListener(el, "wheel", function(e) { var factor = 0.35; switch (e.deltaMode) { case e.DOM_DELTA_PIXEL: e.wheelX = e.deltaX * factor || 0; e.wheelY = e.deltaY * factor || 0; break; case e.DOM_DELTA_LINE: case e.DOM_DELTA_PAGE: e.wheelX = (e.deltaX || 0) * 5; e.wheelY = (e.deltaY || 0) * 5; break; } callback(e); }); } else { exports.addListener(el, "DOMMouseScroll", function(e) { if (e.axis && e.axis == e.HORIZONTAL_AXIS) { e.wheelX = (e.detail || 0) * 5; e.wheelY = 0; } else { e.wheelX = 0; e.wheelY = (e.detail || 0) * 5; } callback(e); }); } }; exports.addMultiMouseDownListener = function(el, timeouts, eventHandler, callbackName) { var clicks = 0; var startX, startY, timer; var eventNames = { 2: "dblclick", 3: "tripleclick", 4: "quadclick" }; exports.addListener(el, "mousedown", function(e) { if (exports.getButton(e) !== 0) { clicks = 0; } else if (e.detail > 1) { clicks++; if (clicks > 4) clicks = 1; } else { clicks = 1; } if (useragent.isIE) { var isNewClick = Math.abs(e.clientX - startX) > 5 || Math.abs(e.clientY - startY) > 5; if (!timer || isNewClick) clicks = 1; if (timer) clearTimeout(timer); timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600); if (clicks == 1) { startX = e.clientX; startY = e.clientY; } } e._clicks = clicks; eventHandler[callbackName]("mousedown", e); if (clicks > 4) clicks = 0; else if (clicks > 1) return eventHandler[callbackName](eventNames[clicks], e); }); if (useragent.isOldIE) { exports.addListener(el, "dblclick", function(e) { clicks = 2; if (timer) 
clearTimeout(timer); timer = setTimeout(function() {timer = null}, timeouts[clicks - 1] || 600); eventHandler[callbackName]("mousedown", e); eventHandler[callbackName](eventNames[clicks], e); }); } }; var getModifierHash = useragent.isMac && useragent.isOpera && !("KeyboardEvent" in window) ? function(e) { return 0 | (e.metaKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.ctrlKey ? 8 : 0); } : function(e) { return 0 | (e.ctrlKey ? 1 : 0) | (e.altKey ? 2 : 0) | (e.shiftKey ? 4 : 0) | (e.metaKey ? 8 : 0); }; exports.getModifierString = function(e) { return keys.KEY_MODS[getModifierHash(e)]; }; function normalizeCommandKeys(callback, e, keyCode) { var hashId = getModifierHash(e); if (!useragent.isMac && pressedKeys) { if (pressedKeys[91] || pressedKeys[92]) hashId |= 8; if (pressedKeys.altGr) { if ((3 & hashId) != 3) pressedKeys.altGr = 0; else return; } if (keyCode === 18 || keyCode === 17) { var location = "location" in e ? e.location : e.keyLocation; if (keyCode === 17 && location === 1) { if (pressedKeys[keyCode] == 1) ts = e.timeStamp; } else if (keyCode === 18 && hashId === 3 && location === 2) { var dt = e.timeStamp - ts; if (dt < 50) pressedKeys.altGr = true; } } } if (keyCode in keys.MODIFIER_KEYS) { keyCode = -1; } if (hashId & 8 && (keyCode === 91 || keyCode === 93)) { keyCode = -1; } if (!hashId && keyCode === 13) { var location = "location" in e ? e.location : e.keyLocation; if (location === 3) { callback(e, hashId, -keyCode); if (e.defaultPrevented) return; } } if (useragent.isChromeOS && hashId & 8) { callback(e, hashId, keyCode); if (e.defaultPrevented) return; else hashId &= ~8; } // If there is no hashId and the keyCode is not a function key, then // we don't call the callback as we don't handle a command key here // (it's a normal key/character input). 
if (!hashId && !(keyCode in keys.FUNCTION_KEYS) && !(keyCode in keys.PRINTABLE_KEYS)) { return false; } return callback(e, hashId, keyCode); } var pressedKeys = null; var ts = 0; exports.addCommandKeyListener = function(el, callback) { var addListener = exports.addListener; if (useragent.isOldGecko || (useragent.isOpera && !("KeyboardEvent" in window))) { // Old versions of Gecko aka. Firefox < 4.0 didn't repeat the keydown // event if the user pressed the key for a longer time. Instead, the // keydown event was fired once and later on only the keypress event. // To emulate the 'right' keydown behavior, the keyCode of the initial // keyDown event is stored and in the following keypress events the // stores keyCode is used to emulate a keyDown event. var lastKeyDownKeyCode = null; addListener(el, "keydown", function(e) { lastKeyDownKeyCode = e.keyCode; }); addListener(el, "keypress", function(e) { return normalizeCommandKeys(callback, e, lastKeyDownKeyCode); }); } else { var lastDefaultPrevented = null; addListener(el, "keydown", function(e) { pressedKeys[e.keyCode] = (pressedKeys[e.keyCode] || 0) + 1; var result = normalizeCommandKeys(callback, e, e.keyCode); lastDefaultPrevented = e.defaultPrevented; return result; }); addListener(el, "keypress", function(e) { if (lastDefaultPrevented && (e.ctrlKey || e.altKey || e.shiftKey || e.metaKey)) { exports.stopEvent(e); lastDefaultPrevented = null; } }); addListener(el, "keyup", function(e) { pressedKeys[e.keyCode] = null; }); if (!pressedKeys) { pressedKeys = Object.create(null); addListener(window, "focus", function(e) { pressedKeys = Object.create(null); }); } } }; if (window.postMessage && !useragent.isOldIE) { var postMessageId = 1; exports.nextTick = function(callback, win) { win = win || window; var messageName = "zero-timeout-message-" + postMessageId; exports.addListener(win, "message", function listener(e) { if (e.data == messageName) { exports.stopPropagation(e); exports.removeListener(win, "message", 
listener); callback(); } }); win.postMessage(messageName, "*"); }; } exports.nextFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame; if (exports.nextFrame) exports.nextFrame = exports.nextFrame.bind(window); else exports.nextFrame = function(callback) { setTimeout(callback, 17); }; });
louis-tru/touch_code
client/third_party/ace/lib/ace/lib/event.js
JavaScript
bsd-3-clause
12,179
############################################################################### ## ## Copyright (C) 2014-2016, New York University. ## Copyright (C) 2011-2014, NYU-Poly. ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the New York University nor the names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## ############################################################################### from __future__ import division """ This python module defines Connection class. 
""" import copy from vistrails.db.domain import DBConnection from vistrails.core.vistrail.port import PortEndPoint, Port import unittest from vistrails.db.domain import IdScope ################################################################################ class Connection(DBConnection): """ A Connection is a connection between two modules. Right now there's only Module connections. """ ########################################################################## # Constructors and copy @staticmethod def from_port_specs(source, dest): """from_port_specs(source: PortSpec, dest: PortSpec) -> Connection Static method that creates a Connection given source and destination ports. """ conn = Connection() conn.source = copy.copy(source) conn.destination = copy.copy(dest) return conn @staticmethod def fromID(id): """fromTypeID(id: int) -> Connection Static method that creates a Connection given an id. """ conn = Connection() conn.id = id conn.source.endPoint = PortEndPoint.Source conn.destination.endPoint = PortEndPoint.Destination return conn def __init__(self, *args, **kwargs): """__init__() -> Connection Initializes source and destination ports. """ DBConnection.__init__(self, *args, **kwargs) if self.id is None: self.db_id = -1 if not len(self.ports) > 0: self.source = Port(type='source') self.destination = Port(type='destination') def __copy__(self): """__copy__() -> Connection - Returns a clone of self. 
""" return Connection.do_copy(self) def do_copy(self, new_ids=False, id_scope=None, id_remap=None): cp = DBConnection.do_copy(self, new_ids, id_scope, id_remap) cp.__class__ = Connection for port in cp.ports: Port.convert(port) return cp ########################################################################## @staticmethod def convert(_connection): # print "ports: %s" % _Connection._get_ports(_connection) if _connection.__class__ == Connection: return _connection.__class__ = Connection for port in _connection.ports: Port.convert(port) ########################################################################## # Properties id = DBConnection.db_id ports = DBConnection.db_ports def add_port(self, port): self.db_add_port(port) def _get_sourceId(self): """ _get_sourceId() -> int Returns the module id of source port. Do not use this function, use sourceId property: c.sourceId """ return self.source.moduleId def _set_sourceId(self, id): """ _set_sourceId(id : int) -> None Sets this connection source id. It updates both self.source.moduleId and self.source.id. Do not use this function, use sourceId property: c.sourceId = id """ self.source.moduleId = id self.source.id = id sourceId = property(_get_sourceId, _set_sourceId) def _get_destinationId(self): """ _get_destinationId() -> int Returns the module id of dest port. Do not use this function, use sourceId property: c.destinationId """ return self.destination.moduleId def _set_destinationId(self, id): """ _set_destinationId(id : int) -> None Sets this connection destination id. It updates self.dest.moduleId. Do not use this function, use destinationId property: c.destinationId = id """ self.destination.moduleId = id destinationId = property(_get_destinationId, _set_destinationId) def _get_source(self): """_get_source() -> Port Returns source port. 
Do not use this function, use source property: c.source """ try: return self.db_get_port_by_type('source') except KeyError: pass return None def _set_source(self, source): """_set_source(source: Port) -> None Sets this connection source port. Do not use this function, use source property instead: c.source = source """ try: port = self.db_get_port_by_type('source') self.db_delete_port(port) except KeyError: pass if source is not None: self.db_add_port(source) source = property(_get_source, _set_source) def _get_destination(self): """_get_destination() -> Port Returns destination port. Do not use this function, use destination property: c.destination """ # return self.db_ports['destination'] try: return self.db_get_port_by_type('destination') except KeyError: pass return None def _set_destination(self, dest): """_set_destination(dest: Port) -> None Sets this connection destination port. Do not use this function, use destination property instead: c.destination = dest """ try: port = self.db_get_port_by_type('destination') self.db_delete_port(port) except KeyError: pass if dest is not None: self.db_add_port(dest) destination = property(_get_destination, _set_destination) dest = property(_get_destination, _set_destination) ########################################################################## # Operators def __str__(self): """__str__() -> str - Returns a string representation of a Connection object. 
""" rep = "<connection id='%s'>%s%s</connection>" return rep % (str(self.id), str(self.source), str(self.destination)) def __ne__(self, other): return not self.__eq__(other) def __eq__(self, other): if type(other) != type(self): return False return (self.source == other.source and self.dest == other.dest) def equals_no_id(self, other): """Checks equality up to ids (connection and ports).""" if type(self) != type(other): return False return (self.source.equals_no_id(other.source) and self.dest.equals_no_id(other.dest)) ################################################################################ # Testing class TestConnection(unittest.TestCase): def create_connection(self, id_scope=IdScope()): from vistrails.core.vistrail.port import Port from vistrails.core.modules.basic_modules import identifier as basic_pkg source = Port(id=id_scope.getNewId(Port.vtType), type='source', moduleId=21L, moduleName='String', name='value', signature='(%s:String)' % basic_pkg) destination = Port(id=id_scope.getNewId(Port.vtType), type='destination', moduleId=20L, moduleName='Float', name='value', signature='(%s:Float)' % basic_pkg) connection = Connection(id=id_scope.getNewId(Connection.vtType), ports=[source, destination]) return connection def test_copy(self): id_scope = IdScope() c1 = self.create_connection(id_scope) c2 = copy.copy(c1) self.assertEquals(c1, c2) self.assertEquals(c1.id, c2.id) c3 = c1.do_copy(True, id_scope, {}) self.assertEquals(c1, c3) self.assertNotEquals(c1.id, c3.id) def test_serialization(self): import vistrails.core.db.io c1 = self.create_connection() xml_str = vistrails.core.db.io.serialize(c1) c2 = vistrails.core.db.io.unserialize(xml_str, Connection) self.assertEquals(c1, c2) self.assertEquals(c1.id, c2.id) def testEmptyConnection(self): """Tests sane initialization of empty connection""" c = Connection() self.assertEquals(c.source.endPoint, PortEndPoint.Source) self.assertEquals(c.destination.endPoint, PortEndPoint.Destination) if __name__ == 
'__main__': unittest.main()
VisTrails/VisTrails
vistrails/core/vistrail/connection.py
Python
bsd-3-clause
10,497
// ============================================================================= // PROJECT CHRONO - http://projectchrono.org // // Copyright (c) 2014 projectchrono.org // All rights reserved. // // Use of this source code is governed by a BSD-style license that can be found // in the LICENSE file at the top level of the distribution and at // http://projectchrono.org/license-chrono.txt. // // ============================================================================= // Authors: Radu Serban // ============================================================================= // // Demo code about collisions and contacts using the penalty method (SMC) // // ============================================================================= #include "chrono/physics/ChSystemSMC.h" #include "chrono/physics/ChContactContainerSMC.h" #include "chrono/solver/ChSolverSMC.h" #include "chrono_irrlicht/ChIrrApp.h" #include <irrlicht.h> // Use the namespaces of Chrono using namespace chrono; using namespace chrono::irrlicht; // Use the main namespaces of Irrlicht using namespace irr; using namespace irr::core; using namespace irr::scene; using namespace irr::video; using namespace irr::io; using namespace irr::gui; void AddWall(std::shared_ptr<ChBody> body, const ChVector<>& dim, const ChVector<>& loc) { body->GetCollisionModel()->AddBox(dim.x(), dim.y(), dim.z(), loc); auto box = std::make_shared<ChBoxShape>(); box->GetBoxGeometry().Size = dim; box->GetBoxGeometry().Pos = loc; box->SetColor(ChColor(1, 0, 0)); box->SetFading(0.6f); body->AddAsset(box); } int main(int argc, char* argv[]) { GetLog() << "Copyright (c) 2017 projectchrono.org\nChrono version: " << CHRONO_VERSION << "\n\n"; // Simulation parameters double gravity = -9.81; double time_step = 0.00001; double out_step = 2000 * time_step; // Parameters for the falling ball int ballId = 100; double radius = 1; double mass = 1000; ChVector<> pos(0, 2, 0); ChQuaternion<> rot(1, 0, 0, 0); ChVector<> init_vel(0, 0, 0); // Parameters 
for the containing bin int binId = 200; double width = 2; double length = 2; double height = 1; double thickness = 0.1; // Create the system ChSystemSMC msystem; // The following two lines are optional, since they are the default options. They are added for future reference, // i.e. when needed to change those models. msystem.SetContactForceModel(ChSystemSMC::ContactForceModel::Hertz); msystem.SetAdhesionForceModel(ChSystemSMC::AdhesionForceModel::Constant); msystem.Set_G_acc(ChVector<>(0, gravity, 0)); // Change the default collision effective radius of curvature collision::ChCollisionInfo::SetDefaultEffectiveCurvatureRadius(1); // Create the Irrlicht visualization ChIrrApp application(&msystem, L"SMC demo", core::dimension2d<u32>(800, 600), false, true); // Easy shortcuts to add camera, lights, logo and sky in Irrlicht scene application.AddTypicalLogo(); application.AddTypicalSky(); application.AddTypicalLights(); application.AddTypicalCamera(core::vector3df(0, 3, -6)); // This means that contactforces will be shown in Irrlicht application application.SetSymbolscale(1e-4); application.SetContactsDrawMode(ChIrrTools::eCh_ContactsDrawMode::CONTACT_FORCES); // Create a material (will be used by both objects) auto material = std::make_shared<ChMaterialSurfaceSMC>(); material->SetRestitution(0.1f); material->SetFriction(0.4f); material->SetAdhesion(0); // Magnitude of the adhesion in Constant adhesion model // Create the falling ball auto ball = std::make_shared<ChBody>(ChMaterialSurface::SMC); ball->SetIdentifier(ballId); ball->SetMass(mass); ball->SetPos(pos); ball->SetRot(rot); ball->SetPos_dt(init_vel); // ball->SetWvel_par(ChVector<>(0,0,3)); ball->SetBodyFixed(false); ball->SetMaterialSurface(material); ball->SetCollide(true); ball->GetCollisionModel()->ClearModel(); ball->GetCollisionModel()->AddSphere(radius); ball->GetCollisionModel()->BuildModel(); ball->SetInertiaXX(0.4 * mass * radius * radius * ChVector<>(1, 1, 1)); auto sphere = 
std::make_shared<ChSphereShape>(); sphere->GetSphereGeometry().rad = radius; ball->AddAsset(sphere); auto mtexture = std::make_shared<ChTexture>(); mtexture->SetTextureFilename(GetChronoDataFile("bluwhite.png")); ball->AddAsset(mtexture); msystem.AddBody(ball); // Create container auto bin = std::make_shared<ChBody>(ChMaterialSurface::SMC); bin->SetIdentifier(binId); bin->SetMass(1); bin->SetPos(ChVector<>(0, 0, 0)); bin->SetRot(ChQuaternion<>(1, 0, 0, 0)); bin->SetCollide(true); bin->SetBodyFixed(true); bin->SetMaterialSurface(material); bin->GetCollisionModel()->ClearModel(); AddWall(bin, ChVector<>(width, thickness, length), ChVector<>(0, 0, 0)); // AddWall(bin, ChVector<>(thickness, height, length), ChVector<>(-width + thickness, height, 0)); // AddWall(bin, ChVector<>(thickness, height, length), ChVector<>(width - thickness, height, 0)); // AddWall(bin, ChVector<>(width, height, thickness), ChVector<>(0, height, -length + thickness)); // AddWall(bin, ChVector<>(width, height, thickness), ChVector<>(0, height, length - thickness)); bin->GetCollisionModel()->BuildModel(); msystem.AddBody(bin); // Complete asset construction application.AssetBindAll(); application.AssetUpdateAll(); // The soft-real-time cycle double time = 0.0; double out_time = 0.0; while (application.GetDevice()->run()) { application.BeginScene(); application.DrawAll(); ChIrrTools::drawGrid(application.GetVideoDriver(), 0.2, 0.2, 20, 20, ChCoordsys<>(ChVector<>(0, 0, 0), Q_from_AngX(CH_C_PI_2)), video::SColor(255, 80, 100, 100), true); while (time < out_time) { msystem.DoStepDynamics(time_step); time += time_step; } out_time += out_step; application.EndScene(); } return 0; }
amelmquist/chrono
src/demos/irrlicht/demo_IRR_ballSMC.cpp
C++
bsd-3-clause
6,214
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82a.cpp Label Definition File: CWE134_Uncontrolled_Format_String.label.xml Template File: sources-sinks-82a.tmpl.cpp */ /* * @description * CWE: 134 Uncontrolled Format String * BadSource: connect_socket Read data using a connect socket (client side) * GoodSource: Copy a fixed string into data * Sinks: fprintf * GoodSink: fprintf with "%s" as the second argument and data as the third * BadSink : fprintf with data as the second argument * Flow Variant: 82 Data flow: data passed in a parameter to an virtual method called via a pointer * * */ #include "std_testcase.h" #include "CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82.h" #ifdef _WIN32 #include <winsock2.h> #include <windows.h> #include <direct.h> #pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */ #define CLOSE_SOCKET closesocket #else /* NOT _WIN32 */ #include <sys/types.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <unistd.h> #define INVALID_SOCKET -1 #define SOCKET_ERROR -1 #define CLOSE_SOCKET close #define SOCKET int #endif #define TCP_PORT 27015 #define IP_ADDRESS "127.0.0.1" namespace CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82 { #ifndef OMITBAD void bad() { char * data; char dataBuffer[100] = ""; data = dataBuffer; { #ifdef _WIN32 WSADATA wsaData; int wsaDataInit = 0; #endif int recvResult; struct sockaddr_in service; char *replace; SOCKET connectSocket = INVALID_SOCKET; size_t dataLen = strlen(data); do { #ifdef _WIN32 if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR) { break; } wsaDataInit = 1; #endif /* POTENTIAL FLAW: Read data using a connect socket */ connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); if (connectSocket == INVALID_SOCKET) { break; } memset(&service, 0, sizeof(service)); service.sin_family = AF_INET; service.sin_addr.s_addr = inet_addr(IP_ADDRESS); service.sin_port = 
htons(TCP_PORT); if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR) { break; } /* Abort on error or the connection was closed, make sure to recv one * less char than is in the recv_buf in order to append a terminator */ /* Abort on error or the connection was closed */ recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (100 - dataLen - 1), 0); if (recvResult == SOCKET_ERROR || recvResult == 0) { break; } /* Append null terminator */ data[dataLen + recvResult / sizeof(char)] = '\0'; /* Eliminate CRLF */ replace = strchr(data, '\r'); if (replace) { *replace = '\0'; } replace = strchr(data, '\n'); if (replace) { *replace = '\0'; } } while (0); if (connectSocket != INVALID_SOCKET) { CLOSE_SOCKET(connectSocket); } #ifdef _WIN32 if (wsaDataInit) { WSACleanup(); } #endif } CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_bad; baseObject->action(data); delete baseObject; } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B uses the GoodSource with the BadSink */ static void goodG2B() { char * data; char dataBuffer[100] = ""; data = dataBuffer; /* FIX: Use a fixed string that does not contain a format specifier */ strcpy(data, "fixedstringtest"); CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_goodG2B; baseObject->action(data); delete baseObject; } /* goodB2G uses the BadSource with the GoodSink */ static void goodB2G() { char * data; char dataBuffer[100] = ""; data = dataBuffer; { #ifdef _WIN32 WSADATA wsaData; int wsaDataInit = 0; #endif int recvResult; struct sockaddr_in service; char *replace; SOCKET connectSocket = INVALID_SOCKET; size_t dataLen = strlen(data); do { #ifdef _WIN32 if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR) { break; } wsaDataInit = 1; #endif /* POTENTIAL FLAW: Read data using a 
connect socket */ connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); if (connectSocket == INVALID_SOCKET) { break; } memset(&service, 0, sizeof(service)); service.sin_family = AF_INET; service.sin_addr.s_addr = inet_addr(IP_ADDRESS); service.sin_port = htons(TCP_PORT); if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR) { break; } /* Abort on error or the connection was closed, make sure to recv one * less char than is in the recv_buf in order to append a terminator */ /* Abort on error or the connection was closed */ recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (100 - dataLen - 1), 0); if (recvResult == SOCKET_ERROR || recvResult == 0) { break; } /* Append null terminator */ data[dataLen + recvResult / sizeof(char)] = '\0'; /* Eliminate CRLF */ replace = strchr(data, '\r'); if (replace) { *replace = '\0'; } replace = strchr(data, '\n'); if (replace) { *replace = '\0'; } } while (0); if (connectSocket != INVALID_SOCKET) { CLOSE_SOCKET(connectSocket); } #ifdef _WIN32 if (wsaDataInit) { WSACleanup(); } #endif } CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_goodB2G; baseObject->action(data); delete baseObject; } void good() { goodG2B(); goodB2G(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. 
*/ #ifdef INCLUDEMAIN using namespace CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE134_Uncontrolled_Format_String/s01/CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82a.cpp
C++
bsd-3-clause
7,904
/* * libjingle * Copyright 2010, Google Inc. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include <stdio.h> #include <string> #include "talk/p2p/base/sessionmessages.h" #include "talk/base/logging.h" #include "talk/base/scoped_ptr.h" #include "talk/base/stringutils.h" #include "talk/p2p/base/constants.h" #include "talk/p2p/base/p2ptransport.h" #include "talk/p2p/base/parsing.h" #include "talk/p2p/base/sessionclient.h" #include "talk/p2p/base/sessiondescription.h" #include "talk/p2p/base/transport.h" #include "talk/xmllite/xmlconstants.h" #include "talk/xmpp/constants.h" namespace cricket { ActionType ToActionType(const std::string& type) { if (type == GINGLE_ACTION_INITIATE) return ACTION_SESSION_INITIATE; if (type == GINGLE_ACTION_INFO) return ACTION_SESSION_INFO; if (type == GINGLE_ACTION_ACCEPT) return ACTION_SESSION_ACCEPT; if (type == GINGLE_ACTION_REJECT) return ACTION_SESSION_REJECT; if (type == GINGLE_ACTION_TERMINATE) return ACTION_SESSION_TERMINATE; if (type == GINGLE_ACTION_CANDIDATES) return ACTION_TRANSPORT_INFO; if (type == JINGLE_ACTION_SESSION_INITIATE) return ACTION_SESSION_INITIATE; if (type == JINGLE_ACTION_TRANSPORT_INFO) return ACTION_TRANSPORT_INFO; if (type == JINGLE_ACTION_TRANSPORT_ACCEPT) return ACTION_TRANSPORT_ACCEPT; if (type == JINGLE_ACTION_SESSION_INFO) return ACTION_SESSION_INFO; if (type == JINGLE_ACTION_SESSION_ACCEPT) return ACTION_SESSION_ACCEPT; if (type == JINGLE_ACTION_SESSION_TERMINATE) return ACTION_SESSION_TERMINATE; if (type == JINGLE_ACTION_TRANSPORT_INFO) return ACTION_TRANSPORT_INFO; if (type == JINGLE_ACTION_TRANSPORT_ACCEPT) return ACTION_TRANSPORT_ACCEPT; if (type == JINGLE_ACTION_DESCRIPTION_INFO) return ACTION_DESCRIPTION_INFO; if (type == GINGLE_ACTION_UPDATE) return ACTION_DESCRIPTION_INFO; return ACTION_UNKNOWN; } std::string ToJingleString(ActionType type) { switch (type) { case ACTION_SESSION_INITIATE: return JINGLE_ACTION_SESSION_INITIATE; case ACTION_SESSION_INFO: return JINGLE_ACTION_SESSION_INFO; case ACTION_SESSION_ACCEPT: return JINGLE_ACTION_SESSION_ACCEPT; // Notice that reject and 
terminate both go to // "session-terminate", but there is no "session-reject". case ACTION_SESSION_REJECT: case ACTION_SESSION_TERMINATE: return JINGLE_ACTION_SESSION_TERMINATE; case ACTION_TRANSPORT_INFO: return JINGLE_ACTION_TRANSPORT_INFO; case ACTION_TRANSPORT_ACCEPT: return JINGLE_ACTION_TRANSPORT_ACCEPT; default: return ""; } } std::string ToGingleString(ActionType type) { switch (type) { case ACTION_SESSION_INITIATE: return GINGLE_ACTION_INITIATE; case ACTION_SESSION_INFO: return GINGLE_ACTION_INFO; case ACTION_SESSION_ACCEPT: return GINGLE_ACTION_ACCEPT; case ACTION_SESSION_REJECT: return GINGLE_ACTION_REJECT; case ACTION_SESSION_TERMINATE: return GINGLE_ACTION_TERMINATE; case ACTION_TRANSPORT_INFO: return GINGLE_ACTION_CANDIDATES; default: return ""; } } bool IsJingleMessage(const buzz::XmlElement* stanza) { const buzz::XmlElement* jingle = stanza->FirstNamed(QN_JINGLE); if (jingle == NULL) return false; return (jingle->HasAttr(buzz::QN_ACTION) && jingle->HasAttr(QN_SID)); } bool IsGingleMessage(const buzz::XmlElement* stanza) { const buzz::XmlElement* session = stanza->FirstNamed(QN_GINGLE_SESSION); if (session == NULL) return false; return (session->HasAttr(buzz::QN_TYPE) && session->HasAttr(buzz::QN_ID) && session->HasAttr(QN_INITIATOR)); } bool IsSessionMessage(const buzz::XmlElement* stanza) { return (stanza->Name() == buzz::QN_IQ && stanza->Attr(buzz::QN_TYPE) == buzz::STR_SET && (IsJingleMessage(stanza) || IsGingleMessage(stanza))); } bool ParseGingleSessionMessage(const buzz::XmlElement* session, SessionMessage* msg, ParseError* error) { msg->protocol = PROTOCOL_GINGLE; std::string type_string = session->Attr(buzz::QN_TYPE); msg->type = ToActionType(type_string); msg->sid = session->Attr(buzz::QN_ID); msg->initiator = session->Attr(QN_INITIATOR); msg->action_elem = session; if (msg->type == ACTION_UNKNOWN) return BadParse("unknown action: " + type_string, error); return true; } bool ParseJingleSessionMessage(const buzz::XmlElement* jingle, 
SessionMessage* msg, ParseError* error) { msg->protocol = PROTOCOL_JINGLE; std::string type_string = jingle->Attr(buzz::QN_ACTION); msg->type = ToActionType(type_string); msg->sid = jingle->Attr(QN_SID); msg->initiator = GetXmlAttr(jingle, QN_INITIATOR, buzz::STR_EMPTY); msg->action_elem = jingle; if (msg->type == ACTION_UNKNOWN) return BadParse("unknown action: " + type_string, error); return true; } bool ParseHybridSessionMessage(const buzz::XmlElement* jingle, SessionMessage* msg, ParseError* error) { if (!ParseJingleSessionMessage(jingle, msg, error)) return false; msg->protocol = PROTOCOL_HYBRID; return true; } bool ParseSessionMessage(const buzz::XmlElement* stanza, SessionMessage* msg, ParseError* error) { msg->id = stanza->Attr(buzz::QN_ID); msg->from = stanza->Attr(buzz::QN_FROM); msg->to = stanza->Attr(buzz::QN_TO); msg->stanza = stanza; const buzz::XmlElement* jingle = stanza->FirstNamed(QN_JINGLE); const buzz::XmlElement* session = stanza->FirstNamed(QN_GINGLE_SESSION); if (jingle && session) return ParseHybridSessionMessage(jingle, msg, error); if (jingle != NULL) return ParseJingleSessionMessage(jingle, msg, error); if (session != NULL) return ParseGingleSessionMessage(session, msg, error); return false; } buzz::XmlElement* WriteGingleAction(const SessionMessage& msg, const XmlElements& action_elems) { buzz::XmlElement* session = new buzz::XmlElement(QN_GINGLE_SESSION, true); session->AddAttr(buzz::QN_TYPE, ToGingleString(msg.type)); session->AddAttr(buzz::QN_ID, msg.sid); session->AddAttr(QN_INITIATOR, msg.initiator); AddXmlChildren(session, action_elems); return session; } buzz::XmlElement* WriteJingleAction(const SessionMessage& msg, const XmlElements& action_elems) { buzz::XmlElement* jingle = new buzz::XmlElement(QN_JINGLE, true); jingle->AddAttr(buzz::QN_ACTION, ToJingleString(msg.type)); jingle->AddAttr(QN_SID, msg.sid); if (msg.type == ACTION_SESSION_INITIATE) { jingle->AddAttr(QN_INITIATOR, msg.initiator); } AddXmlChildren(jingle, 
action_elems); return jingle; } void WriteSessionMessage(const SessionMessage& msg, const XmlElements& action_elems, buzz::XmlElement* stanza) { stanza->SetAttr(buzz::QN_TO, msg.to); stanza->SetAttr(buzz::QN_TYPE, buzz::STR_SET); if (msg.protocol == PROTOCOL_GINGLE) { stanza->AddElement(WriteGingleAction(msg, action_elems)); } else { stanza->AddElement(WriteJingleAction(msg, action_elems)); } } TransportParser* GetTransportParser(const TransportParserMap& trans_parsers, const std::string& name) { TransportParserMap::const_iterator map = trans_parsers.find(name); if (map == trans_parsers.end()) { return NULL; } else { return map->second; } } bool ParseCandidates(SignalingProtocol protocol, const buzz::XmlElement* candidates_elem, const TransportParserMap& trans_parsers, const std::string& transport_type, Candidates* candidates, ParseError* error) { TransportParser* trans_parser = GetTransportParser(trans_parsers, transport_type); if (trans_parser == NULL) return BadParse("unknown transport type: " + transport_type, error); return trans_parser->ParseCandidates(protocol, candidates_elem, candidates, error); } bool ParseGingleTransportInfos(const buzz::XmlElement* action_elem, const ContentInfos& contents, const TransportParserMap& trans_parsers, TransportInfos* tinfos, ParseError* error) { TransportInfo tinfo(CN_OTHER, NS_GINGLE_P2P, Candidates()); if (!ParseCandidates(PROTOCOL_GINGLE, action_elem, trans_parsers, NS_GINGLE_P2P, &tinfo.candidates, error)) return false; bool has_audio = FindContentInfoByName(contents, CN_AUDIO) != NULL; bool has_video = FindContentInfoByName(contents, CN_VIDEO) != NULL; // If we don't have media, no need to separate the candidates. if (!has_audio && !has_video) { tinfos->push_back(tinfo); return true; } // If we have media, separate the candidates. Create the // TransportInfo here to avoid copying the candidates. 
TransportInfo audio_tinfo(CN_AUDIO, NS_GINGLE_P2P, Candidates()); TransportInfo video_tinfo(CN_VIDEO, NS_GINGLE_P2P, Candidates()); for (Candidates::iterator cand = tinfo.candidates.begin(); cand != tinfo.candidates.end(); cand++) { if (cand->name() == GINGLE_CANDIDATE_NAME_RTP || cand->name() == GINGLE_CANDIDATE_NAME_RTCP) { audio_tinfo.candidates.push_back(*cand); } else if (cand->name() == GINGLE_CANDIDATE_NAME_VIDEO_RTP || cand->name() == GINGLE_CANDIDATE_NAME_VIDEO_RTCP) { video_tinfo.candidates.push_back(*cand); } } if (has_audio) { tinfos->push_back(audio_tinfo); } if (has_video) { tinfos->push_back(video_tinfo); } return true; } bool ParseJingleTransportInfo(const buzz::XmlElement* trans_elem, const ContentInfo& content, const TransportParserMap& trans_parsers, TransportInfos* tinfos, ParseError* error) { std::string transport_type = trans_elem->Name().Namespace(); TransportInfo tinfo(content.name, transport_type, Candidates()); if (!ParseCandidates(PROTOCOL_JINGLE, trans_elem, trans_parsers, transport_type, &tinfo.candidates, error)) return false; tinfos->push_back(tinfo); return true; } bool ParseJingleTransportInfos(const buzz::XmlElement* jingle, const ContentInfos& contents, const TransportParserMap trans_parsers, TransportInfos* tinfos, ParseError* error) { for (const buzz::XmlElement* pair_elem = jingle->FirstNamed(QN_JINGLE_CONTENT); pair_elem != NULL; pair_elem = pair_elem->NextNamed(QN_JINGLE_CONTENT)) { std::string content_name; if (!RequireXmlAttr(pair_elem, QN_JINGLE_CONTENT_NAME, &content_name, error)) return false; const ContentInfo* content = FindContentInfoByName(contents, content_name); if (!content) return BadParse("Unknown content name: " + content_name, error); const buzz::XmlElement* trans_elem; if (!RequireXmlChild(pair_elem, LN_TRANSPORT, &trans_elem, error)) return false; if (!ParseJingleTransportInfo(trans_elem, *content, trans_parsers, tinfos, error)) return false; } return true; } buzz::XmlElement* NewTransportElement(const 
std::string& name) { return new buzz::XmlElement(buzz::QName(name, LN_TRANSPORT), true); } bool WriteCandidates(SignalingProtocol protocol, const std::string& trans_type, const Candidates& candidates, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { TransportParser* trans_parser = GetTransportParser(trans_parsers, trans_type); if (trans_parser == NULL) return BadWrite("unknown transport type: " + trans_type, error); return trans_parser->WriteCandidates(protocol, candidates, elems, error); } bool WriteGingleTransportInfos(const TransportInfos& tinfos, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { for (TransportInfos::const_iterator tinfo = tinfos.begin(); tinfo != tinfos.end(); ++tinfo) { if (!WriteCandidates(PROTOCOL_GINGLE, tinfo->transport_type, tinfo->candidates, trans_parsers, elems, error)) return false; } return true; } bool WriteJingleTransportInfo(const TransportInfo& tinfo, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { XmlElements candidate_elems; if (!WriteCandidates(PROTOCOL_JINGLE, tinfo.transport_type, tinfo.candidates, trans_parsers, &candidate_elems, error)) return false; buzz::XmlElement* trans_elem = NewTransportElement(tinfo.transport_type); AddXmlChildren(trans_elem, candidate_elems); elems->push_back(trans_elem); return true; } void WriteJingleContentPair(const std::string name, const XmlElements& pair_elems, XmlElements* elems) { buzz::XmlElement* pair_elem = new buzz::XmlElement(QN_JINGLE_CONTENT); pair_elem->SetAttr(QN_JINGLE_CONTENT_NAME, name); pair_elem->SetAttr(QN_CREATOR, LN_INITIATOR); AddXmlChildren(pair_elem, pair_elems); elems->push_back(pair_elem); } bool WriteJingleTransportInfos(const TransportInfos& tinfos, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { for (TransportInfos::const_iterator tinfo = tinfos.begin(); tinfo != tinfos.end(); ++tinfo) { XmlElements pair_elems; if 
(!WriteJingleTransportInfo(*tinfo, trans_parsers, &pair_elems, error)) return false; WriteJingleContentPair(tinfo->content_name, pair_elems, elems); } return true; } ContentParser* GetContentParser(const ContentParserMap& content_parsers, const std::string& type) { ContentParserMap::const_iterator map = content_parsers.find(type); if (map == content_parsers.end()) { return NULL; } else { return map->second; } } bool ParseContentInfo(SignalingProtocol protocol, const std::string& name, const std::string& type, const buzz::XmlElement* elem, const ContentParserMap& parsers, ContentInfos* contents, ParseError* error) { ContentParser* parser = GetContentParser(parsers, type); if (parser == NULL) return BadParse("unknown application content: " + type, error); const ContentDescription* desc; if (!parser->ParseContent(protocol, elem, &desc, error)) return false; contents->push_back(ContentInfo(name, type, desc)); return true; } bool ParseContentType(const buzz::XmlElement* parent_elem, std::string* content_type, const buzz::XmlElement** content_elem, ParseError* error) { if (!RequireXmlChild(parent_elem, LN_DESCRIPTION, content_elem, error)) return false; *content_type = (*content_elem)->Name().Namespace(); return true; } bool ParseGingleContentInfos(const buzz::XmlElement* session, const ContentParserMap& content_parsers, ContentInfos* contents, ParseError* error) { std::string content_type; const buzz::XmlElement* content_elem; if (!ParseContentType(session, &content_type, &content_elem, error)) return false; if (content_type == NS_GINGLE_VIDEO) { // A parser parsing audio or video content should look at the // namespace and only parse the codecs relevant to that namespace. // We use this to control which codecs get parsed: first audio, // then video. 
talk_base::scoped_ptr<buzz::XmlElement> audio_elem( new buzz::XmlElement(QN_GINGLE_AUDIO_CONTENT)); CopyXmlChildren(content_elem, audio_elem.get()); if (!ParseContentInfo(PROTOCOL_GINGLE, CN_AUDIO, NS_JINGLE_RTP, audio_elem.get(), content_parsers, contents, error)) return false; if (!ParseContentInfo(PROTOCOL_GINGLE, CN_VIDEO, NS_JINGLE_RTP, content_elem, content_parsers, contents, error)) return false; } else if (content_type == NS_GINGLE_AUDIO) { if (!ParseContentInfo(PROTOCOL_GINGLE, CN_AUDIO, NS_JINGLE_RTP, content_elem, content_parsers, contents, error)) return false; } else { if (!ParseContentInfo(PROTOCOL_GINGLE, CN_OTHER, content_type, content_elem, content_parsers, contents, error)) return false; } return true; } bool ParseJingleContentInfos(const buzz::XmlElement* jingle, const ContentParserMap& content_parsers, ContentInfos* contents, ParseError* error) { for (const buzz::XmlElement* pair_elem = jingle->FirstNamed(QN_JINGLE_CONTENT); pair_elem != NULL; pair_elem = pair_elem->NextNamed(QN_JINGLE_CONTENT)) { std::string content_name; if (!RequireXmlAttr(pair_elem, QN_JINGLE_CONTENT_NAME, &content_name, error)) return false; std::string content_type; const buzz::XmlElement* content_elem; if (!ParseContentType(pair_elem, &content_type, &content_elem, error)) return false; if (!ParseContentInfo(PROTOCOL_JINGLE, content_name, content_type, content_elem, content_parsers, contents, error)) return false; } return true; } bool ParseJingleGroupInfos(const buzz::XmlElement* jingle, ContentGroups* groups, ParseError* error) { for (const buzz::XmlElement* pair_elem = jingle->FirstNamed(QN_JINGLE_DRAFT_GROUP); pair_elem != NULL; pair_elem = pair_elem->NextNamed(QN_JINGLE_DRAFT_GROUP)) { std::string group_name; if (!RequireXmlAttr(pair_elem, QN_JINGLE_DRAFT_GROUP_TYPE, &group_name, error)) return false; ContentGroup group(group_name); for (const buzz::XmlElement* child_elem = pair_elem->FirstNamed(QN_JINGLE_CONTENT); child_elem != NULL; child_elem = 
child_elem->NextNamed(QN_JINGLE_CONTENT)) { std::string content_name; if (!RequireXmlAttr(child_elem, QN_JINGLE_CONTENT_NAME, &content_name, error)) return false; group.AddContentName(content_name); } groups->push_back(group); } return true; } buzz::XmlElement* WriteContentInfo(SignalingProtocol protocol, const ContentInfo& content, const ContentParserMap& parsers, WriteError* error) { ContentParser* parser = GetContentParser(parsers, content.type); if (parser == NULL) { BadWrite("unknown content type: " + content.type, error); return NULL; } buzz::XmlElement* elem = NULL; if (!parser->WriteContent(protocol, content.description, &elem, error)) return NULL; return elem; } bool WriteGingleContentInfos(const ContentInfos& contents, const ContentParserMap& parsers, XmlElements* elems, WriteError* error) { if (contents.size() == 1) { buzz::XmlElement* elem = WriteContentInfo( PROTOCOL_GINGLE, contents.front(), parsers, error); if (!elem) return false; elems->push_back(elem); } else if (contents.size() == 2 && contents.at(0).type == NS_JINGLE_RTP && contents.at(1).type == NS_JINGLE_RTP) { // Special-case audio + video contents so that they are "merged" // into one "video" content. 
buzz::XmlElement* audio = WriteContentInfo( PROTOCOL_GINGLE, contents.at(0), parsers, error); if (!audio) return false; buzz::XmlElement* video = WriteContentInfo( PROTOCOL_GINGLE, contents.at(1), parsers, error); if (!video) { delete audio; return false; } CopyXmlChildren(audio, video); elems->push_back(video); delete audio; } else { return BadWrite("Gingle protocol may only have one content.", error); } return true; } const TransportInfo* GetTransportInfoByContentName( const TransportInfos& tinfos, const std::string& content_name) { for (TransportInfos::const_iterator tinfo = tinfos.begin(); tinfo != tinfos.end(); ++tinfo) { if (content_name == tinfo->content_name) { return &*tinfo; } } return NULL; } bool WriteJingleContentPairs(const ContentInfos& contents, const ContentParserMap& content_parsers, const TransportInfos& tinfos, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { for (ContentInfos::const_iterator content = contents.begin(); content != contents.end(); ++content) { const TransportInfo* tinfo = GetTransportInfoByContentName(tinfos, content->name); if (!tinfo) return BadWrite("No transport for content: " + content->name, error); XmlElements pair_elems; buzz::XmlElement* elem = WriteContentInfo( PROTOCOL_JINGLE, *content, content_parsers, error); if (!elem) return false; pair_elems.push_back(elem); if (!WriteJingleTransportInfo(*tinfo, trans_parsers, &pair_elems, error)) return false; WriteJingleContentPair(content->name, pair_elems, elems); } return true; } bool WriteJingleGroupInfo(const ContentInfos& contents, const ContentGroups& groups, XmlElements* elems, WriteError* error) { if (!groups.empty()) { buzz::XmlElement* pair_elem = new buzz::XmlElement(QN_JINGLE_DRAFT_GROUP); pair_elem->SetAttr(QN_JINGLE_DRAFT_GROUP_TYPE, GROUP_TYPE_BUNDLE); XmlElements pair_elems; for (ContentInfos::const_iterator content = contents.begin(); content != contents.end(); ++content) { buzz::XmlElement* child_elem = new 
buzz::XmlElement(QN_JINGLE_CONTENT, false); child_elem->SetAttr(QN_JINGLE_CONTENT_NAME, content->name); pair_elems.push_back(child_elem); } AddXmlChildren(pair_elem, pair_elems); elems->push_back(pair_elem); } return true; } bool ParseContentType(SignalingProtocol protocol, const buzz::XmlElement* action_elem, std::string* content_type, ParseError* error) { const buzz::XmlElement* content_elem; if (protocol == PROTOCOL_GINGLE) { if (!ParseContentType(action_elem, content_type, &content_elem, error)) return false; // Internally, we only use NS_JINGLE_RTP. if (*content_type == NS_GINGLE_AUDIO || *content_type == NS_GINGLE_VIDEO) *content_type = NS_JINGLE_RTP; } else { const buzz::XmlElement* pair_elem = action_elem->FirstNamed(QN_JINGLE_CONTENT); if (pair_elem == NULL) return BadParse("No contents found", error); if (!ParseContentType(pair_elem, content_type, &content_elem, error)) return false; // If there is more than one content type, return an error. for (; pair_elem != NULL; pair_elem = pair_elem->NextNamed(QN_JINGLE_CONTENT)) { std::string content_type2; if (!ParseContentType(pair_elem, &content_type2, &content_elem, error)) return false; if (content_type2 != *content_type) return BadParse("More than one content type found", error); } } return true; } static bool ParseContentMessage( SignalingProtocol protocol, const buzz::XmlElement* action_elem, bool expect_transports, const ContentParserMap& content_parsers, const TransportParserMap& trans_parsers, SessionInitiate* init, ParseError* error) { init->owns_contents = true; if (protocol == PROTOCOL_GINGLE) { if (!ParseGingleContentInfos(action_elem, content_parsers, &init->contents, error)) return false; if (expect_transports && !ParseGingleTransportInfos(action_elem, init->contents, trans_parsers, &init->transports, error)) return false; } else { if (!ParseJingleContentInfos(action_elem, content_parsers, &init->contents, error)) return false; if (!ParseJingleGroupInfos(action_elem, &init->groups, error)) return 
false; if (expect_transports && !ParseJingleTransportInfos(action_elem, init->contents, trans_parsers, &init->transports, error)) return false; } return true; } static bool WriteContentMessage( SignalingProtocol protocol, const ContentInfos& contents, const TransportInfos& tinfos, const ContentParserMap& content_parsers, const TransportParserMap& transport_parsers, const ContentGroups& groups, XmlElements* elems, WriteError* error) { if (protocol == PROTOCOL_GINGLE) { if (!WriteGingleContentInfos(contents, content_parsers, elems, error)) return false; if (!WriteGingleTransportInfos(tinfos, transport_parsers, elems, error)) return false; } else { if (!WriteJingleContentPairs(contents, content_parsers, tinfos, transport_parsers, elems, error)) return false; if (!WriteJingleGroupInfo(contents, groups, elems, error)) return false; } return true; } bool ParseSessionInitiate(SignalingProtocol protocol, const buzz::XmlElement* action_elem, const ContentParserMap& content_parsers, const TransportParserMap& trans_parsers, SessionInitiate* init, ParseError* error) { bool expect_transports = true; return ParseContentMessage(protocol, action_elem, expect_transports, content_parsers, trans_parsers, init, error); } bool WriteSessionInitiate(SignalingProtocol protocol, const ContentInfos& contents, const TransportInfos& tinfos, const ContentParserMap& content_parsers, const TransportParserMap& transport_parsers, const ContentGroups& groups, XmlElements* elems, WriteError* error) { return WriteContentMessage(protocol, contents, tinfos, content_parsers, transport_parsers, groups, elems, error); } bool ParseSessionAccept(SignalingProtocol protocol, const buzz::XmlElement* action_elem, const ContentParserMap& content_parsers, const TransportParserMap& transport_parsers, SessionAccept* accept, ParseError* error) { bool expect_transports = true; return ParseContentMessage(protocol, action_elem, expect_transports, content_parsers, transport_parsers, accept, error); } bool 
WriteSessionAccept(SignalingProtocol protocol, const ContentInfos& contents, const TransportInfos& tinfos, const ContentParserMap& content_parsers, const TransportParserMap& transport_parsers, const ContentGroups& groups, XmlElements* elems, WriteError* error) { return WriteContentMessage(protocol, contents, tinfos, content_parsers, transport_parsers, groups, elems, error); } bool ParseSessionTerminate(SignalingProtocol protocol, const buzz::XmlElement* action_elem, SessionTerminate* term, ParseError* error) { if (protocol == PROTOCOL_GINGLE) { const buzz::XmlElement* reason_elem = action_elem->FirstElement(); if (reason_elem != NULL) { term->reason = reason_elem->Name().LocalPart(); const buzz::XmlElement *debug_elem = reason_elem->FirstElement(); if (debug_elem != NULL) { term->debug_reason = debug_elem->Name().LocalPart(); } } return true; } else { const buzz::XmlElement* reason_elem = action_elem->FirstNamed(QN_JINGLE_REASON); if (reason_elem) { reason_elem = reason_elem->FirstElement(); if (reason_elem) { term->reason = reason_elem->Name().LocalPart(); } } return true; } } void WriteSessionTerminate(SignalingProtocol protocol, const SessionTerminate& term, XmlElements* elems) { if (protocol == PROTOCOL_GINGLE) { elems->push_back(new buzz::XmlElement(buzz::QName(NS_GINGLE, term.reason))); } else { if (!term.reason.empty()) { buzz::XmlElement* reason_elem = new buzz::XmlElement(QN_JINGLE_REASON); reason_elem->AddElement(new buzz::XmlElement( buzz::QName(NS_JINGLE, term.reason))); elems->push_back(reason_elem); } } } bool ParseDescriptionInfo(SignalingProtocol protocol, const buzz::XmlElement* action_elem, const ContentParserMap& content_parsers, const TransportParserMap& transport_parsers, DescriptionInfo* description_info, ParseError* error) { bool expect_transports = false; return ParseContentMessage(protocol, action_elem, expect_transports, content_parsers, transport_parsers, description_info, error); } bool ParseTransportInfos(SignalingProtocol protocol, 
const buzz::XmlElement* action_elem, const ContentInfos& contents, const TransportParserMap& trans_parsers, TransportInfos* tinfos, ParseError* error) { if (protocol == PROTOCOL_GINGLE) { return ParseGingleTransportInfos( action_elem, contents, trans_parsers, tinfos, error); } else { return ParseJingleTransportInfos( action_elem, contents, trans_parsers, tinfos, error); } } bool WriteTransportInfos(SignalingProtocol protocol, const TransportInfos& tinfos, const TransportParserMap& trans_parsers, XmlElements* elems, WriteError* error) { if (protocol == PROTOCOL_GINGLE) { return WriteGingleTransportInfos(tinfos, trans_parsers, elems, error); } else { return WriteJingleTransportInfos(tinfos, trans_parsers, elems, error); } } bool GetUriTarget(const std::string& prefix, const std::string& str, std::string* after) { size_t pos = str.find(prefix); if (pos == std::string::npos) return false; *after = str.substr(pos + prefix.size(), std::string::npos); return true; } bool FindSessionRedirect(const buzz::XmlElement* stanza, SessionRedirect* redirect) { const buzz::XmlElement* error_elem = GetXmlChild(stanza, LN_ERROR); if (error_elem == NULL) return false; const buzz::XmlElement* redirect_elem = error_elem->FirstNamed(QN_GINGLE_REDIRECT); if (redirect_elem == NULL) redirect_elem = error_elem->FirstNamed(buzz::QN_STANZA_REDIRECT); if (redirect_elem == NULL) return false; if (!GetUriTarget(STR_REDIRECT_PREFIX, redirect_elem->BodyText(), &redirect->target)) return false; return true; } } // namespace cricket
muzili/libjingle-0.6.14
talk/p2p/base/sessionmessages.cc
C++
bsd-3-clause
34,550
from __future__ import division, absolute_import, print_function import collections import tempfile import sys import shutil import warnings import operator import io import itertools if sys.version_info[0] >= 3: import builtins else: import __builtin__ as builtins from decimal import Decimal import numpy as np from nose import SkipTest from numpy.compat import asbytes, getexception, strchar, unicode, sixu from test_print import in_foreign_locale from numpy.core.multiarray_tests import ( test_neighborhood_iterator, test_neighborhood_iterator_oob, test_pydatamem_seteventhook_start, test_pydatamem_seteventhook_end, test_inplace_increment, get_buffer_info, test_as_c_array ) from numpy.testing import ( TestCase, run_module_suite, assert_, assert_raises, assert_equal, assert_almost_equal, assert_array_equal, assert_array_almost_equal, assert_allclose, assert_array_less, runstring, dec ) # Need to test an object that does not fully implement math interface from datetime import timedelta if sys.version_info[:2] > (3, 2): # In Python 3.3 the representation of empty shape, strides and suboffsets # is an empty tuple instead of None. 
# http://docs.python.org/dev/whatsnew/3.3.html#api-changes EMPTY = () else: EMPTY = None class TestFlags(TestCase): def setUp(self): self.a = np.arange(10) def test_writeable(self): mydict = locals() self.a.flags.writeable = False self.assertRaises(ValueError, runstring, 'self.a[0] = 3', mydict) self.assertRaises(ValueError, runstring, 'self.a[0:1].itemset(3)', mydict) self.a.flags.writeable = True self.a[0] = 5 self.a[0] = 0 def test_otherflags(self): assert_equal(self.a.flags.carray, True) assert_equal(self.a.flags.farray, False) assert_equal(self.a.flags.behaved, True) assert_equal(self.a.flags.fnc, False) assert_equal(self.a.flags.forc, True) assert_equal(self.a.flags.owndata, True) assert_equal(self.a.flags.writeable, True) assert_equal(self.a.flags.aligned, True) assert_equal(self.a.flags.updateifcopy, False) def test_string_align(self): a = np.zeros(4, dtype=np.dtype('|S4')) assert_(a.flags.aligned) # not power of two are accessed bytewise and thus considered aligned a = np.zeros(5, dtype=np.dtype('|S4')) assert_(a.flags.aligned) def test_void_align(self): a = np.zeros(4, dtype=np.dtype([("a", "i4"), ("b", "i4")])) assert_(a.flags.aligned) class TestHash(TestCase): # see #3793 def test_int(self): for st, ut, s in [(np.int8, np.uint8, 8), (np.int16, np.uint16, 16), (np.int32, np.uint32, 32), (np.int64, np.uint64, 64)]: for i in range(1, s): assert_equal(hash(st(-2**i)), hash(-2**i), err_msg="%r: -2**%d" % (st, i)) assert_equal(hash(st(2**(i - 1))), hash(2**(i - 1)), err_msg="%r: 2**%d" % (st, i - 1)) assert_equal(hash(st(2**i - 1)), hash(2**i - 1), err_msg="%r: 2**%d - 1" % (st, i)) i = max(i - 1, 1) assert_equal(hash(ut(2**(i - 1))), hash(2**(i - 1)), err_msg="%r: 2**%d" % (ut, i - 1)) assert_equal(hash(ut(2**i - 1)), hash(2**i - 1), err_msg="%r: 2**%d - 1" % (ut, i)) class TestAttributes(TestCase): def setUp(self): self.one = np.arange(10) self.two = np.arange(20).reshape(4, 5) self.three = np.arange(60, dtype=np.float64).reshape(2, 5, 6) def 
test_attributes(self): assert_equal(self.one.shape, (10,)) assert_equal(self.two.shape, (4, 5)) assert_equal(self.three.shape, (2, 5, 6)) self.three.shape = (10, 3, 2) assert_equal(self.three.shape, (10, 3, 2)) self.three.shape = (2, 5, 6) assert_equal(self.one.strides, (self.one.itemsize,)) num = self.two.itemsize assert_equal(self.two.strides, (5*num, num)) num = self.three.itemsize assert_equal(self.three.strides, (30*num, 6*num, num)) assert_equal(self.one.ndim, 1) assert_equal(self.two.ndim, 2) assert_equal(self.three.ndim, 3) num = self.two.itemsize assert_equal(self.two.size, 20) assert_equal(self.two.nbytes, 20*num) assert_equal(self.two.itemsize, self.two.dtype.itemsize) assert_equal(self.two.base, np.arange(20)) def test_dtypeattr(self): assert_equal(self.one.dtype, np.dtype(np.int_)) assert_equal(self.three.dtype, np.dtype(np.float_)) assert_equal(self.one.dtype.char, 'l') assert_equal(self.three.dtype.char, 'd') self.assertTrue(self.three.dtype.str[0] in '<>') assert_equal(self.one.dtype.str[1], 'i') assert_equal(self.three.dtype.str[1], 'f') def test_int_subclassing(self): # Regression test for https://github.com/numpy/numpy/pull/3526 numpy_int = np.int_(0) if sys.version_info[0] >= 3: # On Py3k int_ should not inherit from int, because it's not fixed-width anymore assert_equal(isinstance(numpy_int, int), False) else: # Otherwise, it should inherit from int... assert_equal(isinstance(numpy_int, int), True) # ... 
and fast-path checks on C-API level should also work from numpy.core.multiarray_tests import test_int_subclass assert_equal(test_int_subclass(numpy_int), True) def test_stridesattr(self): x = self.one def make_array(size, offset, strides): return np.ndarray(size, buffer=x, dtype=int, offset=offset*x.itemsize, strides=strides*x.itemsize) assert_equal(make_array(4, 4, -1), np.array([4, 3, 2, 1])) self.assertRaises(ValueError, make_array, 4, 4, -2) self.assertRaises(ValueError, make_array, 4, 2, -1) self.assertRaises(ValueError, make_array, 8, 3, 1) assert_equal(make_array(8, 3, 0), np.array([3]*8)) # Check behavior reported in gh-2503: self.assertRaises(ValueError, make_array, (2, 3), 5, np.array([-2, -3])) make_array(0, 0, 10) def test_set_stridesattr(self): x = self.one def make_array(size, offset, strides): try: r = np.ndarray([size], dtype=int, buffer=x, offset=offset*x.itemsize) except: raise RuntimeError(getexception()) r.strides = strides = strides*x.itemsize return r assert_equal(make_array(4, 4, -1), np.array([4, 3, 2, 1])) assert_equal(make_array(7, 3, 1), np.array([3, 4, 5, 6, 7, 8, 9])) self.assertRaises(ValueError, make_array, 4, 4, -2) self.assertRaises(ValueError, make_array, 4, 2, -1) self.assertRaises(RuntimeError, make_array, 8, 3, 1) # Check that the true extent of the array is used. # Test relies on as_strided base not exposing a buffer. x = np.lib.stride_tricks.as_strided(np.arange(1), (10, 10), (0, 0)) def set_strides(arr, strides): arr.strides = strides self.assertRaises(ValueError, set_strides, x, (10*x.itemsize, x.itemsize)) # Test for offset calculations: x = np.lib.stride_tricks.as_strided(np.arange(10, dtype=np.int8)[-1], shape=(10,), strides=(-1,)) self.assertRaises(ValueError, set_strides, x[::-1], -1) a = x[::-1] a.strides = 1 a[::2].strides = 2 def test_fill(self): for t in "?bhilqpBHILQPfdgFDGO": x = np.empty((3, 2, 1), t) y = np.empty((3, 2, 1), t) x.fill(1) y[...] 
= 1 assert_equal(x, y) def test_fill_max_uint64(self): x = np.empty((3, 2, 1), dtype=np.uint64) y = np.empty((3, 2, 1), dtype=np.uint64) value = 2**64 - 1 y[...] = value x.fill(value) assert_array_equal(x, y) def test_fill_struct_array(self): # Filling from a scalar x = np.array([(0, 0.0), (1, 1.0)], dtype='i4,f8') x.fill(x[0]) assert_equal(x['f1'][1], x['f1'][0]) # Filling from a tuple that can be converted # to a scalar x = np.zeros(2, dtype=[('a', 'f8'), ('b', 'i4')]) x.fill((3.5, -2)) assert_array_equal(x['a'], [3.5, 3.5]) assert_array_equal(x['b'], [-2, -2]) class TestArrayConstruction(TestCase): def test_array(self): d = np.ones(6) r = np.array([d, d]) assert_equal(r, np.ones((2, 6))) d = np.ones(6) tgt = np.ones((2, 6)) r = np.array([d, d]) assert_equal(r, tgt) tgt[1] = 2 r = np.array([d, d + 1]) assert_equal(r, tgt) d = np.ones(6) r = np.array([[d, d]]) assert_equal(r, np.ones((1, 2, 6))) d = np.ones(6) r = np.array([[d, d], [d, d]]) assert_equal(r, np.ones((2, 2, 6))) d = np.ones((6, 6)) r = np.array([d, d]) assert_equal(r, np.ones((2, 6, 6))) d = np.ones((6, )) r = np.array([[d, d + 1], d + 2]) assert_equal(len(r), 2) assert_equal(r[0], [d, d + 1]) assert_equal(r[1], d + 2) tgt = np.ones((2, 3), dtype=np.bool) tgt[0, 2] = False tgt[1, 0:2] = False r = np.array([[True, True, False], [False, False, True]]) assert_equal(r, tgt) r = np.array([[True, False], [True, False], [False, True]]) assert_equal(r, tgt.T) def test_array_empty(self): assert_raises(TypeError, np.array) def test_array_copy_false(self): d = np.array([1, 2, 3]) e = np.array(d, copy=False) d[1] = 3 assert_array_equal(e, [1, 3, 3]) e = np.array(d, copy=False, order='F') d[1] = 4 assert_array_equal(e, [1, 4, 3]) e[2] = 7 assert_array_equal(d, [1, 4, 7]) def test_array_copy_true(self): d = np.array([[1,2,3], [1, 2, 3]]) e = np.array(d, copy=True) d[0, 1] = 3 e[0, 2] = -7 assert_array_equal(e, [[1, 2, -7], [1, 2, 3]]) assert_array_equal(d, [[1, 3, 3], [1, 2, 3]]) e = np.array(d, copy=True, 
order='F') d[0, 1] = 5 e[0, 2] = 7 assert_array_equal(e, [[1, 3, 7], [1, 2, 3]]) assert_array_equal(d, [[1, 5, 3], [1,2,3]]) def test_array_cont(self): d = np.ones(10)[::2] assert_(np.ascontiguousarray(d).flags.c_contiguous) assert_(np.ascontiguousarray(d).flags.f_contiguous) assert_(np.asfortranarray(d).flags.c_contiguous) assert_(np.asfortranarray(d).flags.f_contiguous) d = np.ones((10, 10))[::2,::2] assert_(np.ascontiguousarray(d).flags.c_contiguous) assert_(np.asfortranarray(d).flags.f_contiguous) class TestAssignment(TestCase): def test_assignment_broadcasting(self): a = np.arange(6).reshape(2, 3) # Broadcasting the input to the output a[...] = np.arange(3) assert_equal(a, [[0, 1, 2], [0, 1, 2]]) a[...] = np.arange(2).reshape(2, 1) assert_equal(a, [[0, 0, 0], [1, 1, 1]]) # For compatibility with <= 1.5, a limited version of broadcasting # the output to the input. # # This behavior is inconsistent with NumPy broadcasting # in general, because it only uses one of the two broadcasting # rules (adding a new "1" dimension to the left of the shape), # applied to the output instead of an input. In NumPy 2.0, this kind # of broadcasting assignment will likely be disallowed. a[...] = np.arange(6)[::-1].reshape(1, 2, 3) assert_equal(a, [[5, 4, 3], [2, 1, 0]]) # The other type of broadcasting would require a reduction operation. def assign(a, b): a[...] 
= b assert_raises(ValueError, assign, a, np.arange(12).reshape(2, 2, 3)) def test_assignment_errors(self): # Address issue #2276 class C: pass a = np.zeros(1) def assign(v): a[0] = v assert_raises((AttributeError, TypeError), assign, C()) assert_raises(ValueError, assign, [1]) class TestDtypedescr(TestCase): def test_construction(self): d1 = np.dtype('i4') assert_equal(d1, np.dtype(np.int32)) d2 = np.dtype('f8') assert_equal(d2, np.dtype(np.float64)) def test_byteorders(self): self.assertNotEqual(np.dtype('<i4'), np.dtype('>i4')) self.assertNotEqual(np.dtype([('a', '<i4')]), np.dtype([('a', '>i4')])) class TestZeroRank(TestCase): def setUp(self): self.d = np.array(0), np.array('x', object) def test_ellipsis_subscript(self): a, b = self.d self.assertEqual(a[...], 0) self.assertEqual(b[...], 'x') self.assertTrue(a[...].base is a) # `a[...] is a` in numpy <1.9. self.assertTrue(b[...].base is b) # `b[...] is b` in numpy <1.9. def test_empty_subscript(self): a, b = self.d self.assertEqual(a[()], 0) self.assertEqual(b[()], 'x') self.assertTrue(type(a[()]) is a.dtype.type) self.assertTrue(type(b[()]) is str) def test_invalid_subscript(self): a, b = self.d self.assertRaises(IndexError, lambda x: x[0], a) self.assertRaises(IndexError, lambda x: x[0], b) self.assertRaises(IndexError, lambda x: x[np.array([], int)], a) self.assertRaises(IndexError, lambda x: x[np.array([], int)], b) def test_ellipsis_subscript_assignment(self): a, b = self.d a[...] = 42 self.assertEqual(a, 42) b[...] 
= '' self.assertEqual(b.item(), '') def test_empty_subscript_assignment(self): a, b = self.d a[()] = 42 self.assertEqual(a, 42) b[()] = '' self.assertEqual(b.item(), '') def test_invalid_subscript_assignment(self): a, b = self.d def assign(x, i, v): x[i] = v self.assertRaises(IndexError, assign, a, 0, 42) self.assertRaises(IndexError, assign, b, 0, '') self.assertRaises(ValueError, assign, a, (), '') def test_newaxis(self): a, b = self.d self.assertEqual(a[np.newaxis].shape, (1,)) self.assertEqual(a[..., np.newaxis].shape, (1,)) self.assertEqual(a[np.newaxis, ...].shape, (1,)) self.assertEqual(a[..., np.newaxis].shape, (1,)) self.assertEqual(a[np.newaxis, ..., np.newaxis].shape, (1, 1)) self.assertEqual(a[..., np.newaxis, np.newaxis].shape, (1, 1)) self.assertEqual(a[np.newaxis, np.newaxis, ...].shape, (1, 1)) self.assertEqual(a[(np.newaxis,)*10].shape, (1,)*10) def test_invalid_newaxis(self): a, b = self.d def subscript(x, i): x[i] self.assertRaises(IndexError, subscript, a, (np.newaxis, 0)) self.assertRaises(IndexError, subscript, a, (np.newaxis,)*50) def test_constructor(self): x = np.ndarray(()) x[()] = 5 self.assertEqual(x[()], 5) y = np.ndarray((), buffer=x) y[()] = 6 self.assertEqual(x[()], 6) def test_output(self): x = np.array(2) self.assertRaises(ValueError, np.add, x, [1], x) class TestScalarIndexing(TestCase): def setUp(self): self.d = np.array([0, 1])[0] def test_ellipsis_subscript(self): a = self.d self.assertEqual(a[...], 0) self.assertEqual(a[...].shape, ()) def test_empty_subscript(self): a = self.d self.assertEqual(a[()], 0) self.assertEqual(a[()].shape, ()) def test_invalid_subscript(self): a = self.d self.assertRaises(IndexError, lambda x: x[0], a) self.assertRaises(IndexError, lambda x: x[np.array([], int)], a) def test_invalid_subscript_assignment(self): a = self.d def assign(x, i, v): x[i] = v self.assertRaises(TypeError, assign, a, 0, 42) def test_newaxis(self): a = self.d self.assertEqual(a[np.newaxis].shape, (1,)) self.assertEqual(a[..., 
np.newaxis].shape, (1,)) self.assertEqual(a[np.newaxis, ...].shape, (1,)) self.assertEqual(a[..., np.newaxis].shape, (1,)) self.assertEqual(a[np.newaxis, ..., np.newaxis].shape, (1, 1)) self.assertEqual(a[..., np.newaxis, np.newaxis].shape, (1, 1)) self.assertEqual(a[np.newaxis, np.newaxis, ...].shape, (1, 1)) self.assertEqual(a[(np.newaxis,)*10].shape, (1,)*10) def test_invalid_newaxis(self): a = self.d def subscript(x, i): x[i] self.assertRaises(IndexError, subscript, a, (np.newaxis, 0)) self.assertRaises(IndexError, subscript, a, (np.newaxis,)*50) def test_overlapping_assignment(self): # With positive strides a = np.arange(4) a[:-1] = a[1:] assert_equal(a, [1, 2, 3, 3]) a = np.arange(4) a[1:] = a[:-1] assert_equal(a, [0, 0, 1, 2]) # With positive and negative strides a = np.arange(4) a[:] = a[::-1] assert_equal(a, [3, 2, 1, 0]) a = np.arange(6).reshape(2, 3) a[::-1,:] = a[:, ::-1] assert_equal(a, [[5, 4, 3], [2, 1, 0]]) a = np.arange(6).reshape(2, 3) a[::-1, ::-1] = a[:, ::-1] assert_equal(a, [[3, 4, 5], [0, 1, 2]]) # With just one element overlapping a = np.arange(5) a[:3] = a[2:] assert_equal(a, [2, 3, 4, 3, 4]) a = np.arange(5) a[2:] = a[:3] assert_equal(a, [0, 1, 0, 1, 2]) a = np.arange(5) a[2::-1] = a[2:] assert_equal(a, [4, 3, 2, 3, 4]) a = np.arange(5) a[2:] = a[2::-1] assert_equal(a, [0, 1, 2, 1, 0]) a = np.arange(5) a[2::-1] = a[:1:-1] assert_equal(a, [2, 3, 4, 3, 4]) a = np.arange(5) a[:1:-1] = a[2::-1] assert_equal(a, [0, 1, 0, 1, 2]) class TestCreation(TestCase): def test_from_attribute(self): class x(object): def __array__(self, dtype=None): pass self.assertRaises(ValueError, np.array, x()) def test_from_string(self): types = np.typecodes['AllInteger'] + np.typecodes['Float'] nstr = ['123', '123'] result = np.array([123, 123], dtype=int) for type in types: msg = 'String conversion for %s' % type assert_equal(np.array(nstr, dtype=type), result, err_msg=msg) def test_void(self): arr = np.array([], dtype='V') assert_equal(arr.dtype.kind, 'V') def 
test_zeros(self): types = np.typecodes['AllInteger'] + np.typecodes['AllFloat'] for dt in types: d = np.zeros((13,), dtype=dt) assert_equal(np.count_nonzero(d), 0) # true for ieee floats assert_equal(d.sum(), 0) assert_(not d.any()) d = np.zeros(2, dtype='(2,4)i4') assert_equal(np.count_nonzero(d), 0) assert_equal(d.sum(), 0) assert_(not d.any()) d = np.zeros(2, dtype='4i4') assert_equal(np.count_nonzero(d), 0) assert_equal(d.sum(), 0) assert_(not d.any()) d = np.zeros(2, dtype='(2,4)i4, (2,4)i4') assert_equal(np.count_nonzero(d), 0) @dec.slow def test_zeros_big(self): # test big array as they might be allocated different by the sytem types = np.typecodes['AllInteger'] + np.typecodes['AllFloat'] for dt in types: d = np.zeros((30 * 1024**2,), dtype=dt) assert_(not d.any()) def test_zeros_obj(self): # test initialization from PyLong(0) d = np.zeros((13,), dtype=object) assert_array_equal(d, [0] * 13) assert_equal(np.count_nonzero(d), 0) def test_zeros_obj_obj(self): d = np.zeros(10, dtype=[('k', object, 2)]) assert_array_equal(d['k'], 0) def test_zeros_like_like_zeros(self): # test zeros_like returns the same as zeros for c in np.typecodes['All']: if c == 'V': continue d = np.zeros((3,3), dtype=c) assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) # explicitly check some special cases d = np.zeros((3,3), dtype='S5') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='U5') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='<i4') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='>i4') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='<M8[s]') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='>M8[s]') 
assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) d = np.zeros((3,3), dtype='f4,f4') assert_array_equal(np.zeros_like(d), d) assert_equal(np.zeros_like(d).dtype, d.dtype) def test_empty_unicode(self): # don't throw decode errors on garbage memory for i in range(5, 100, 5): d = np.empty(i, dtype='U') str(d) def test_sequence_non_homogenous(self): assert_equal(np.array([4, 2**80]).dtype, np.object) assert_equal(np.array([4, 2**80, 4]).dtype, np.object) assert_equal(np.array([2**80, 4]).dtype, np.object) assert_equal(np.array([2**80] * 3).dtype, np.object) assert_equal(np.array([[1, 1],[1j, 1j]]).dtype, np.complex) assert_equal(np.array([[1j, 1j],[1, 1]]).dtype, np.complex) assert_equal(np.array([[1, 1, 1],[1, 1j, 1.], [1, 1, 1]]).dtype, np.complex) @dec.skipif(sys.version_info[0] >= 3) def test_sequence_long(self): assert_equal(np.array([long(4), long(4)]).dtype, np.long) assert_equal(np.array([long(4), 2**80]).dtype, np.object) assert_equal(np.array([long(4), 2**80, long(4)]).dtype, np.object) assert_equal(np.array([2**80, long(4)]).dtype, np.object) def test_non_sequence_sequence(self): """Should not segfault. Class Fail breaks the sequence protocol for new style classes, i.e., those derived from object. Class Map is a mapping type indicated by raising a ValueError. At some point we may raise a warning instead of an error in the Fail case. 
""" class Fail(object): def __len__(self): return 1 def __getitem__(self, index): raise ValueError() class Map(object): def __len__(self): return 1 def __getitem__(self, index): raise KeyError() a = np.array([Map()]) assert_(a.shape == (1,)) assert_(a.dtype == np.dtype(object)) assert_raises(ValueError, np.array, [Fail()]) def test_no_len_object_type(self): # gh-5100, want object array from iterable object without len() class Point2: def __init__(self): pass def __getitem__(self, ind): if ind in [0, 1]: return ind else: raise IndexError() d = np.array([Point2(), Point2(), Point2()]) assert_equal(d.dtype, np.dtype(object)) class TestStructured(TestCase): def test_subarray_field_access(self): a = np.zeros((3, 5), dtype=[('a', ('i4', (2, 2)))]) a['a'] = np.arange(60).reshape(3, 5, 2, 2) # Since the subarray is always in C-order, a transpose # does not swap the subarray: assert_array_equal(a.T['a'], a['a'].transpose(1, 0, 2, 3)) # In Fortran order, the subarray gets appended # like in all other cases, not prepended as a special case b = a.copy(order='F') assert_equal(a['a'].shape, b['a'].shape) assert_equal(a.T['a'].shape, a.T.copy()['a'].shape) def test_subarray_comparison(self): # Check that comparisons between record arrays with # multi-dimensional field types work properly a = np.rec.fromrecords( [([1, 2, 3], 'a', [[1, 2], [3, 4]]), ([3, 3, 3], 'b', [[0, 0], [0, 0]])], dtype=[('a', ('f4', 3)), ('b', np.object), ('c', ('i4', (2, 2)))]) b = a.copy() assert_equal(a == b, [True, True]) assert_equal(a != b, [False, False]) b[1].b = 'c' assert_equal(a == b, [True, False]) assert_equal(a != b, [False, True]) for i in range(3): b[0].a = a[0].a b[0].a[i] = 5 assert_equal(a == b, [False, False]) assert_equal(a != b, [True, True]) for i in range(2): for j in range(2): b = a.copy() b[0].c[i, j] = 10 assert_equal(a == b, [False, True]) assert_equal(a != b, [True, False]) # Check that broadcasting with a subarray works a = np.array([[(0,)], [(1,)]], dtype=[('a', 'f8')]) b = 
np.array([(0,), (0,), (1,)], dtype=[('a', 'f8')]) assert_equal(a == b, [[True, True, False], [False, False, True]]) assert_equal(b == a, [[True, True, False], [False, False, True]]) a = np.array([[(0,)], [(1,)]], dtype=[('a', 'f8', (1,))]) b = np.array([(0,), (0,), (1,)], dtype=[('a', 'f8', (1,))]) assert_equal(a == b, [[True, True, False], [False, False, True]]) assert_equal(b == a, [[True, True, False], [False, False, True]]) a = np.array([[([0, 0],)], [([1, 1],)]], dtype=[('a', 'f8', (2,))]) b = np.array([([0, 0],), ([0, 1],), ([1, 1],)], dtype=[('a', 'f8', (2,))]) assert_equal(a == b, [[True, False, False], [False, False, True]]) assert_equal(b == a, [[True, False, False], [False, False, True]]) # Check that broadcasting Fortran-style arrays with a subarray work a = np.array([[([0, 0],)], [([1, 1],)]], dtype=[('a', 'f8', (2,))], order='F') b = np.array([([0, 0],), ([0, 1],), ([1, 1],)], dtype=[('a', 'f8', (2,))]) assert_equal(a == b, [[True, False, False], [False, False, True]]) assert_equal(b == a, [[True, False, False], [False, False, True]]) # Check that incompatible sub-array shapes don't result to broadcasting x = np.zeros((1,), dtype=[('a', ('f4', (1, 2))), ('b', 'i1')]) y = np.zeros((1,), dtype=[('a', ('f4', (2,))), ('b', 'i1')]) # This comparison invokes deprecated behaviour, and will probably # start raising an error eventually. What we really care about in this # test is just that it doesn't return True. with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) assert_equal(x == y, False) x = np.zeros((1,), dtype=[('a', ('f4', (2, 1))), ('b', 'i1')]) y = np.zeros((1,), dtype=[('a', ('f4', (2,))), ('b', 'i1')]) # This comparison invokes deprecated behaviour, and will probably # start raising an error eventually. What we really care about in this # test is just that it doesn't return True. 
with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) assert_equal(x == y, False) # Check that structured arrays that are different only in # byte-order work a = np.array([(5, 42), (10, 1)], dtype=[('a', '>i8'), ('b', '<f8')]) b = np.array([(5, 43), (10, 1)], dtype=[('a', '<i8'), ('b', '>f8')]) assert_equal(a == b, [False, True]) def test_casting(self): # Check that casting a structured array to change its byte order # works a = np.array([(1,)], dtype=[('a', '<i4')]) assert_(np.can_cast(a.dtype, [('a', '>i4')], casting='unsafe')) b = a.astype([('a', '>i4')]) assert_equal(b, a.byteswap().newbyteorder()) assert_equal(a['a'][0], b['a'][0]) # Check that equality comparison works on structured arrays if # they are 'equiv'-castable a = np.array([(5, 42), (10, 1)], dtype=[('a', '>i4'), ('b', '<f8')]) b = np.array([(42, 5), (1, 10)], dtype=[('b', '>f8'), ('a', '<i4')]) assert_(np.can_cast(a.dtype, b.dtype, casting='equiv')) assert_equal(a == b, [True, True]) # Check that 'equiv' casting can reorder fields and change byte # order assert_(np.can_cast(a.dtype, b.dtype, casting='equiv')) c = a.astype(b.dtype, casting='equiv') assert_equal(a == c, [True, True]) # Check that 'safe' casting can change byte order and up-cast # fields t = [('a', '<i8'), ('b', '>f8')] assert_(np.can_cast(a.dtype, t, casting='safe')) c = a.astype(t, casting='safe') assert_equal((c == np.array([(5, 42), (10, 1)], dtype=t)), [True, True]) # Check that 'same_kind' casting can change byte order and # change field widths within a "kind" t = [('a', '<i4'), ('b', '>f4')] assert_(np.can_cast(a.dtype, t, casting='same_kind')) c = a.astype(t, casting='same_kind') assert_equal((c == np.array([(5, 42), (10, 1)], dtype=t)), [True, True]) # Check that casting fails if the casting rule should fail on # any of the fields t = [('a', '>i8'), ('b', '<f4')] assert_(not np.can_cast(a.dtype, t, casting='safe')) assert_raises(TypeError, a.astype, t, casting='safe') t = [('a', 
'>i2'), ('b', '<f8')] assert_(not np.can_cast(a.dtype, t, casting='equiv')) assert_raises(TypeError, a.astype, t, casting='equiv') t = [('a', '>i8'), ('b', '<i2')] assert_(not np.can_cast(a.dtype, t, casting='same_kind')) assert_raises(TypeError, a.astype, t, casting='same_kind') assert_(not np.can_cast(a.dtype, b.dtype, casting='no')) assert_raises(TypeError, a.astype, b.dtype, casting='no') # Check that non-'unsafe' casting can't change the set of field names for casting in ['no', 'safe', 'equiv', 'same_kind']: t = [('a', '>i4')] assert_(not np.can_cast(a.dtype, t, casting=casting)) t = [('a', '>i4'), ('b', '<f8'), ('c', 'i4')] assert_(not np.can_cast(a.dtype, t, casting=casting)) def test_objview(self): # https://github.com/numpy/numpy/issues/3286 a = np.array([], dtype=[('a', 'f'), ('b', 'f'), ('c', 'O')]) a[['a', 'b']] # TypeError? # https://github.com/numpy/numpy/issues/3253 dat2 = np.zeros(3, [('A', 'i'), ('B', '|O')]) dat2[['B', 'A']] # TypeError? def test_setfield(self): # https://github.com/numpy/numpy/issues/3126 struct_dt = np.dtype([('elem', 'i4', 5),]) dt = np.dtype([('field', 'i4', 10),('struct', struct_dt)]) x = np.zeros(1, dt) x[0]['field'] = np.ones(10, dtype='i4') x[0]['struct'] = np.ones(1, dtype=struct_dt) assert_equal(x[0]['field'], np.ones(10, dtype='i4')) def test_setfield_object(self): # make sure object field assignment with ndarray value # on void scalar mimics setitem behavior b = np.zeros(1, dtype=[('x', 'O')]) # next line should work identically to b['x'][0] = np.arange(3) b[0]['x'] = np.arange(3) assert_equal(b[0]['x'], np.arange(3)) #check that broadcasting check still works c = np.zeros(1, dtype=[('x', 'O', 5)]) def testassign(): c[0]['x'] = np.arange(3) assert_raises(ValueError, testassign) class TestBool(TestCase): def test_test_interning(self): a0 = np.bool_(0) b0 = np.bool_(False) self.assertTrue(a0 is b0) a1 = np.bool_(1) b1 = np.bool_(True) self.assertTrue(a1 is b1) self.assertTrue(np.array([True])[0] is a1) 
self.assertTrue(np.array(True)[()] is a1) def test_sum(self): d = np.ones(101, dtype=np.bool) assert_equal(d.sum(), d.size) assert_equal(d[::2].sum(), d[::2].size) assert_equal(d[::-2].sum(), d[::-2].size) d = np.frombuffer(b'\xff\xff' * 100, dtype=bool) assert_equal(d.sum(), d.size) assert_equal(d[::2].sum(), d[::2].size) assert_equal(d[::-2].sum(), d[::-2].size) def check_count_nonzero(self, power, length): powers = [2 ** i for i in range(length)] for i in range(2**power): l = [(i & x) != 0 for x in powers] a = np.array(l, dtype=np.bool) c = builtins.sum(l) self.assertEqual(np.count_nonzero(a), c) av = a.view(np.uint8) av *= 3 self.assertEqual(np.count_nonzero(a), c) av *= 4 self.assertEqual(np.count_nonzero(a), c) av[av != 0] = 0xFF self.assertEqual(np.count_nonzero(a), c) def test_count_nonzero(self): # check all 12 bit combinations in a length 17 array # covers most cases of the 16 byte unrolled code self.check_count_nonzero(12, 17) @dec.slow def test_count_nonzero_all(self): # check all combinations in a length 17 array # covers all cases of the 16 byte unrolled code self.check_count_nonzero(17, 17) def test_count_nonzero_unaligned(self): # prevent mistakes as e.g. 
gh-4060 for o in range(7): a = np.zeros((18,), dtype=np.bool)[o+1:] a[:o] = True self.assertEqual(np.count_nonzero(a), builtins.sum(a.tolist())) a = np.ones((18,), dtype=np.bool)[o+1:] a[:o] = False self.assertEqual(np.count_nonzero(a), builtins.sum(a.tolist())) class TestMethods(TestCase): def test_round(self): def check_round(arr, expected, *round_args): assert_equal(arr.round(*round_args), expected) # With output array out = np.zeros_like(arr) res = arr.round(*round_args, out=out) assert_equal(out, expected) assert_equal(out, res) check_round(np.array([1.2, 1.5]), [1, 2]) check_round(np.array(1.5), 2) check_round(np.array([12.2, 15.5]), [10, 20], -1) check_round(np.array([12.15, 15.51]), [12.2, 15.5], 1) # Complex rounding check_round(np.array([4.5 + 1.5j]), [4 + 2j]) check_round(np.array([12.5 + 15.5j]), [10 + 20j], -1) def test_transpose(self): a = np.array([[1, 2], [3, 4]]) assert_equal(a.transpose(), [[1, 3], [2, 4]]) self.assertRaises(ValueError, lambda: a.transpose(0)) self.assertRaises(ValueError, lambda: a.transpose(0, 0)) self.assertRaises(ValueError, lambda: a.transpose(0, 1, 2)) def test_sort(self): # test ordering for floats and complex containing nans. It is only # necessary to check the lessthan comparison, so sorts that # only follow the insertion sort path are sufficient. We only # test doubles and complex doubles as the logic is the same. # check doubles msg = "Test real sort order with nans" a = np.array([np.nan, 1, 0]) b = np.sort(a) assert_equal(b, a[::-1], msg) # check complex msg = "Test complex sort order with nans" a = np.zeros(9, dtype=np.complex128) a.real += [np.nan, np.nan, np.nan, 1, 0, 1, 1, 0, 0] a.imag += [np.nan, 1, 0, np.nan, np.nan, 1, 0, 1, 0] b = np.sort(a) assert_equal(b, a[::-1], msg) # all c scalar sorts use the same code with different types # so it suffices to run a quick check with one type. 
The number # of sorted items must be greater than ~50 to check the actual # algorithm because quick and merge sort fall over to insertion # sort for small arrays. a = np.arange(101) b = a[::-1].copy() for kind in ['q', 'm', 'h']: msg = "scalar sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test complex sorts. These use the same code as the scalars # but the compare function differs. ai = a*1j + 1 bi = b*1j + 1 for kind in ['q', 'm', 'h']: msg = "complex sort, real part == 1, kind=%s" % kind c = ai.copy() c.sort(kind=kind) assert_equal(c, ai, msg) c = bi.copy() c.sort(kind=kind) assert_equal(c, ai, msg) ai = a + 1j bi = b + 1j for kind in ['q', 'm', 'h']: msg = "complex sort, imag part == 1, kind=%s" % kind c = ai.copy() c.sort(kind=kind) assert_equal(c, ai, msg) c = bi.copy() c.sort(kind=kind) assert_equal(c, ai, msg) # test sorting of complex arrays requiring byte-swapping, gh-5441 for endianess in '<>': for dt in np.typecodes['Complex']: arr = np.array([1+3.j, 2+2.j, 3+1.j], dtype=endianess + dt) c = arr.copy() c.sort() msg = 'byte-swapped complex sort, dtype={0}'.format(dt) assert_equal(c, arr, msg) # test string sorts. s = 'aaaaaaaa' a = np.array([s + chr(i) for i in range(101)]) b = a[::-1].copy() for kind in ['q', 'm', 'h']: msg = "string sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test unicode sorts. s = 'aaaaaaaa' a = np.array([s + chr(i) for i in range(101)], dtype=np.unicode) b = a[::-1].copy() for kind in ['q', 'm', 'h']: msg = "unicode sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test object array sorts. 
a = np.empty((101,), dtype=np.object) a[:] = list(range(101)) b = a[::-1] for kind in ['q', 'h', 'm']: msg = "object sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test record array sorts. dt = np.dtype([('f', float), ('i', int)]) a = np.array([(i, i) for i in range(101)], dtype=dt) b = a[::-1] for kind in ['q', 'h', 'm']: msg = "object sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test datetime64 sorts. a = np.arange(0, 101, dtype='datetime64[D]') b = a[::-1] for kind in ['q', 'h', 'm']: msg = "datetime64 sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # test timedelta64 sorts. a = np.arange(0, 101, dtype='timedelta64[D]') b = a[::-1] for kind in ['q', 'h', 'm']: msg = "timedelta64 sort, kind=%s" % kind c = a.copy() c.sort(kind=kind) assert_equal(c, a, msg) c = b.copy() c.sort(kind=kind) assert_equal(c, a, msg) # check axis handling. 
This should be the same for all type # specific sorts, so we only check it for one type and one kind a = np.array([[3, 2], [1, 0]]) b = np.array([[1, 0], [3, 2]]) c = np.array([[2, 3], [0, 1]]) d = a.copy() d.sort(axis=0) assert_equal(d, b, "test sort with axis=0") d = a.copy() d.sort(axis=1) assert_equal(d, c, "test sort with axis=1") d = a.copy() d.sort() assert_equal(d, c, "test sort with default axis") # check axis handling for multidimensional empty arrays a = np.array([]) a.shape = (3, 2, 1, 0) for axis in range(-a.ndim, a.ndim): msg = 'test empty array sort with axis={0}'.format(axis) assert_equal(np.sort(a, axis=axis), a, msg) msg = 'test empty array sort with axis=None' assert_equal(np.sort(a, axis=None), a.ravel(), msg) def test_copy(self): def assert_fortran(arr): assert_(arr.flags.fortran) assert_(arr.flags.f_contiguous) assert_(not arr.flags.c_contiguous) def assert_c(arr): assert_(not arr.flags.fortran) assert_(not arr.flags.f_contiguous) assert_(arr.flags.c_contiguous) a = np.empty((2, 2), order='F') # Test copying a Fortran array assert_c(a.copy()) assert_c(a.copy('C')) assert_fortran(a.copy('F')) assert_fortran(a.copy('A')) # Now test starting with a C array. 
a = np.empty((2, 2), order='C') assert_c(a.copy()) assert_c(a.copy('C')) assert_fortran(a.copy('F')) assert_c(a.copy('A')) def test_sort_order(self): # Test sorting an array with fields x1 = np.array([21, 32, 14]) x2 = np.array(['my', 'first', 'name']) x3 = np.array([3.1, 4.5, 6.2]) r = np.rec.fromarrays([x1, x2, x3], names='id,word,number') r.sort(order=['id']) assert_equal(r.id, np.array([14, 21, 32])) assert_equal(r.word, np.array(['name', 'my', 'first'])) assert_equal(r.number, np.array([6.2, 3.1, 4.5])) r.sort(order=['word']) assert_equal(r.id, np.array([32, 21, 14])) assert_equal(r.word, np.array(['first', 'my', 'name'])) assert_equal(r.number, np.array([4.5, 3.1, 6.2])) r.sort(order=['number']) assert_equal(r.id, np.array([21, 32, 14])) assert_equal(r.word, np.array(['my', 'first', 'name'])) assert_equal(r.number, np.array([3.1, 4.5, 6.2])) if sys.byteorder == 'little': strtype = '>i2' else: strtype = '<i2' mydtype = [('name', strchar + '5'), ('col2', strtype)] r = np.array([('a', 1), ('b', 255), ('c', 3), ('d', 258)], dtype=mydtype) r.sort(order='col2') assert_equal(r['col2'], [1, 3, 255, 258]) assert_equal(r, np.array([('a', 1), ('c', 3), ('b', 255), ('d', 258)], dtype=mydtype)) def test_argsort(self): # all c scalar argsorts use the same code with different types # so it suffices to run a quick check with one type. The number # of sorted items must be greater than ~50 to check the actual # algorithm because quick and merge sort fall over to insertion # sort for small arrays. a = np.arange(101) b = a[::-1].copy() for kind in ['q', 'm', 'h']: msg = "scalar argsort, kind=%s" % kind assert_equal(a.copy().argsort(kind=kind), a, msg) assert_equal(b.copy().argsort(kind=kind), b, msg) # test complex argsorts. These use the same code as the scalars # but the compare fuction differs. 
ai = a*1j + 1 bi = b*1j + 1 for kind in ['q', 'm', 'h']: msg = "complex argsort, kind=%s" % kind assert_equal(ai.copy().argsort(kind=kind), a, msg) assert_equal(bi.copy().argsort(kind=kind), b, msg) ai = a + 1j bi = b + 1j for kind in ['q', 'm', 'h']: msg = "complex argsort, kind=%s" % kind assert_equal(ai.copy().argsort(kind=kind), a, msg) assert_equal(bi.copy().argsort(kind=kind), b, msg) # test argsort of complex arrays requiring byte-swapping, gh-5441 for endianess in '<>': for dt in np.typecodes['Complex']: arr = np.array([1+3.j, 2+2.j, 3+1.j], dtype=endianess + dt) msg = 'byte-swapped complex argsort, dtype={0}'.format(dt) assert_equal(arr.argsort(), np.arange(len(arr), dtype=np.intp), msg) # test string argsorts. s = 'aaaaaaaa' a = np.array([s + chr(i) for i in range(101)]) b = a[::-1].copy() r = np.arange(101) rr = r[::-1] for kind in ['q', 'm', 'h']: msg = "string argsort, kind=%s" % kind assert_equal(a.copy().argsort(kind=kind), r, msg) assert_equal(b.copy().argsort(kind=kind), rr, msg) # test unicode argsorts. s = 'aaaaaaaa' a = np.array([s + chr(i) for i in range(101)], dtype=np.unicode) b = a[::-1] r = np.arange(101) rr = r[::-1] for kind in ['q', 'm', 'h']: msg = "unicode argsort, kind=%s" % kind assert_equal(a.copy().argsort(kind=kind), r, msg) assert_equal(b.copy().argsort(kind=kind), rr, msg) # test object array argsorts. a = np.empty((101,), dtype=np.object) a[:] = list(range(101)) b = a[::-1] r = np.arange(101) rr = r[::-1] for kind in ['q', 'm', 'h']: msg = "object argsort, kind=%s" % kind assert_equal(a.copy().argsort(kind=kind), r, msg) assert_equal(b.copy().argsort(kind=kind), rr, msg) # test structured array argsorts. 
# NOTE(review): interior of test_argsort — its `def` header lies in a previous
# chunk of this whitespace-mangled file.  One functional-alias fix: the
# removed alias `np.complex` is spelled as the builtin `complex` (they were
# the identical object, so behavior is unchanged on any numpy version).
dt = np.dtype([('f', float), ('i', int)])
a = np.array([(i, i) for i in range(101)], dtype=dt)
b = a[::-1]
r = np.arange(101)
rr = r[::-1]
for kind in ['q', 'm', 'h']:
    msg = "structured array argsort, kind=%s" % kind
    assert_equal(a.copy().argsort(kind=kind), r, msg)
    assert_equal(b.copy().argsort(kind=kind), rr, msg)

# test datetime64 argsorts.
a = np.arange(0, 101, dtype='datetime64[D]')
b = a[::-1]
r = np.arange(101)
rr = r[::-1]
for kind in ['q', 'h', 'm']:
    msg = "datetime64 argsort, kind=%s" % kind
    assert_equal(a.copy().argsort(kind=kind), r, msg)
    assert_equal(b.copy().argsort(kind=kind), rr, msg)

# test timedelta64 argsorts.
a = np.arange(0, 101, dtype='timedelta64[D]')
b = a[::-1]
r = np.arange(101)
rr = r[::-1]
for kind in ['q', 'h', 'm']:
    msg = "timedelta64 argsort, kind=%s" % kind
    assert_equal(a.copy().argsort(kind=kind), r, msg)
    assert_equal(b.copy().argsort(kind=kind), rr, msg)

# check axis handling. This should be the same for all type
# specific argsorts, so we only check it for one type and one kind
a = np.array([[3, 2], [1, 0]])
b = np.array([[1, 1], [0, 0]])
c = np.array([[1, 0], [1, 0]])
assert_equal(a.copy().argsort(axis=0), b)
assert_equal(a.copy().argsort(axis=1), c)
assert_equal(a.copy().argsort(), c)
# using None is known fail at this point
# assert_equal(a.copy().argsort(axis=None), c)

# check axis handling for multidimensional empty arrays
a = np.array([])
a.shape = (3, 2, 1, 0)
for axis in range(-a.ndim, a.ndim):
    msg = 'test empty array argsort with axis={0}'.format(axis)
    assert_equal(np.argsort(a, axis=axis),
                 np.zeros_like(a, dtype=np.intp), msg)
msg = 'test empty array argsort with axis=None'
assert_equal(np.argsort(a, axis=None),
             np.zeros_like(a.ravel(), dtype=np.intp), msg)

# check that stable argsorts are stable
r = np.arange(100)
# scalars
a = np.zeros(100)
assert_equal(a.argsort(kind='m'), r)
# complex (was the removed alias np.complex — same type as builtin complex)
a = np.zeros(100, dtype=complex)
assert_equal(a.argsort(kind='m'), r)
# string
a = np.array(['aaaaaaaaa' for i in range(100)])
assert_equal(a.argsort(kind='m'), r) # unicode a = np.array(['aaaaaaaaa' for i in range(100)], dtype=np.unicode) assert_equal(a.argsort(kind='m'), r) def test_sort_unicode_kind(self): d = np.arange(10) k = b'\xc3\xa4'.decode("UTF8") assert_raises(ValueError, d.sort, kind=k) assert_raises(ValueError, d.argsort, kind=k) def test_searchsorted(self): # test for floats and complex containing nans. The logic is the # same for all float types so only test double types for now. # The search sorted routines use the compare functions for the # array type, so this checks if that is consistent with the sort # order. # check double a = np.array([0, 1, np.nan]) msg = "Test real searchsorted with nans, side='l'" b = a.searchsorted(a, side='l') assert_equal(b, np.arange(3), msg) msg = "Test real searchsorted with nans, side='r'" b = a.searchsorted(a, side='r') assert_equal(b, np.arange(1, 4), msg) # check double complex a = np.zeros(9, dtype=np.complex128) a.real += [0, 0, 1, 1, 0, 1, np.nan, np.nan, np.nan] a.imag += [0, 1, 0, 1, np.nan, np.nan, 0, 1, np.nan] msg = "Test complex searchsorted with nans, side='l'" b = a.searchsorted(a, side='l') assert_equal(b, np.arange(9), msg) msg = "Test complex searchsorted with nans, side='r'" b = a.searchsorted(a, side='r') assert_equal(b, np.arange(1, 10), msg) msg = "Test searchsorted with little endian, side='l'" a = np.array([0, 128], dtype='<i4') b = a.searchsorted(np.array(128, dtype='<i4')) assert_equal(b, 1, msg) msg = "Test searchsorted with big endian, side='l'" a = np.array([0, 128], dtype='>i4') b = a.searchsorted(np.array(128, dtype='>i4')) assert_equal(b, 1, msg) # Check 0 elements a = np.ones(0) b = a.searchsorted([0, 1, 2], 'l') assert_equal(b, [0, 0, 0]) b = a.searchsorted([0, 1, 2], 'r') assert_equal(b, [0, 0, 0]) a = np.ones(1) # Check 1 element b = a.searchsorted([0, 1, 2], 'l') assert_equal(b, [0, 0, 1]) b = a.searchsorted([0, 1, 2], 'r') assert_equal(b, [0, 1, 1]) # Check all elements equal a = np.ones(2) b = 
a.searchsorted([0, 1, 2], 'l') assert_equal(b, [0, 0, 2]) b = a.searchsorted([0, 1, 2], 'r') assert_equal(b, [0, 2, 2]) # Test searching unaligned array a = np.arange(10) aligned = np.empty(a.itemsize * a.size + 1, 'uint8') unaligned = aligned[1:].view(a.dtype) unaligned[:] = a # Test searching unaligned array b = unaligned.searchsorted(a, 'l') assert_equal(b, a) b = unaligned.searchsorted(a, 'r') assert_equal(b, a + 1) # Test searching for unaligned keys b = a.searchsorted(unaligned, 'l') assert_equal(b, a) b = a.searchsorted(unaligned, 'r') assert_equal(b, a + 1) # Test smart resetting of binsearch indices a = np.arange(5) b = a.searchsorted([6, 5, 4], 'l') assert_equal(b, [5, 5, 4]) b = a.searchsorted([6, 5, 4], 'r') assert_equal(b, [5, 5, 5]) # Test all type specific binary search functions types = ''.join((np.typecodes['AllInteger'], np.typecodes['AllFloat'], np.typecodes['Datetime'], '?O')) for dt in types: if dt == 'M': dt = 'M8[D]' if dt == '?': a = np.arange(2, dtype=dt) out = np.arange(2) else: a = np.arange(0, 5, dtype=dt) out = np.arange(5) b = a.searchsorted(a, 'l') assert_equal(b, out) b = a.searchsorted(a, 'r') assert_equal(b, out + 1) def test_searchsorted_unicode(self): # Test searchsorted on unicode strings. # 1.6.1 contained a string length miscalculation in # arraytypes.c.src:UNICODE_compare() which manifested as # incorrect/inconsistent results from searchsorted. 
a = np.array(['P:\\20x_dapi_cy3\\20x_dapi_cy3_20100185_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100186_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100187_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100189_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100190_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100191_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100192_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100193_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100194_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100195_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100196_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100197_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100198_1', 'P:\\20x_dapi_cy3\\20x_dapi_cy3_20100199_1'], dtype=np.unicode) ind = np.arange(len(a)) assert_equal([a.searchsorted(v, 'left') for v in a], ind) assert_equal([a.searchsorted(v, 'right') for v in a], ind + 1) assert_equal([a.searchsorted(a[i], 'left') for i in ind], ind) assert_equal([a.searchsorted(a[i], 'right') for i in ind], ind + 1) def test_searchsorted_with_sorter(self): a = np.array([5, 2, 1, 3, 4]) s = np.argsort(a) assert_raises(TypeError, np.searchsorted, a, 0, sorter=(1, (2, 3))) assert_raises(TypeError, np.searchsorted, a, 0, sorter=[1.1]) assert_raises(ValueError, np.searchsorted, a, 0, sorter=[1, 2, 3, 4]) assert_raises(ValueError, np.searchsorted, a, 0, sorter=[1, 2, 3, 4, 5, 6]) # bounds check assert_raises(ValueError, np.searchsorted, a, 4, sorter=[0, 1, 2, 3, 5]) assert_raises(ValueError, np.searchsorted, a, 0, sorter=[-1, 0, 1, 2, 3]) assert_raises(ValueError, np.searchsorted, a, 0, sorter=[4, 0, -1, 2, 3]) a = np.random.rand(300) s = a.argsort() b = np.sort(a) k = np.linspace(0, 1, 20) assert_equal(b.searchsorted(k), a.searchsorted(k, sorter=s)) a = np.array([0, 1, 2, 3, 5]*20) s = a.argsort() k = [0, 1, 2, 3, 5] expected = [0, 20, 40, 60, 80] assert_equal(a.searchsorted(k, side='l', sorter=s), expected) expected = [20, 40, 60, 80, 100] assert_equal(a.searchsorted(k, side='r', sorter=s), expected) # Test searching unaligned array keys = np.arange(10) a = 
keys.copy() np.random.shuffle(s) s = a.argsort() aligned = np.empty(a.itemsize * a.size + 1, 'uint8') unaligned = aligned[1:].view(a.dtype) # Test searching unaligned array unaligned[:] = a b = unaligned.searchsorted(keys, 'l', s) assert_equal(b, keys) b = unaligned.searchsorted(keys, 'r', s) assert_equal(b, keys + 1) # Test searching for unaligned keys unaligned[:] = keys b = a.searchsorted(unaligned, 'l', s) assert_equal(b, keys) b = a.searchsorted(unaligned, 'r', s) assert_equal(b, keys + 1) # Test all type specific indirect binary search functions types = ''.join((np.typecodes['AllInteger'], np.typecodes['AllFloat'], np.typecodes['Datetime'], '?O')) for dt in types: if dt == 'M': dt = 'M8[D]' if dt == '?': a = np.array([1, 0], dtype=dt) # We want the sorter array to be of a type that is different # from np.intp in all platforms, to check for #4698 s = np.array([1, 0], dtype=np.int16) out = np.array([1, 0]) else: a = np.array([3, 4, 1, 2, 0], dtype=dt) # We want the sorter array to be of a type that is different # from np.intp in all platforms, to check for #4698 s = np.array([4, 2, 3, 0, 1], dtype=np.int16) out = np.array([3, 4, 1, 2, 0], dtype=np.intp) b = a.searchsorted(a, 'l', s) assert_equal(b, out) b = a.searchsorted(a, 'r', s) assert_equal(b, out + 1) # Test non-contiguous sorter array a = np.array([3, 4, 1, 2, 0]) srt = np.empty((10,), dtype=np.intp) srt[1::2] = -1 srt[::2] = [4, 2, 3, 0, 1] s = srt[::2] out = np.array([3, 4, 1, 2, 0], dtype=np.intp) b = a.searchsorted(a, 'l', s) assert_equal(b, out) b = a.searchsorted(a, 'r', s) assert_equal(b, out + 1) def test_argpartition_out_of_range(self): # Test out of range values in kth raise an error, gh-5469 d = np.arange(10) assert_raises(ValueError, d.argpartition, 10) assert_raises(ValueError, d.argpartition, -11) # Test also for generic type argpartition, which uses sorting # and used to not bound check kth d_obj = np.arange(10, dtype=object) assert_raises(ValueError, d_obj.argpartition, 10) 
# NOTE(review): the first statement below is the tail of
# test_argpartition_out_of_range, whose `def` header (and the `d_obj` it
# uses) sit in the previous chunk of this mangled file; it is kept verbatim.
assert_raises(ValueError, d_obj.argpartition, -11)

# NOTE(review): the defs below are presumably methods of the enclosing
# TestCase class (its header is outside this chunk); they are reproduced at
# the column the mangled source put them at.
def test_partition_out_of_range(self):
    # Test out of range values in kth raise an error, gh-5469
    d = np.arange(10)
    assert_raises(ValueError, d.partition, 10)
    assert_raises(ValueError, d.partition, -11)
    # Test also for generic type partition, which uses sorting
    # and used to not bound check kth
    d_obj = np.arange(10, dtype=object)
    assert_raises(ValueError, d_obj.partition, 10)
    assert_raises(ValueError, d_obj.partition, -11)

def test_partition_empty_array(self):
    # check axis handling for multidimensional empty arrays
    a = np.array([])
    a.shape = (3, 2, 1, 0)
    for axis in range(-a.ndim, a.ndim):
        msg = 'test empty array partition with axis={0}'.format(axis)
        assert_equal(np.partition(a, 0, axis=axis), a, msg)
    msg = 'test empty array partition with axis=None'
    assert_equal(np.partition(a, 0, axis=None), a.ravel(), msg)

def test_argpartition_empty_array(self):
    # check axis handling for multidimensional empty arrays
    a = np.array([])
    a.shape = (3, 2, 1, 0)
    for axis in range(-a.ndim, a.ndim):
        msg = 'test empty array argpartition with axis={0}'.format(axis)
        # BUG FIX: this test called np.partition (copy-paste from
        # test_partition_empty_array), so np.argpartition was never exercised
        # here and its intp index result was never checked.
        assert_equal(np.argpartition(a, 0, axis=axis),
                     np.zeros_like(a, dtype=np.intp), msg)
    msg = 'test empty array argpartition with axis=None'
    assert_equal(np.argpartition(a, 0, axis=None),
                 np.zeros_like(a.ravel(), dtype=np.intp), msg)

def test_partition(self):
    d = np.arange(10)
    assert_raises(TypeError, np.partition, d, 2, kind=1)
    assert_raises(ValueError, np.partition, d, 2, kind="nonsense")
    assert_raises(ValueError, np.argpartition, d, 2, kind="nonsense")
    assert_raises(ValueError, d.partition, 2, axis=0, kind="nonsense")
    assert_raises(ValueError, d.argpartition, 2, axis=0, kind="nonsense")
    for k in ("introselect",):
        d = np.array([])
        assert_array_equal(np.partition(d, 0, kind=k), d)
        assert_array_equal(np.argpartition(d, 0, kind=k), d)
        d = np.ones((1))
        assert_array_equal(np.partition(d, 0, kind=k)[0], d)
        assert_array_equal(d[np.argpartition(d, 0, kind=k)],
                           np.partition(d, 0, kind=k))
        # (method continues in the next chunk: "kth not modified" …)
        #
kth not modified kth = np.array([30, 15, 5]) okth = kth.copy() np.partition(np.arange(40), kth) assert_array_equal(kth, okth) for r in ([2, 1], [1, 2], [1, 1]): d = np.array(r) tgt = np.sort(d) assert_array_equal(np.partition(d, 0, kind=k)[0], tgt[0]) assert_array_equal(np.partition(d, 1, kind=k)[1], tgt[1]) assert_array_equal(d[np.argpartition(d, 0, kind=k)], np.partition(d, 0, kind=k)) assert_array_equal(d[np.argpartition(d, 1, kind=k)], np.partition(d, 1, kind=k)) for i in range(d.size): d[i:].partition(0, kind=k) assert_array_equal(d, tgt) for r in ([3, 2, 1], [1, 2, 3], [2, 1, 3], [2, 3, 1], [1, 1, 1], [1, 2, 2], [2, 2, 1], [1, 2, 1]): d = np.array(r) tgt = np.sort(d) assert_array_equal(np.partition(d, 0, kind=k)[0], tgt[0]) assert_array_equal(np.partition(d, 1, kind=k)[1], tgt[1]) assert_array_equal(np.partition(d, 2, kind=k)[2], tgt[2]) assert_array_equal(d[np.argpartition(d, 0, kind=k)], np.partition(d, 0, kind=k)) assert_array_equal(d[np.argpartition(d, 1, kind=k)], np.partition(d, 1, kind=k)) assert_array_equal(d[np.argpartition(d, 2, kind=k)], np.partition(d, 2, kind=k)) for i in range(d.size): d[i:].partition(0, kind=k) assert_array_equal(d, tgt) d = np.ones((50)) assert_array_equal(np.partition(d, 0, kind=k), d) assert_array_equal(d[np.argpartition(d, 0, kind=k)], np.partition(d, 0, kind=k)) # sorted d = np.arange((49)) self.assertEqual(np.partition(d, 5, kind=k)[5], 5) self.assertEqual(np.partition(d, 15, kind=k)[15], 15) assert_array_equal(d[np.argpartition(d, 5, kind=k)], np.partition(d, 5, kind=k)) assert_array_equal(d[np.argpartition(d, 15, kind=k)], np.partition(d, 15, kind=k)) # rsorted d = np.arange((47))[::-1] self.assertEqual(np.partition(d, 6, kind=k)[6], 6) self.assertEqual(np.partition(d, 16, kind=k)[16], 16) assert_array_equal(d[np.argpartition(d, 6, kind=k)], np.partition(d, 6, kind=k)) assert_array_equal(d[np.argpartition(d, 16, kind=k)], np.partition(d, 16, kind=k)) assert_array_equal(np.partition(d, -6, kind=k), np.partition(d, 41, 
kind=k)) assert_array_equal(np.partition(d, -16, kind=k), np.partition(d, 31, kind=k)) assert_array_equal(d[np.argpartition(d, -6, kind=k)], np.partition(d, 41, kind=k)) # median of 3 killer, O(n^2) on pure median 3 pivot quickselect # exercises the median of median of 5 code used to keep O(n) d = np.arange(1000000) x = np.roll(d, d.size // 2) mid = x.size // 2 + 1 assert_equal(np.partition(x, mid)[mid], mid) d = np.arange(1000001) x = np.roll(d, d.size // 2 + 1) mid = x.size // 2 + 1 assert_equal(np.partition(x, mid)[mid], mid) # max d = np.ones(10) d[1] = 4 assert_equal(np.partition(d, (2, -1))[-1], 4) assert_equal(np.partition(d, (2, -1))[2], 1) assert_equal(d[np.argpartition(d, (2, -1))][-1], 4) assert_equal(d[np.argpartition(d, (2, -1))][2], 1) d[1] = np.nan assert_(np.isnan(d[np.argpartition(d, (2, -1))][-1])) assert_(np.isnan(np.partition(d, (2, -1))[-1])) # equal elements d = np.arange((47)) % 7 tgt = np.sort(np.arange((47)) % 7) np.random.shuffle(d) for i in range(d.size): self.assertEqual(np.partition(d, i, kind=k)[i], tgt[i]) assert_array_equal(d[np.argpartition(d, 6, kind=k)], np.partition(d, 6, kind=k)) assert_array_equal(d[np.argpartition(d, 16, kind=k)], np.partition(d, 16, kind=k)) for i in range(d.size): d[i:].partition(0, kind=k) assert_array_equal(d, tgt) d = np.array([0, 1, 2, 3, 4, 5, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 9]) kth = [0, 3, 19, 20] assert_equal(np.partition(d, kth, kind=k)[kth], (0, 3, 7, 7)) assert_equal(d[np.argpartition(d, kth, kind=k)][kth], (0, 3, 7, 7)) d = np.array([2, 1]) d.partition(0, kind=k) assert_raises(ValueError, d.partition, 2) assert_raises(ValueError, d.partition, 3, axis=1) assert_raises(ValueError, np.partition, d, 2) assert_raises(ValueError, np.partition, d, 2, axis=1) assert_raises(ValueError, d.argpartition, 2) assert_raises(ValueError, d.argpartition, 3, axis=1) assert_raises(ValueError, np.argpartition, d, 2) assert_raises(ValueError, np.argpartition, d, 2, axis=1) d = np.arange(10).reshape((2, 
5)) d.partition(1, axis=0, kind=k) d.partition(4, axis=1, kind=k) np.partition(d, 1, axis=0, kind=k) np.partition(d, 4, axis=1, kind=k) np.partition(d, 1, axis=None, kind=k) np.partition(d, 9, axis=None, kind=k) d.argpartition(1, axis=0, kind=k) d.argpartition(4, axis=1, kind=k) np.argpartition(d, 1, axis=0, kind=k) np.argpartition(d, 4, axis=1, kind=k) np.argpartition(d, 1, axis=None, kind=k) np.argpartition(d, 9, axis=None, kind=k) assert_raises(ValueError, d.partition, 2, axis=0) assert_raises(ValueError, d.partition, 11, axis=1) assert_raises(TypeError, d.partition, 2, axis=None) assert_raises(ValueError, np.partition, d, 9, axis=1) assert_raises(ValueError, np.partition, d, 11, axis=None) assert_raises(ValueError, d.argpartition, 2, axis=0) assert_raises(ValueError, d.argpartition, 11, axis=1) assert_raises(ValueError, np.argpartition, d, 9, axis=1) assert_raises(ValueError, np.argpartition, d, 11, axis=None) td = [(dt, s) for dt in [np.int32, np.float32, np.complex64] for s in (9, 16)] for dt, s in td: aae = assert_array_equal at = self.assertTrue d = np.arange(s, dtype=dt) np.random.shuffle(d) d1 = np.tile(np.arange(s, dtype=dt), (4, 1)) map(np.random.shuffle, d1) d0 = np.transpose(d1) for i in range(d.size): p = np.partition(d, i, kind=k) self.assertEqual(p[i], i) # all before are smaller assert_array_less(p[:i], p[i]) # all after are larger assert_array_less(p[i], p[i + 1:]) aae(p, d[np.argpartition(d, i, kind=k)]) p = np.partition(d1, i, axis=1, kind=k) aae(p[:, i], np.array([i] * d1.shape[0], dtype=dt)) # array_less does not seem to work right at((p[:, :i].T <= p[:, i]).all(), msg="%d: %r <= %r" % (i, p[:, i], p[:, :i].T)) at((p[:, i + 1:].T > p[:, i]).all(), msg="%d: %r < %r" % (i, p[:, i], p[:, i + 1:].T)) aae(p, d1[np.arange(d1.shape[0])[:, None], np.argpartition(d1, i, axis=1, kind=k)]) p = np.partition(d0, i, axis=0, kind=k) aae(p[i,:], np.array([i] * d1.shape[0], dtype=dt)) # array_less does not seem to work right at((p[:i,:] <= p[i,:]).all(), 
msg="%d: %r <= %r" % (i, p[i,:], p[:i,:])) at((p[i + 1:,:] > p[i,:]).all(), msg="%d: %r < %r" % (i, p[i,:], p[:, i + 1:])) aae(p, d0[np.argpartition(d0, i, axis=0, kind=k), np.arange(d0.shape[1])[None,:]]) # check inplace dc = d.copy() dc.partition(i, kind=k) assert_equal(dc, np.partition(d, i, kind=k)) dc = d0.copy() dc.partition(i, axis=0, kind=k) assert_equal(dc, np.partition(d0, i, axis=0, kind=k)) dc = d1.copy() dc.partition(i, axis=1, kind=k) assert_equal(dc, np.partition(d1, i, axis=1, kind=k)) def assert_partitioned(self, d, kth): prev = 0 for k in np.sort(kth): assert_array_less(d[prev:k], d[k], err_msg='kth %d' % k) assert_((d[k:] >= d[k]).all(), msg="kth %d, %r not greater equal %d" % (k, d[k:], d[k])) prev = k + 1 def test_partition_iterative(self): d = np.arange(17) kth = (0, 1, 2, 429, 231) assert_raises(ValueError, d.partition, kth) assert_raises(ValueError, d.argpartition, kth) d = np.arange(10).reshape((2, 5)) assert_raises(ValueError, d.partition, kth, axis=0) assert_raises(ValueError, d.partition, kth, axis=1) assert_raises(ValueError, np.partition, d, kth, axis=1) assert_raises(ValueError, np.partition, d, kth, axis=None) d = np.array([3, 4, 2, 1]) p = np.partition(d, (0, 3)) self.assert_partitioned(p, (0, 3)) self.assert_partitioned(d[np.argpartition(d, (0, 3))], (0, 3)) assert_array_equal(p, np.partition(d, (-3, -1))) assert_array_equal(p, d[np.argpartition(d, (-3, -1))]) d = np.arange(17) np.random.shuffle(d) d.partition(range(d.size)) assert_array_equal(np.arange(17), d) np.random.shuffle(d) assert_array_equal(np.arange(17), d[d.argpartition(range(d.size))]) # test unsorted kth d = np.arange(17) np.random.shuffle(d) keys = np.array([1, 3, 8, -2]) np.random.shuffle(d) p = np.partition(d, keys) self.assert_partitioned(p, keys) p = d[np.argpartition(d, keys)] self.assert_partitioned(p, keys) np.random.shuffle(keys) assert_array_equal(np.partition(d, keys), p) assert_array_equal(d[np.argpartition(d, keys)], p) # equal kth d = np.arange(20)[::-1] 
self.assert_partitioned(np.partition(d, [5]*4), [5]) self.assert_partitioned(np.partition(d, [5]*4 + [6, 13]), [5]*4 + [6, 13]) self.assert_partitioned(d[np.argpartition(d, [5]*4)], [5]) self.assert_partitioned(d[np.argpartition(d, [5]*4 + [6, 13])], [5]*4 + [6, 13]) d = np.arange(12) np.random.shuffle(d) d1 = np.tile(np.arange(12), (4, 1)) map(np.random.shuffle, d1) d0 = np.transpose(d1) kth = (1, 6, 7, -1) p = np.partition(d1, kth, axis=1) pa = d1[np.arange(d1.shape[0])[:, None], d1.argpartition(kth, axis=1)] assert_array_equal(p, pa) for i in range(d1.shape[0]): self.assert_partitioned(p[i,:], kth) p = np.partition(d0, kth, axis=0) pa = d0[np.argpartition(d0, kth, axis=0), np.arange(d0.shape[1])[None,:]] assert_array_equal(p, pa) for i in range(d0.shape[1]): self.assert_partitioned(p[:, i], kth) def test_partition_cdtype(self): d = np.array([('Galahad', 1.7, 38), ('Arthur', 1.8, 41), ('Lancelot', 1.9, 38)], dtype=[('name', '|S10'), ('height', '<f8'), ('age', '<i4')]) tgt = np.sort(d, order=['age', 'height']) assert_array_equal(np.partition(d, range(d.size), order=['age', 'height']), tgt) assert_array_equal(d[np.argpartition(d, range(d.size), order=['age', 'height'])], tgt) for k in range(d.size): assert_equal(np.partition(d, k, order=['age', 'height'])[k], tgt[k]) assert_equal(d[np.argpartition(d, k, order=['age', 'height'])][k], tgt[k]) d = np.array(['Galahad', 'Arthur', 'zebra', 'Lancelot']) tgt = np.sort(d) assert_array_equal(np.partition(d, range(d.size)), tgt) for k in range(d.size): assert_equal(np.partition(d, k)[k], tgt[k]) assert_equal(d[np.argpartition(d, k)][k], tgt[k]) def test_partition_unicode_kind(self): d = np.arange(10) k = b'\xc3\xa4'.decode("UTF8") assert_raises(ValueError, d.partition, 2, kind=k) assert_raises(ValueError, d.argpartition, 2, kind=k) def test_partition_fuzz(self): # a few rounds of random data testing for j in range(10, 30): for i in range(1, j - 2): d = np.arange(j) np.random.shuffle(d) d = d % np.random.randint(2, 30) idx = 
# NOTE(review): the first four statements are the tail of test_partition_fuzz
# (its `def` header is in a previous chunk); the first expression is the
# right-hand side of an `idx = ` assignment whose left half sits on the
# previous line of the mangled source.  `d`, `idx` and `i` come from that
# out-of-view context and the fragment is kept verbatim.
np.random.randint(d.size)
kth = [0, idx, i, i + 1]
tgt = np.sort(d)[kth]
assert_array_equal(np.partition(d, kth)[kth], tgt,
                   err_msg="data: %r\n kth: %r" % (d, kth))

def test_argpartition_gh5524(self):
    # A test for functionality of argpartition on lists.
    d = [6, 7, 3, 2, 9, 0]
    p = np.argpartition(d, 1)
    self.assert_partitioned(np.array(d)[p], [1])

def test_flatten(self):
    # C-order and F-order flattening of 2-D and 3-D int32 arrays.
    x0 = np.array([[1, 2, 3], [4, 5, 6]], np.int32)
    x1 = np.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], np.int32)
    y0 = np.array([1, 2, 3, 4, 5, 6], np.int32)
    y0f = np.array([1, 4, 2, 5, 3, 6], np.int32)
    y1 = np.array([1, 2, 3, 4, 5, 6, 7, 8], np.int32)
    y1f = np.array([1, 5, 3, 7, 2, 6, 4, 8], np.int32)
    assert_equal(x0.flatten(), y0)
    assert_equal(x0.flatten('F'), y0f)
    assert_equal(x0.flatten('F'), x0.T.flatten())
    assert_equal(x1.flatten(), y1)
    assert_equal(x1.flatten('F'), y1f)
    assert_equal(x1.flatten('F'), x1.T.flatten())

def test_dot(self):
    a = np.array([[1, 0], [0, 1]])
    b = np.array([[0, 1], [1, 0]])
    c = np.array([[9, 1], [1, -9]])
    assert_equal(np.dot(a, b), a.dot(b))
    assert_equal(np.dot(np.dot(a, b), c), a.dot(b).dot(c))
    # test passing in an output array
    c = np.zeros_like(a)
    a.dot(b, c)
    assert_equal(c, np.dot(a, b))
    # test keyword args
    c = np.zeros_like(a)
    a.dot(b=b, out=c)
    assert_equal(c, np.dot(a, b))

def test_dot_override(self):
    # __numpy_ufunc__ dispatch: A claims the operation, B defers.
    class A(object):
        def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs):
            return "A"

    class B(object):
        def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs):
            return NotImplemented

    a = A()
    b = B()
    c = np.array([[1]])
    assert_equal(np.dot(a, b), "A")
    assert_equal(c.dot(a), "A")
    assert_raises(TypeError, np.dot, b, c)
    assert_raises(TypeError, c.dot, b)

def test_diagonal(self):
    a = np.arange(12).reshape((3, 4))
    assert_equal(a.diagonal(), [0, 5, 10])
    assert_equal(a.diagonal(0), [0, 5, 10])
    assert_equal(a.diagonal(1), [1, 6, 11])
    assert_equal(a.diagonal(-1), [4, 9])
    b = np.arange(8).reshape((2, 2, 2))
    assert_equal(b.diagonal(), [[0, 6], [1, 7]])
    # (method continues in the next chunk)
assert_equal(b.diagonal(0), [[0, 6], [1, 7]]) assert_equal(b.diagonal(1), [[2], [3]]) assert_equal(b.diagonal(-1), [[4], [5]]) assert_raises(ValueError, b.diagonal, axis1=0, axis2=0) assert_equal(b.diagonal(0, 1, 2), [[0, 3], [4, 7]]) assert_equal(b.diagonal(0, 0, 1), [[0, 6], [1, 7]]) assert_equal(b.diagonal(offset=1, axis1=0, axis2=2), [[1], [3]]) # Order of axis argument doesn't matter: assert_equal(b.diagonal(0, 2, 1), [[0, 3], [4, 7]]) def test_diagonal_view_notwriteable(self): # this test is only for 1.9, the diagonal view will be # writeable in 1.10. a = np.eye(3).diagonal() assert_(not a.flags.writeable) assert_(not a.flags.owndata) a = np.diagonal(np.eye(3)) assert_(not a.flags.writeable) assert_(not a.flags.owndata) a = np.diag(np.eye(3)) assert_(not a.flags.writeable) assert_(not a.flags.owndata) def test_diagonal_memleak(self): # Regression test for a bug that crept in at one point a = np.zeros((100, 100)) assert_(sys.getrefcount(a) < 50) for i in range(100): a.diagonal() assert_(sys.getrefcount(a) < 50) def test_put(self): icodes = np.typecodes['AllInteger'] fcodes = np.typecodes['AllFloat'] for dt in icodes + fcodes + 'O': tgt = np.array([0, 1, 0, 3, 0, 5], dtype=dt) # test 1-d a = np.zeros(6, dtype=dt) a.put([1, 3, 5], [1, 3, 5]) assert_equal(a, tgt) # test 2-d a = np.zeros((2, 3), dtype=dt) a.put([1, 3, 5], [1, 3, 5]) assert_equal(a, tgt.reshape(2, 3)) for dt in '?': tgt = np.array([False, True, False, True, False, True], dtype=dt) # test 1-d a = np.zeros(6, dtype=dt) a.put([1, 3, 5], [True]*3) assert_equal(a, tgt) # test 2-d a = np.zeros((2, 3), dtype=dt) a.put([1, 3, 5], [True]*3) assert_equal(a, tgt.reshape(2, 3)) # check must be writeable a = np.zeros(6) a.flags.writeable = False assert_raises(ValueError, a.put, [1, 3, 5], [1, 3, 5]) def test_ravel(self): a = np.array([[0, 1], [2, 3]]) assert_equal(a.ravel(), [0, 1, 2, 3]) assert_(not a.ravel().flags.owndata) assert_equal(a.ravel('F'), [0, 2, 1, 3]) assert_equal(a.ravel(order='C'), [0, 1, 2, 3]) 
assert_equal(a.ravel(order='F'), [0, 2, 1, 3]) assert_equal(a.ravel(order='A'), [0, 1, 2, 3]) assert_(not a.ravel(order='A').flags.owndata) assert_equal(a.ravel(order='K'), [0, 1, 2, 3]) assert_(not a.ravel(order='K').flags.owndata) assert_equal(a.ravel(), a.reshape(-1)) a = np.array([[0, 1], [2, 3]], order='F') assert_equal(a.ravel(), [0, 1, 2, 3]) assert_equal(a.ravel(order='A'), [0, 2, 1, 3]) assert_equal(a.ravel(order='K'), [0, 2, 1, 3]) assert_(not a.ravel(order='A').flags.owndata) assert_(not a.ravel(order='K').flags.owndata) assert_equal(a.ravel(), a.reshape(-1)) assert_equal(a.ravel(order='A'), a.reshape(-1, order='A')) a = np.array([[0, 1], [2, 3]])[::-1, :] assert_equal(a.ravel(), [2, 3, 0, 1]) assert_equal(a.ravel(order='C'), [2, 3, 0, 1]) assert_equal(a.ravel(order='F'), [2, 0, 3, 1]) assert_equal(a.ravel(order='A'), [2, 3, 0, 1]) # 'K' doesn't reverse the axes of negative strides assert_equal(a.ravel(order='K'), [2, 3, 0, 1]) assert_(a.ravel(order='K').flags.owndata) # Not contiguous and 1-sized axis with non matching stride a = np.arange(2**3 * 2)[::2] a = a.reshape(2, 1, 2, 2).swapaxes(-1, -2) strides = list(a.strides) strides[1] = 123 a.strides = strides assert_(np.may_share_memory(a.ravel(order='K'), a)) assert_equal(a.ravel('K'), np.arange(0, 15, 2)) # General case of possible ravel that is not contiguous but # works and includes a 1-sized axis with non matching stride a = a.swapaxes(-1, -2) # swap back to C-order assert_(np.may_share_memory(a.ravel(order='C'), a)) assert_(np.may_share_memory(a.ravel(order='K'), a)) a = a.T # swap all to Fortran order assert_(np.may_share_memory(a.ravel(order='F'), a)) assert_(np.may_share_memory(a.ravel(order='K'), a)) # Test negative strides: a = np.arange(4)[::-1].reshape(2, 2) assert_(np.may_share_memory(a.ravel(order='C'), a)) assert_(np.may_share_memory(a.ravel(order='K'), a)) assert_equal(a.ravel('C'), [3, 2, 1, 0]) assert_equal(a.ravel('K'), [3, 2, 1, 0]) # Test keeporder with weirdly strided 1-sized dims 
(1-d first stride) a = np.arange(8)[::2].reshape(1, 2, 2, 1) # neither C, nor F order strides = list(a.strides) strides[0] = -12 strides[-1] = 0 a.strides = strides assert_(np.may_share_memory(a.ravel(order='K'), a)) assert_equal(a.ravel('K'), a.ravel('C')) # 1-element tidy strides test (NPY_RELAXED_STRIDES_CHECKING): a = np.array([[1]]) a.strides = (123, 432) # If the stride is not 8, NPY_RELAXED_STRIDES_CHECKING is messing # them up on purpose: if np.ones(1).strides == (8,): assert_(np.may_share_memory(a.ravel('K'), a)) assert_equal(a.ravel('K').strides, (a.dtype.itemsize,)) for order in ('C', 'F', 'A', 'K'): # 0-d corner case: a = np.array(0) assert_equal(a.ravel(order), [0]) assert_(np.may_share_memory(a.ravel(order), a)) #Test that certain non-inplace ravels work right (mostly) for 'K': b = np.arange(2**4 * 2)[::2].reshape(2, 2, 2, 2) a = b[..., ::2] assert_equal(a.ravel('K'), [0, 4, 8, 12, 16, 20, 24, 28]) assert_equal(a.ravel('C'), [0, 4, 8, 12, 16, 20, 24, 28]) assert_equal(a.ravel('A'), [0, 4, 8, 12, 16, 20, 24, 28]) assert_equal(a.ravel('F'), [0, 16, 8, 24, 4, 20, 12, 28]) a = b[::2, ...] 
        # (tail of test_ravel) results for a = b[::2, ...], a non-contiguous
        # first-axis slice.  'K' here walks memory in the same order as 'C';
        # 'F' interleaves column-major style.
        assert_equal(a.ravel('K'), [0, 2, 4, 6, 8, 10, 12, 14])
        assert_equal(a.ravel('C'), [0, 2, 4, 6, 8, 10, 12, 14])
        assert_equal(a.ravel('A'), [0, 2, 4, 6, 8, 10, 12, 14])
        assert_equal(a.ravel('F'), [0, 8, 4, 12, 2, 10, 6, 14])

    def test_swapaxes(self):
        # Exhaustively exercise swapaxes over every (i, j) axis pair,
        # including negative indices, on both a fresh contiguous array (k==0)
        # and the non-contiguous view produced by a previous swap (k==1).
        a = np.arange(1*2*3*4).reshape(1, 2, 3, 4).copy()
        idx = np.indices(a.shape)
        assert_(a.flags['OWNDATA'])
        b = a.copy()
        # check exceptions: out-of-range axes must raise ValueError
        assert_raises(ValueError, a.swapaxes, -5, 0)
        assert_raises(ValueError, a.swapaxes, 4, 0)
        assert_raises(ValueError, a.swapaxes, 0, -5)
        assert_raises(ValueError, a.swapaxes, 0, 4)

        for i in range(-4, 4):
            for j in range(-4, 4):
                for k, src in enumerate((a, b)):
                    c = src.swapaxes(i, j)
                    # check shape: axes i and j must be exchanged
                    shape = list(src.shape)
                    shape[i] = src.shape[j]
                    shape[j] = src.shape[i]
                    assert_equal(c.shape, shape, str((i, j, k)))
                    # check array contents via the (broadcast) index grids
                    i0, i1, i2, i3 = [dim-1 for dim in c.shape]
                    j0, j1, j2, j3 = [dim-1 for dim in src.shape]
                    assert_equal(src[idx[j0], idx[j1], idx[j2], idx[j3]],
                                 c[idx[i0], idx[i1], idx[i2], idx[i3]],
                                 str((i, j, k)))
                    # check a view is always returned, gh-5260
                    assert_(not c.flags['OWNDATA'], str((i, j, k)))
                    # check on non-contiguous input array: feed the swapped
                    # view back in as the next iteration's second source
                    if k == 1:
                        b = c

    def test_conjugate(self):
        # conj(), conjugate() and np.conjugate() must all agree for complex
        # (C and F order), integer, float and object arrays.
        a = np.array([1-1j, 1+1j, 23+23.0j])
        ac = a.conj()
        assert_equal(a.real, ac.real)
        assert_equal(a.imag, -ac.imag)
        assert_equal(ac, a.conjugate())
        assert_equal(ac, np.conjugate(a))

        a = np.array([1-1j, 1+1j, 23+23.0j], 'F')
        ac = a.conj()
        assert_equal(a.real, ac.real)
        assert_equal(a.imag, -ac.imag)
        assert_equal(ac, a.conjugate())
        assert_equal(ac, np.conjugate(a))

        # real-valued arrays: conjugation is the identity
        a = np.array([1, 2, 3])
        ac = a.conj()
        assert_equal(a, ac)
        assert_equal(ac, a.conjugate())
        assert_equal(ac, np.conjugate(a))

        a = np.array([1.0, 2.0, 3.0])
        ac = a.conj()
        assert_equal(a, ac)
        assert_equal(ac, a.conjugate())
        assert_equal(ac, np.conjugate(a))

        # object arrays delegate to each element's own .conjugate()
        a = np.array([1-1j, 1+1j, 1, 2.0], object)
        ac = a.conj()
        assert_equal(ac, [k.conjugate() for k in a])
        assert_equal(ac, a.conjugate())
        assert_equal(ac, np.conjugate(a))

        # (continues in next chunk: an object array containing a str, which
        # has no .conjugate(), must raise AttributeError)
        a = np.array([1-1j, 1,
2.0, 'f'], object) assert_raises(AttributeError, lambda: a.conj()) assert_raises(AttributeError, lambda: a.conjugate()) class TestBinop(object): def test_inplace(self): # test refcount 1 inplace conversion assert_array_almost_equal(np.array([0.5]) * np.array([1.0, 2.0]), [0.5, 1.0]) d = np.array([0.5, 0.5])[::2] assert_array_almost_equal(d * (d * np.array([1.0, 2.0])), [0.25, 0.5]) a = np.array([0.5]) b = np.array([0.5]) c = a + b c = a - b c = a * b c = a / b assert_equal(a, b) assert_almost_equal(c, 1.) c = a + b * 2. / b * a - a / b assert_equal(a, b) assert_equal(c, 0.5) # true divide a = np.array([5]) b = np.array([3]) c = (a * a) / b assert_almost_equal(c, 25 / 3) assert_equal(a, 5) assert_equal(b, 3) def test_extension_incref_elide(self): # test extension (e.g. cython) calling PyNumber_* slots without # increasing the reference counts # # def incref_elide(a): # d = input.copy() # refcount 1 # return d, d + d # PyNumber_Add without increasing refcount from numpy.core.multiarray_tests import incref_elide d = np.ones(5) orig, res = incref_elide(d) # the return original should not be changed to an inplace operation assert_array_equal(orig, d) assert_array_equal(res, d + d) def test_extension_incref_elide_stack(self): # scanning if the refcount == 1 object is on the python stack to check # that we are called directly from python is flawed as object may still # be above the stack pointer and we have no access to the top of it # # def incref_elide_l(d): # return l[4] + l[4] # PyNumber_Add without increasing refcount from numpy.core.multiarray_tests import incref_elide_l # padding with 1 makes sure the object on the stack is not overwriten l = [1, 1, 1, 1, np.ones(5)] res = incref_elide_l(l) # the return original should not be changed to an inplace operation assert_array_equal(l[4], np.ones(5)) assert_array_equal(res, l[4] + l[4]) def test_ufunc_override_rop_precedence(self): # Check that __rmul__ and other right-hand operations have # precedence over 
__numpy_ufunc__ ops = { '__add__': ('__radd__', np.add, True), '__sub__': ('__rsub__', np.subtract, True), '__mul__': ('__rmul__', np.multiply, True), '__truediv__': ('__rtruediv__', np.true_divide, True), '__floordiv__': ('__rfloordiv__', np.floor_divide, True), '__mod__': ('__rmod__', np.remainder, True), '__divmod__': ('__rdivmod__', None, False), '__pow__': ('__rpow__', np.power, True), '__lshift__': ('__rlshift__', np.left_shift, True), '__rshift__': ('__rrshift__', np.right_shift, True), '__and__': ('__rand__', np.bitwise_and, True), '__xor__': ('__rxor__', np.bitwise_xor, True), '__or__': ('__ror__', np.bitwise_or, True), '__ge__': ('__le__', np.less_equal, False), '__gt__': ('__lt__', np.less, False), '__le__': ('__ge__', np.greater_equal, False), '__lt__': ('__gt__', np.greater, False), '__eq__': ('__eq__', np.equal, False), '__ne__': ('__ne__', np.not_equal, False), } class OtherNdarraySubclass(np.ndarray): pass class OtherNdarraySubclassWithOverride(np.ndarray): def __numpy_ufunc__(self, *a, **kw): raise AssertionError(("__numpy_ufunc__ %r %r shouldn't have " "been called!") % (a, kw)) def check(op_name, ndsubclass): rop_name, np_op, has_iop = ops[op_name] if has_iop: iop_name = '__i' + op_name[2:] iop = getattr(operator, iop_name) if op_name == "__divmod__": op = divmod else: op = getattr(operator, op_name) # Dummy class def __init__(self, *a, **kw): pass def __numpy_ufunc__(self, *a, **kw): raise AssertionError(("__numpy_ufunc__ %r %r shouldn't have " "been called!") % (a, kw)) def __op__(self, *other): return "op" def __rop__(self, *other): return "rop" if ndsubclass: bases = (np.ndarray,) else: bases = (object,) dct = {'__init__': __init__, '__numpy_ufunc__': __numpy_ufunc__, op_name: __op__} if op_name != rop_name: dct[rop_name] = __rop__ cls = type("Rop" + rop_name, bases, dct) # Check behavior against both bare ndarray objects and a # ndarray subclasses with and without their own override obj = cls((1,), buffer=np.ones(1,)) arr_objs = 
[np.array([1]), np.array([2]).view(OtherNdarraySubclass), np.array([3]).view(OtherNdarraySubclassWithOverride), ] for arr in arr_objs: err_msg = "%r %r" % (op_name, arr,) # Check that ndarray op gives up if it sees a non-subclass if not isinstance(obj, arr.__class__): assert_equal(getattr(arr, op_name)(obj), NotImplemented, err_msg=err_msg) # Check that the Python binops have priority assert_equal(op(obj, arr), "op", err_msg=err_msg) if op_name == rop_name: assert_equal(op(arr, obj), "op", err_msg=err_msg) else: assert_equal(op(arr, obj), "rop", err_msg=err_msg) # Check that Python binops have priority also for in-place ops if has_iop: assert_equal(getattr(arr, iop_name)(obj), NotImplemented, err_msg=err_msg) if op_name != "__pow__": # inplace pow requires the other object to be # integer-like? assert_equal(iop(arr, obj), "rop", err_msg=err_msg) # Check that ufunc call __numpy_ufunc__ normally if np_op is not None: assert_raises(AssertionError, np_op, arr, obj, err_msg=err_msg) assert_raises(AssertionError, np_op, obj, arr, err_msg=err_msg) # Check all binary operations for op_name in sorted(ops.keys()): yield check, op_name, True yield check, op_name, False def test_ufunc_override_rop_simple(self): # Check parts of the binary op overriding behavior in an # explicit test case that is easier to understand. class SomeClass(object): def __numpy_ufunc__(self, *a, **kw): return "ufunc" def __mul__(self, other): return 123 def __rmul__(self, other): return 321 def __rsub__(self, other): return "no subs for me" def __gt__(self, other): return "yep" def __lt__(self, other): return "nope" class SomeClass2(SomeClass, np.ndarray): def __numpy_ufunc__(self, ufunc, method, i, inputs, **kw): if ufunc is np.multiply or ufunc is np.bitwise_and: return "ufunc" else: inputs = list(inputs) inputs[i] = np.asarray(self) func = getattr(ufunc, method) r = func(*inputs, **kw) if 'out' in kw: return r else: x = self.__class__(r.shape, dtype=r.dtype) x[...] 
= r return x class SomeClass3(SomeClass2): def __rsub__(self, other): return "sub for me" arr = np.array([0]) obj = SomeClass() obj2 = SomeClass2((1,), dtype=np.int_) obj2[0] = 9 obj3 = SomeClass3((1,), dtype=np.int_) obj3[0] = 4 # obj is first, so should get to define outcome. assert_equal(obj * arr, 123) # obj is second, but has __numpy_ufunc__ and defines __rmul__. assert_equal(arr * obj, 321) # obj is second, but has __numpy_ufunc__ and defines __rsub__. assert_equal(arr - obj, "no subs for me") # obj is second, but has __numpy_ufunc__ and defines __lt__. assert_equal(arr > obj, "nope") # obj is second, but has __numpy_ufunc__ and defines __gt__. assert_equal(arr < obj, "yep") # Called as a ufunc, obj.__numpy_ufunc__ is used. assert_equal(np.multiply(arr, obj), "ufunc") # obj is second, but has __numpy_ufunc__ and defines __rmul__. arr *= obj assert_equal(arr, 321) # obj2 is an ndarray subclass, so CPython takes care of the same rules. assert_equal(obj2 * arr, 123) assert_equal(arr * obj2, 321) assert_equal(arr - obj2, "no subs for me") assert_equal(arr > obj2, "nope") assert_equal(arr < obj2, "yep") # Called as a ufunc, obj2.__numpy_ufunc__ is called. assert_equal(np.multiply(arr, obj2), "ufunc") # Also when the method is not overridden. assert_equal(arr & obj2, "ufunc") arr *= obj2 assert_equal(arr, 321) obj2 += 33 assert_equal(obj2[0], 42) assert_equal(obj2.sum(), 42) assert_(isinstance(obj2, SomeClass2)) # Obj3 is subclass that defines __rsub__. CPython calls it. assert_equal(arr - obj3, "sub for me") assert_equal(obj2 - obj3, "sub for me") # obj3 is a subclass that defines __rmul__. CPython calls it. assert_equal(arr * obj3, 321) # But not here, since obj3.__rmul__ is obj2.__rmul__. assert_equal(obj2 * obj3, 123) # And of course, here obj3.__mul__ should be called. assert_equal(obj3 * obj2, 123) # obj3 defines __numpy_ufunc__ but obj3.__radd__ is obj2.__radd__. # (and both are just ndarray.__radd__); see #4815. 
res = obj2 + obj3 assert_equal(res, 46) assert_(isinstance(res, SomeClass2)) # Since obj3 is a subclass, it should have precedence, like CPython # would give, even though obj2 has __numpy_ufunc__ and __radd__. # See gh-4815 and gh-5747. res = obj3 + obj2 assert_equal(res, 46) assert_(isinstance(res, SomeClass3)) def test_ufunc_override_normalize_signature(self): # gh-5674 class SomeClass(object): def __numpy_ufunc__(self, ufunc, method, i, inputs, **kw): return kw a = SomeClass() kw = np.add(a, [1]) assert_('sig' not in kw and 'signature' not in kw) kw = np.add(a, [1], sig='ii->i') assert_('sig' not in kw and 'signature' in kw) assert_equal(kw['signature'], 'ii->i') kw = np.add(a, [1], signature='ii->i') assert_('sig' not in kw and 'signature' in kw) assert_equal(kw['signature'], 'ii->i') class TestCAPI(TestCase): def test_IsPythonScalar(self): from numpy.core.multiarray_tests import IsPythonScalar assert_(IsPythonScalar(b'foobar')) assert_(IsPythonScalar(1)) assert_(IsPythonScalar(2**80)) assert_(IsPythonScalar(2.)) assert_(IsPythonScalar("a")) class TestSubscripting(TestCase): def test_test_zero_rank(self): x = np.array([1, 2, 3]) self.assertTrue(isinstance(x[0], np.int_)) if sys.version_info[0] < 3: self.assertTrue(isinstance(x[0], int)) self.assertTrue(type(x[0, ...]) is np.ndarray) class TestPickling(TestCase): def test_roundtrip(self): import pickle carray = np.array([[2, 9], [7, 0], [3, 8]]) DATA = [ carray, np.transpose(carray), np.array([('xxx', 1, 2.0)], dtype=[('a', (str, 3)), ('b', int), ('c', float)]) ] for a in DATA: assert_equal(a, pickle.loads(a.dumps()), err_msg="%r" % a) def _loads(self, obj): if sys.version_info[0] >= 3: return np.loads(obj, encoding='latin1') else: return np.loads(obj) # version 0 pickles, using protocol=2 to pickle # version 0 doesn't have a version field def test_version0_int8(self): s = 
'\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x04\x85cnumpy\ndtype\nq\x04U\x02i1K\x00K\x01\x87Rq\x05(U\x01|NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89U\x04\x01\x02\x03\x04tb.' a = np.array([1, 2, 3, 4], dtype=np.int8) p = self._loads(asbytes(s)) assert_equal(a, p) def test_version0_float32(self): s = '\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x04\x85cnumpy\ndtype\nq\x04U\x02f4K\x00K\x01\x87Rq\x05(U\x01<NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89U\x10\x00\x00\x80?\x00\x00\x00@\x00\x00@@\x00\x00\x80@tb.' a = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32) p = self._loads(asbytes(s)) assert_equal(a, p) def test_version0_object(self): s = '\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x02\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(U\x01|NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89]q\x06(}q\x07U\x01aK\x01s}q\x08U\x01bK\x02setb.' a = np.array([{'a':1}, {'b':2}]) p = self._loads(asbytes(s)) assert_equal(a, p) # version 1 pickles, using protocol=2 to pickle def test_version1_int8(self): s = '\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x04\x85cnumpy\ndtype\nq\x04U\x02i1K\x00K\x01\x87Rq\x05(K\x01U\x01|NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89U\x04\x01\x02\x03\x04tb.' a = np.array([1, 2, 3, 4], dtype=np.int8) p = self._loads(asbytes(s)) assert_equal(a, p) def test_version1_float32(self): s = '\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x04\x85cnumpy\ndtype\nq\x04U\x02f4K\x00K\x01\x87Rq\x05(K\x01U\x01<NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89U\x10\x00\x00\x80?\x00\x00\x00@\x00\x00@@\x00\x00\x80@tb.' 
a = np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32) p = self._loads(asbytes(s)) assert_equal(a, p) def test_version1_object(self): s = '\x80\x02cnumpy.core._internal\n_reconstruct\nq\x01cnumpy\nndarray\nq\x02K\x00\x85U\x01b\x87Rq\x03(K\x01K\x02\x85cnumpy\ndtype\nq\x04U\x02O8K\x00K\x01\x87Rq\x05(K\x01U\x01|NNJ\xff\xff\xff\xffJ\xff\xff\xff\xfftb\x89]q\x06(}q\x07U\x01aK\x01s}q\x08U\x01bK\x02setb.' a = np.array([{'a':1}, {'b':2}]) p = self._loads(asbytes(s)) assert_equal(a, p) def test_subarray_int_shape(self): s = "cnumpy.core.multiarray\n_reconstruct\np0\n(cnumpy\nndarray\np1\n(I0\ntp2\nS'b'\np3\ntp4\nRp5\n(I1\n(I1\ntp6\ncnumpy\ndtype\np7\n(S'V6'\np8\nI0\nI1\ntp9\nRp10\n(I3\nS'|'\np11\nN(S'a'\np12\ng3\ntp13\n(dp14\ng12\n(g7\n(S'V4'\np15\nI0\nI1\ntp16\nRp17\n(I3\nS'|'\np18\n(g7\n(S'i1'\np19\nI0\nI1\ntp20\nRp21\n(I3\nS'|'\np22\nNNNI-1\nI-1\nI0\ntp23\nb(I2\nI2\ntp24\ntp25\nNNI4\nI1\nI0\ntp26\nbI0\ntp27\nsg3\n(g7\n(S'V2'\np28\nI0\nI1\ntp29\nRp30\n(I3\nS'|'\np31\n(g21\nI2\ntp32\nNNI2\nI1\nI0\ntp33\nbI4\ntp34\nsI6\nI1\nI0\ntp35\nbI00\nS'\\x01\\x01\\x01\\x01\\x01\\x02'\np36\ntp37\nb." 
        # (tail of test_subarray_int_shape) unpickle the stream above and
        # compare with the directly-constructed structured array
        a = np.array([(1, (1, 2))], dtype=[('a', 'i1', (2, 2)), ('b', 'i1', 2)])
        p = self._loads(asbytes(s))
        assert_equal(a, p)


class TestFancyIndexing(TestCase):
    # Fancy (list/tuple/boolean-mask) indexing, for both reads and writes.

    def test_list(self):
        # assigning through a list index keeps the array shape
        x = np.ones((1, 1))
        x[:, [0]] = 2.0
        assert_array_equal(x, np.array([[2.0]]))

        x = np.ones((1, 1, 1))
        x[:,:, [0]] = 2.0
        assert_array_equal(x, np.array([[[2.0]]]))

    def test_tuple(self):
        # same as test_list but indexing with a tuple
        x = np.ones((1, 1))
        x[:, (0,)] = 2.0
        assert_array_equal(x, np.array([[2.0]]))

        x = np.ones((1, 1, 1))
        x[:,:, (0,)] = 2.0
        assert_array_equal(x, np.array([[[2.0]]]))

    def test_mask(self):
        # a boolean mask selects exactly the True positions
        x = np.array([1, 2, 3, 4])
        m = np.array([0, 1, 0, 0], bool)
        assert_array_equal(x[m], np.array([2]))

    def test_mask2(self):
        # a lower-rank mask selects whole rows; a full-shape mask selects
        # individual elements
        x = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
        m = np.array([0, 1], bool)
        m2 = np.array([[0, 1, 0, 0], [1, 0, 0, 0]], bool)
        m3 = np.array([[0, 1, 0, 0], [0, 0, 0, 0]], bool)
        assert_array_equal(x[m], np.array([[5, 6, 7, 8]]))
        assert_array_equal(x[m2], np.array([2, 5]))
        assert_array_equal(x[m3], np.array([2]))

    def test_assign_mask(self):
        # assignment through a boolean mask writes only masked positions
        x = np.array([1, 2, 3, 4])
        m = np.array([0, 1, 0, 0], bool)
        x[m] = 5
        assert_array_equal(x, np.array([1, 5, 3, 4]))

    def test_assign_mask2(self):
        # masked assignment for row masks and element masks
        xorig = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
        m = np.array([0, 1], bool)
        m2 = np.array([[0, 1, 0, 0], [1, 0, 0, 0]], bool)
        m3 = np.array([[0, 1, 0, 0], [0, 0, 0, 0]], bool)
        x = xorig.copy()
        x[m] = 10
        assert_array_equal(x, np.array([[1, 2, 3, 4], [10, 10, 10, 10]]))
        x = xorig.copy()
        x[m2] = 10
        assert_array_equal(x, np.array([[1, 10, 3, 4], [10, 6, 7, 8]]))
        x = xorig.copy()
        x[m3] = 10
        assert_array_equal(x, np.array([[1, 10, 3, 4], [5, 6, 7, 8]]))


class TestStringCompare(TestCase):
    # Elementwise comparison operators on string arrays must match Python's
    # scalar string comparisons.

    def test_string(self):
        g1 = np.array(["This", "is", "example"])
        g2 = np.array(["This", "was", "example"])
        assert_array_equal(g1 == g2, [g1[i] == g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 != g2, [g1[i] != g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 <= g2, [g1[i] <= g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 >= g2, [g1[i] >= g2[i] for i in [0, 1, 2]])
        # (tail of test_string) strict inequality comparisons
        assert_array_equal(g1 < g2, [g1[i] < g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 > g2, [g1[i] > g2[i] for i in [0, 1, 2]])

    def test_mixed(self):
        # array vs. scalar string: the scalar is broadcast elementwise
        g1 = np.array(["spam", "spa", "spammer", "and eggs"])
        g2 = "spam"
        assert_array_equal(g1 == g2, [x == g2 for x in g1])
        assert_array_equal(g1 != g2, [x != g2 for x in g1])
        assert_array_equal(g1 < g2, [x < g2 for x in g1])
        assert_array_equal(g1 > g2, [x > g2 for x in g1])
        assert_array_equal(g1 <= g2, [x <= g2 for x in g1])
        assert_array_equal(g1 >= g2, [x >= g2 for x in g1])

    def test_unicode(self):
        # same checks for unicode arrays
        g1 = np.array([sixu("This"), sixu("is"), sixu("example")])
        g2 = np.array([sixu("This"), sixu("was"), sixu("example")])
        assert_array_equal(g1 == g2, [g1[i] == g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 != g2, [g1[i] != g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 <= g2, [g1[i] <= g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 >= g2, [g1[i] >= g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 < g2, [g1[i] < g2[i] for i in [0, 1, 2]])
        assert_array_equal(g1 > g2, [g1[i] > g2[i] for i in [0, 1, 2]])


class TestArgmax(TestCase):
    # (input, expected argmax index) pairs.  Per the expectations below,
    # the position of the first NaN (or complex value containing NaN) is
    # returned; the pure-complex rows imply comparison on real part first,
    # imaginary part as tiebreak.
    nan_arr = [
        ([0, 1, 2, 3, np.nan], 4),
        ([0, 1, 2, np.nan, 3], 3),
        ([np.nan, 0, 1, 2, 3], 0),
        ([np.nan, 0, np.nan, 2, 3], 0),
        ([0, 1, 2, 3, complex(0, np.nan)], 4),
        ([0, 1, 2, 3, complex(np.nan, 0)], 4),
        ([0, 1, 2, complex(np.nan, 0), 3], 3),
        ([0, 1, 2, complex(0, np.nan), 3], 3),
        ([complex(0, np.nan), 0, 1, 2, 3], 0),
        ([complex(np.nan, np.nan), 0, 1, 2, 3], 0),
        ([complex(np.nan, 0), complex(np.nan, 2), complex(np.nan, 1)], 0),
        ([complex(np.nan, np.nan), complex(np.nan, 2), complex(np.nan, 1)], 0),
        ([complex(np.nan, 0), complex(np.nan, 2), complex(np.nan, np.nan)], 0),
        ([complex(0, 0), complex(0, 2), complex(0, 1)], 1),
        ([complex(1, 0), complex(0, 2), complex(0, 1)], 0),
        ([complex(1, 0), complex(0, 2), complex(1, 1)], 2),

        # datetime64 ordering (list continues in the next chunk)
        ([np.datetime64('1923-04-14T12:43:12'),
          np.datetime64('1994-06-21T14:43:15'),
          np.datetime64('2001-10-15T04:10:32'),
          np.datetime64('1995-11-25T16:02:16'),
np.datetime64('2005-01-04T03:14:12'), np.datetime64('2041-12-03T14:05:03')], 5), ([np.datetime64('1935-09-14T04:40:11'), np.datetime64('1949-10-12T12:32:11'), np.datetime64('2010-01-03T05:14:12'), np.datetime64('2015-11-20T12:20:59'), np.datetime64('1932-09-23T10:10:13'), np.datetime64('2014-10-10T03:50:30')], 3), # Assorted tests with NaTs ([np.datetime64('NaT'), np.datetime64('NaT'), np.datetime64('2010-01-03T05:14:12'), np.datetime64('NaT'), np.datetime64('2015-09-23T10:10:13'), np.datetime64('1932-10-10T03:50:30')], 4), ([np.datetime64('2059-03-14T12:43:12'), np.datetime64('1996-09-21T14:43:15'), np.datetime64('NaT'), np.datetime64('2022-12-25T16:02:16'), np.datetime64('1963-10-04T03:14:12'), np.datetime64('2013-05-08T18:15:23')], 0), ([np.timedelta64(2, 's'), np.timedelta64(1, 's'), np.timedelta64('NaT', 's'), np.timedelta64(3, 's')], 3), ([np.timedelta64('NaT', 's')] * 3, 0), ([timedelta(days=5, seconds=14), timedelta(days=2, seconds=35), timedelta(days=-1, seconds=23)], 0), ([timedelta(days=1, seconds=43), timedelta(days=10, seconds=5), timedelta(days=5, seconds=14)], 1), ([timedelta(days=10, seconds=24), timedelta(days=10, seconds=5), timedelta(days=10, seconds=43)], 2), ([False, False, False, False, True], 4), ([False, False, False, True, False], 3), ([True, False, False, False, False], 0), ([True, False, True, False, False], 0), # Can't reduce a "flexible type" #(['a', 'z', 'aa', 'zz'], 3), #(['zz', 'a', 'aa', 'a'], 0), #(['aa', 'z', 'zz', 'a'], 2), ] def test_all(self): a = np.random.normal(0, 1, (4, 5, 6, 7, 8)) for i in range(a.ndim): amax = a.max(i) aargmax = a.argmax(i) axes = list(range(a.ndim)) axes.remove(i) assert_(np.all(amax == aargmax.choose(*a.transpose(i,*axes)))) def test_combinations(self): for arr, pos in self.nan_arr: assert_equal(np.argmax(arr), pos, err_msg="%r" % arr) assert_equal(arr[np.argmax(arr)], np.max(arr), err_msg="%r" % arr) def test_output_shape(self): # see also gh-616 a = np.ones((10, 5)) # Check some simple shape 
mismatches out = np.ones(11, dtype=np.int_) assert_raises(ValueError, a.argmax, -1, out) out = np.ones((2, 5), dtype=np.int_) assert_raises(ValueError, a.argmax, -1, out) # these could be relaxed possibly (used to allow even the previous) out = np.ones((1, 10), dtype=np.int_) assert_raises(ValueError, a.argmax, -1, np.ones((1, 10))) out = np.ones(10, dtype=np.int_) a.argmax(-1, out=out) assert_equal(out, a.argmax(-1)) def test_argmax_unicode(self): d = np.zeros(6031, dtype='<U9') d[5942] = "as" assert_equal(d.argmax(), 5942) def test_np_vs_ndarray(self): # make sure both ndarray.argmax and numpy.argmax support out/axis args a = np.random.normal(size=(2,3)) #check positional args out1 = np.zeros(2, dtype=int) out2 = np.zeros(2, dtype=int) assert_equal(a.argmax(1, out1), np.argmax(a, 1, out2)) assert_equal(out1, out2) #check keyword args out1 = np.zeros(3, dtype=int) out2 = np.zeros(3, dtype=int) assert_equal(a.argmax(out=out1, axis=0), np.argmax(a, out=out2, axis=0)) assert_equal(out1, out2) class TestArgmin(TestCase): nan_arr = [ ([0, 1, 2, 3, np.nan], 4), ([0, 1, 2, np.nan, 3], 3), ([np.nan, 0, 1, 2, 3], 0), ([np.nan, 0, np.nan, 2, 3], 0), ([0, 1, 2, 3, complex(0, np.nan)], 4), ([0, 1, 2, 3, complex(np.nan, 0)], 4), ([0, 1, 2, complex(np.nan, 0), 3], 3), ([0, 1, 2, complex(0, np.nan), 3], 3), ([complex(0, np.nan), 0, 1, 2, 3], 0), ([complex(np.nan, np.nan), 0, 1, 2, 3], 0), ([complex(np.nan, 0), complex(np.nan, 2), complex(np.nan, 1)], 0), ([complex(np.nan, np.nan), complex(np.nan, 2), complex(np.nan, 1)], 0), ([complex(np.nan, 0), complex(np.nan, 2), complex(np.nan, np.nan)], 0), ([complex(0, 0), complex(0, 2), complex(0, 1)], 0), ([complex(1, 0), complex(0, 2), complex(0, 1)], 2), ([complex(1, 0), complex(0, 2), complex(1, 1)], 1), ([np.datetime64('1923-04-14T12:43:12'), np.datetime64('1994-06-21T14:43:15'), np.datetime64('2001-10-15T04:10:32'), np.datetime64('1995-11-25T16:02:16'), np.datetime64('2005-01-04T03:14:12'), np.datetime64('2041-12-03T14:05:03')], 0), 
([np.datetime64('1935-09-14T04:40:11'), np.datetime64('1949-10-12T12:32:11'), np.datetime64('2010-01-03T05:14:12'), np.datetime64('2014-11-20T12:20:59'), np.datetime64('2015-09-23T10:10:13'), np.datetime64('1932-10-10T03:50:30')], 5), # Assorted tests with NaTs ([np.datetime64('NaT'), np.datetime64('NaT'), np.datetime64('2010-01-03T05:14:12'), np.datetime64('NaT'), np.datetime64('2015-09-23T10:10:13'), np.datetime64('1932-10-10T03:50:30')], 5), ([np.datetime64('2059-03-14T12:43:12'), np.datetime64('1996-09-21T14:43:15'), np.datetime64('NaT'), np.datetime64('2022-12-25T16:02:16'), np.datetime64('1963-10-04T03:14:12'), np.datetime64('2013-05-08T18:15:23')], 4), ([np.timedelta64(2, 's'), np.timedelta64(1, 's'), np.timedelta64('NaT', 's'), np.timedelta64(3, 's')], 1), ([np.timedelta64('NaT', 's')] * 3, 0), ([timedelta(days=5, seconds=14), timedelta(days=2, seconds=35), timedelta(days=-1, seconds=23)], 2), ([timedelta(days=1, seconds=43), timedelta(days=10, seconds=5), timedelta(days=5, seconds=14)], 0), ([timedelta(days=10, seconds=24), timedelta(days=10, seconds=5), timedelta(days=10, seconds=43)], 1), ([True, True, True, True, False], 4), ([True, True, True, False, True], 3), ([False, True, True, True, True], 0), ([False, True, False, True, True], 0), # Can't reduce a "flexible type" #(['a', 'z', 'aa', 'zz'], 0), #(['zz', 'a', 'aa', 'a'], 1), #(['aa', 'z', 'zz', 'a'], 3), ] def test_all(self): a = np.random.normal(0, 1, (4, 5, 6, 7, 8)) for i in range(a.ndim): amin = a.min(i) aargmin = a.argmin(i) axes = list(range(a.ndim)) axes.remove(i) assert_(np.all(amin == aargmin.choose(*a.transpose(i,*axes)))) def test_combinations(self): for arr, pos in self.nan_arr: assert_equal(np.argmin(arr), pos, err_msg="%r" % arr) assert_equal(arr[np.argmin(arr)], np.min(arr), err_msg="%r" % arr) def test_minimum_signed_integers(self): a = np.array([1, -2**7, -2**7 + 1], dtype=np.int8) assert_equal(np.argmin(a), 1) a = np.array([1, -2**15, -2**15 + 1], dtype=np.int16) 
        # (tail of test_minimum_signed_integers) the lowest representable
        # value of each signed width must be located correctly
        assert_equal(np.argmin(a), 1)

        a = np.array([1, -2**31, -2**31 + 1], dtype=np.int32)
        assert_equal(np.argmin(a), 1)

        a = np.array([1, -2**63, -2**63 + 1], dtype=np.int64)
        assert_equal(np.argmin(a), 1)

    def test_output_shape(self):
        # see also gh-616
        a = np.ones((10, 5))
        # Check some simple shape mismatches
        out = np.ones(11, dtype=np.int_)
        assert_raises(ValueError, a.argmin, -1, out)

        out = np.ones((2, 5), dtype=np.int_)
        assert_raises(ValueError, a.argmin, -1, out)

        # these could be relaxed possibly (used to allow even the previous)
        out = np.ones((1, 10), dtype=np.int_)
        assert_raises(ValueError, a.argmin, -1, np.ones((1, 10)))

        # a correctly-shaped out is filled in place
        out = np.ones(10, dtype=np.int_)
        a.argmin(-1, out=out)
        assert_equal(out, a.argmin(-1))

    def test_argmin_unicode(self):
        # argmin must work on wide unicode dtypes
        d = np.ones(6031, dtype='<U9')
        d[6001] = "0"
        assert_equal(d.argmin(), 6001)

    def test_np_vs_ndarray(self):
        # make sure both ndarray.argmin and numpy.argmin support out/axis args
        a = np.random.normal(size=(2,3))

        # check positional args
        out1 = np.zeros(2, dtype=int)
        out2 = np.ones(2, dtype=int)
        assert_equal(a.argmin(1, out1), np.argmin(a, 1, out2))
        assert_equal(out1, out2)

        # check keyword args
        out1 = np.zeros(3, dtype=int)
        out2 = np.ones(3, dtype=int)
        assert_equal(a.argmin(out=out1, axis=0),
                     np.argmin(a, out=out2, axis=0))
        assert_equal(out1, out2)


class TestMinMax(TestCase):
    # amin/amax: scalar inputs, axis validation, and NaT handling for
    # datetime/timedelta dtypes.

    def test_scalar(self):
        # a scalar has no axis 1; axis=0 and axis=None are accepted
        assert_raises(ValueError, np.amax, 1, 1)
        assert_raises(ValueError, np.amin, 1, 1)

        assert_equal(np.amax(1, axis=0), 1)
        assert_equal(np.amin(1, axis=0), 1)
        assert_equal(np.amax(1, axis=None), 1)
        assert_equal(np.amin(1, axis=None), 1)

    def test_axis(self):
        # out-of-range axis raises; a valid axis reduces along it
        assert_raises(ValueError, np.amax, [1, 2, 3], 1000)
        assert_equal(np.amax([[1, 2, 3]], axis=1), 3)

    def test_datetime(self):
        # NaTs are ignored
        for dtype in ('m8[s]', 'm8[Y]'):
            a = np.arange(10).astype(dtype)
            a[3] = 'NaT'
            assert_equal(np.amin(a), a[0])
            assert_equal(np.amax(a), a[9])
            a[0] = 'NaT'
            assert_equal(np.amin(a), a[1])
            assert_equal(np.amax(a), a[9])
            # all-NaT: falls back to the first element
            # (continues in the next chunk with the amax check)
            a.fill('NaT')
            assert_equal(np.amin(a), a[0])
assert_equal(np.amax(a), a[0]) class TestNewaxis(TestCase): def test_basic(self): sk = np.array([0, -0.1, 0.1]) res = 250*sk[:, np.newaxis] assert_almost_equal(res.ravel(), 250*sk) class TestClip(TestCase): def _check_range(self, x, cmin, cmax): assert_(np.all(x >= cmin)) assert_(np.all(x <= cmax)) def _clip_type(self, type_group, array_max, clip_min, clip_max, inplace=False, expected_min=None, expected_max=None): if expected_min is None: expected_min = clip_min if expected_max is None: expected_max = clip_max for T in np.sctypes[type_group]: if sys.byteorder == 'little': byte_orders = ['=', '>'] else: byte_orders = ['<', '='] for byteorder in byte_orders: dtype = np.dtype(T).newbyteorder(byteorder) x = (np.random.random(1000) * array_max).astype(dtype) if inplace: x.clip(clip_min, clip_max, x) else: x = x.clip(clip_min, clip_max) byteorder = '=' if x.dtype.byteorder == '|': byteorder = '|' assert_equal(x.dtype.byteorder, byteorder) self._check_range(x, expected_min, expected_max) return x def test_basic(self): for inplace in [False, True]: self._clip_type( 'float', 1024, -12.8, 100.2, inplace=inplace) self._clip_type( 'float', 1024, 0, 0, inplace=inplace) self._clip_type( 'int', 1024, -120, 100.5, inplace=inplace) self._clip_type( 'int', 1024, 0, 0, inplace=inplace) self._clip_type( 'uint', 1024, 0, 0, inplace=inplace) self._clip_type( 'uint', 1024, -120, 100, inplace=inplace, expected_min=0) def test_record_array(self): rec = np.array([(-5, 2.0, 3.0), (5.0, 4.0, 3.0)], dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')]) y = rec['x'].clip(-0.3, 0.5) self._check_range(y, -0.3, 0.5) def test_max_or_min(self): val = np.array([0, 1, 2, 3, 4, 5, 6, 7]) x = val.clip(3) assert_(np.all(x >= 3)) x = val.clip(min=3) assert_(np.all(x >= 3)) x = val.clip(max=4) assert_(np.all(x <= 4)) class TestPutmask(object): def tst_basic(self, x, T, mask, val): np.putmask(x, mask, val) assert_(np.all(x[mask] == T(val))) assert_(x.dtype == T) def test_ip_types(self): unchecked_types = [str, 
unicode, np.void, object] x = np.random.random(1000)*100 mask = x < 40 for val in [-100, 0, 15]: for types in np.sctypes.values(): for T in types: if T not in unchecked_types: yield self.tst_basic, x.copy().astype(T), T, mask, val def test_mask_size(self): assert_raises(ValueError, np.putmask, np.array([1, 2, 3]), [True], 5) def tst_byteorder(self, dtype): x = np.array([1, 2, 3], dtype) np.putmask(x, [True, False, True], -1) assert_array_equal(x, [-1, 2, -1]) def test_ip_byteorder(self): for dtype in ('>i4', '<i4'): yield self.tst_byteorder, dtype def test_record_array(self): # Note mixed byteorder. rec = np.array([(-5, 2.0, 3.0), (5.0, 4.0, 3.0)], dtype=[('x', '<f8'), ('y', '>f8'), ('z', '<f8')]) np.putmask(rec['x'], [True, False], 10) assert_array_equal(rec['x'], [10, 5]) assert_array_equal(rec['y'], [2, 4]) assert_array_equal(rec['z'], [3, 3]) np.putmask(rec['y'], [True, False], 11) assert_array_equal(rec['x'], [10, 5]) assert_array_equal(rec['y'], [11, 4]) assert_array_equal(rec['z'], [3, 3]) def test_masked_array(self): ## x = np.array([1,2,3]) ## z = np.ma.array(x,mask=[True,False,False]) ## np.putmask(z,[True,True,True],3) pass class TestTake(object): def tst_basic(self, x): ind = list(range(x.shape[0])) assert_array_equal(x.take(ind, axis=0), x) def test_ip_types(self): unchecked_types = [str, unicode, np.void, object] x = np.random.random(24)*100 x.shape = 2, 3, 4 for types in np.sctypes.values(): for T in types: if T not in unchecked_types: yield self.tst_basic, x.copy().astype(T) def test_raise(self): x = np.random.random(24)*100 x.shape = 2, 3, 4 assert_raises(IndexError, x.take, [0, 1, 2], axis=0) assert_raises(IndexError, x.take, [-3], axis=0) assert_array_equal(x.take([-1], axis=0)[0], x[1]) def test_clip(self): x = np.random.random(24)*100 x.shape = 2, 3, 4 assert_array_equal(x.take([-1], axis=0, mode='clip')[0], x[0]) assert_array_equal(x.take([2], axis=0, mode='clip')[0], x[1]) def test_wrap(self): x = np.random.random(24)*100 x.shape = 2, 3, 4 
assert_array_equal(x.take([-1], axis=0, mode='wrap')[0], x[1]) assert_array_equal(x.take([2], axis=0, mode='wrap')[0], x[0]) assert_array_equal(x.take([3], axis=0, mode='wrap')[0], x[1]) def tst_byteorder(self, dtype): x = np.array([1, 2, 3], dtype) assert_array_equal(x.take([0, 2, 1]), [1, 3, 2]) def test_ip_byteorder(self): for dtype in ('>i4', '<i4'): yield self.tst_byteorder, dtype def test_record_array(self): # Note mixed byteorder. rec = np.array([(-5, 2.0, 3.0), (5.0, 4.0, 3.0)], dtype=[('x', '<f8'), ('y', '>f8'), ('z', '<f8')]) rec1 = rec.take([1]) assert_(rec1['x'] == 5.0 and rec1['y'] == 4.0) class TestLexsort(TestCase): def test_basic(self): a = [1, 2, 1, 3, 1, 5] b = [0, 4, 5, 6, 2, 3] idx = np.lexsort((b, a)) expected_idx = np.array([0, 4, 2, 1, 3, 5]) assert_array_equal(idx, expected_idx) x = np.vstack((b, a)) idx = np.lexsort(x) assert_array_equal(idx, expected_idx) assert_array_equal(x[1][idx], np.sort(x[1])) def test_datetime(self): a = np.array([0,0,0], dtype='datetime64[D]') b = np.array([2,1,0], dtype='datetime64[D]') idx = np.lexsort((b, a)) expected_idx = np.array([2, 1, 0]) assert_array_equal(idx, expected_idx) a = np.array([0,0,0], dtype='timedelta64[D]') b = np.array([2,1,0], dtype='timedelta64[D]') idx = np.lexsort((b, a)) expected_idx = np.array([2, 1, 0]) assert_array_equal(idx, expected_idx) class TestIO(object): """Test tofile, fromfile, tobytes, and fromstring""" def setUp(self): shape = (2, 4, 3) rand = np.random.random self.x = rand(shape) + rand(shape).astype(np.complex)*1j self.x[0,:, 1] = [np.nan, np.inf, -np.inf, np.nan] self.dtype = self.x.dtype self.tempdir = tempfile.mkdtemp() self.filename = tempfile.mktemp(dir=self.tempdir) def tearDown(self): shutil.rmtree(self.tempdir) def test_bool_fromstring(self): v = np.array([True, False, True, False], dtype=np.bool_) y = np.fromstring('1 0 -2.3 0.0', sep=' ', dtype=np.bool_) assert_array_equal(v, y) def test_uint64_fromstring(self): d = np.fromstring("9923372036854775807 
104783749223640", dtype=np.uint64, sep=' ') e = np.array([9923372036854775807, 104783749223640], dtype=np.uint64) assert_array_equal(d, e) def test_int64_fromstring(self): d = np.fromstring("-25041670086757 104783749223640", dtype=np.int64, sep=' ') e = np.array([-25041670086757, 104783749223640], dtype=np.int64) assert_array_equal(d, e) def test_empty_files_binary(self): f = open(self.filename, 'w') f.close() y = np.fromfile(self.filename) assert_(y.size == 0, "Array not empty") def test_empty_files_text(self): f = open(self.filename, 'w') f.close() y = np.fromfile(self.filename, sep=" ") assert_(y.size == 0, "Array not empty") def test_roundtrip_file(self): f = open(self.filename, 'wb') self.x.tofile(f) f.close() # NB. doesn't work with flush+seek, due to use of C stdio f = open(self.filename, 'rb') y = np.fromfile(f, dtype=self.dtype) f.close() assert_array_equal(y, self.x.flat) def test_roundtrip_filename(self): self.x.tofile(self.filename) y = np.fromfile(self.filename, dtype=self.dtype) assert_array_equal(y, self.x.flat) def test_roundtrip_binary_str(self): s = self.x.tobytes() y = np.fromstring(s, dtype=self.dtype) assert_array_equal(y, self.x.flat) s = self.x.tobytes('F') y = np.fromstring(s, dtype=self.dtype) assert_array_equal(y, self.x.flatten('F')) def test_roundtrip_str(self): x = self.x.real.ravel() s = "@".join(map(str, x)) y = np.fromstring(s, sep="@") # NB. 
str imbues less precision nan_mask = ~np.isfinite(x) assert_array_equal(x[nan_mask], y[nan_mask]) assert_array_almost_equal(x[~nan_mask], y[~nan_mask], decimal=5) def test_roundtrip_repr(self): x = self.x.real.ravel() s = "@".join(map(repr, x)) y = np.fromstring(s, sep="@") assert_array_equal(x, y) def test_file_position_after_fromfile(self): # gh-4118 sizes = [io.DEFAULT_BUFFER_SIZE//8, io.DEFAULT_BUFFER_SIZE, io.DEFAULT_BUFFER_SIZE*8] for size in sizes: f = open(self.filename, 'wb') f.seek(size-1) f.write(b'\0') f.close() for mode in ['rb', 'r+b']: err_msg = "%d %s" % (size, mode) f = open(self.filename, mode) f.read(2) np.fromfile(f, dtype=np.float64, count=1) pos = f.tell() f.close() assert_equal(pos, 10, err_msg=err_msg) def test_file_position_after_tofile(self): # gh-4118 sizes = [io.DEFAULT_BUFFER_SIZE//8, io.DEFAULT_BUFFER_SIZE, io.DEFAULT_BUFFER_SIZE*8] for size in sizes: err_msg = "%d" % (size,) f = open(self.filename, 'wb') f.seek(size-1) f.write(b'\0') f.seek(10) f.write(b'12') np.array([0], dtype=np.float64).tofile(f) pos = f.tell() f.close() assert_equal(pos, 10 + 2 + 8, err_msg=err_msg) f = open(self.filename, 'r+b') f.read(2) f.seek(0, 1) # seek between read&write required by ANSI C np.array([0], dtype=np.float64).tofile(f) pos = f.tell() f.close() assert_equal(pos, 10, err_msg=err_msg) def _check_from(self, s, value, **kw): y = np.fromstring(asbytes(s), **kw) assert_array_equal(y, value) f = open(self.filename, 'wb') f.write(asbytes(s)) f.close() y = np.fromfile(self.filename, **kw) assert_array_equal(y, value) def test_nan(self): self._check_from( "nan +nan -nan NaN nan(foo) +NaN(BAR) -NAN(q_u_u_x_)", [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], sep=' ') def test_inf(self): self._check_from( "inf +inf -inf infinity -Infinity iNfInItY -inF", [np.inf, np.inf, -np.inf, np.inf, -np.inf, np.inf, -np.inf], sep=' ') def test_numbers(self): self._check_from("1.234 -1.234 .3 .3e55 -123133.1231e+133", [1.234, -1.234, .3, .3e55, 
-123133.1231e+133], sep=' ') def test_binary(self): self._check_from('\x00\x00\x80?\x00\x00\x00@\x00\x00@@\x00\x00\x80@', np.array([1, 2, 3, 4]), dtype='<f4') @dec.slow # takes > 1 minute on mechanical hard drive def test_big_binary(self): """Test workarounds for 32-bit limited fwrite, fseek, and ftell calls in windows. These normally would hang doing something like this. See http://projects.scipy.org/numpy/ticket/1660""" if sys.platform != 'win32': return try: # before workarounds, only up to 2**32-1 worked fourgbplus = 2**32 + 2**16 testbytes = np.arange(8, dtype=np.int8) n = len(testbytes) flike = tempfile.NamedTemporaryFile() f = flike.file np.tile(testbytes, fourgbplus // testbytes.nbytes).tofile(f) flike.seek(0) a = np.fromfile(f, dtype=np.int8) flike.close() assert_(len(a) == fourgbplus) # check only start and end for speed: assert_((a[:n] == testbytes).all()) assert_((a[-n:] == testbytes).all()) except (MemoryError, ValueError): pass def test_string(self): self._check_from('1,2,3,4', [1., 2., 3., 4.], sep=',') def test_counted_string(self): self._check_from('1,2,3,4', [1., 2., 3., 4.], count=4, sep=',') self._check_from('1,2,3,4', [1., 2., 3.], count=3, sep=',') self._check_from('1,2,3,4', [1., 2., 3., 4.], count=-1, sep=',') def test_string_with_ws(self): self._check_from('1 2 3 4 ', [1, 2, 3, 4], dtype=int, sep=' ') def test_counted_string_with_ws(self): self._check_from('1 2 3 4 ', [1, 2, 3], count=3, dtype=int, sep=' ') def test_ascii(self): self._check_from('1 , 2 , 3 , 4', [1., 2., 3., 4.], sep=',') self._check_from('1,2,3,4', [1., 2., 3., 4.], dtype=float, sep=',') def test_malformed(self): self._check_from('1.234 1,234', [1.234, 1.], sep=' ') def test_long_sep(self): self._check_from('1_x_3_x_4_x_5', [1, 3, 4, 5], sep='_x_') def test_dtype(self): v = np.array([1, 2, 3, 4], dtype=np.int_) self._check_from('1,2,3,4', v, sep=',', dtype=np.int_) def test_dtype_bool(self): # can't use _check_from because fromstring can't handle True/False v = 
np.array([True, False, True, False], dtype=np.bool_) s = '1,0,-2.3,0' f = open(self.filename, 'wb') f.write(asbytes(s)) f.close() y = np.fromfile(self.filename, sep=',', dtype=np.bool_) assert_(y.dtype == '?') assert_array_equal(y, v) def test_tofile_sep(self): x = np.array([1.51, 2, 3.51, 4], dtype=float) f = open(self.filename, 'w') x.tofile(f, sep=',') f.close() f = open(self.filename, 'r') s = f.read() f.close() assert_equal(s, '1.51,2.0,3.51,4.0') def test_tofile_format(self): x = np.array([1.51, 2, 3.51, 4], dtype=float) f = open(self.filename, 'w') x.tofile(f, sep=',', format='%.2f') f.close() f = open(self.filename, 'r') s = f.read() f.close() assert_equal(s, '1.51,2.00,3.51,4.00') def test_locale(self): in_foreign_locale(self.test_numbers)() in_foreign_locale(self.test_nan)() in_foreign_locale(self.test_inf)() in_foreign_locale(self.test_counted_string)() in_foreign_locale(self.test_ascii)() in_foreign_locale(self.test_malformed)() in_foreign_locale(self.test_tofile_sep)() in_foreign_locale(self.test_tofile_format)() class TestFromBuffer(object): def tst_basic(self, buffer, expected, kwargs): assert_array_equal(np.frombuffer(buffer,**kwargs), expected) def test_ip_basic(self): for byteorder in ['<', '>']: for dtype in [float, int, np.complex]: dt = np.dtype(dtype).newbyteorder(byteorder) x = (np.random.random((4, 7))*5).astype(dt) buf = x.tobytes() yield self.tst_basic, buf, x.flat, {'dtype':dt} def test_empty(self): yield self.tst_basic, asbytes(''), np.array([]), {} class TestFlat(TestCase): def setUp(self): a0 = np.arange(20.0) a = a0.reshape(4, 5) a0.shape = (4, 5) a.flags.writeable = False self.a = a self.b = a[::2, ::2] self.a0 = a0 self.b0 = a0[::2, ::2] def test_contiguous(self): testpassed = False try: self.a.flat[12] = 100.0 except ValueError: testpassed = True assert testpassed assert self.a.flat[12] == 12.0 def test_discontiguous(self): testpassed = False try: self.b.flat[4] = 100.0 except ValueError: testpassed = True assert testpassed assert 
self.b.flat[4] == 12.0 def test___array__(self): c = self.a.flat.__array__() d = self.b.flat.__array__() e = self.a0.flat.__array__() f = self.b0.flat.__array__() assert c.flags.writeable is False assert d.flags.writeable is False assert e.flags.writeable is True assert f.flags.writeable is True assert c.flags.updateifcopy is False assert d.flags.updateifcopy is False assert e.flags.updateifcopy is False assert f.flags.updateifcopy is True assert f.base is self.b0 class TestResize(TestCase): def test_basic(self): x = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) x.resize((5, 5)) assert_array_equal(x.flat[:9], np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]).flat) assert_array_equal(x[9:].flat, 0) def test_check_reference(self): x = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) y = x self.assertRaises(ValueError, x.resize, (5, 1)) del y # avoid pyflakes unused variable warning. def test_int_shape(self): x = np.eye(3) x.resize(3) assert_array_equal(x, np.eye(3)[0,:]) def test_none_shape(self): x = np.eye(3) x.resize(None) assert_array_equal(x, np.eye(3)) x.resize() assert_array_equal(x, np.eye(3)) def test_invalid_arguements(self): self.assertRaises(TypeError, np.eye(3).resize, 'hi') self.assertRaises(ValueError, np.eye(3).resize, -1) self.assertRaises(TypeError, np.eye(3).resize, order=1) self.assertRaises(TypeError, np.eye(3).resize, refcheck='hi') def test_freeform_shape(self): x = np.eye(3) x.resize(3, 2, 1) assert_(x.shape == (3, 2, 1)) def test_zeros_appended(self): x = np.eye(3) x.resize(2, 3, 3) assert_array_equal(x[0], np.eye(3)) assert_array_equal(x[1], np.zeros((3, 3))) def test_obj_obj(self): # check memory is initialized on resize, gh-4857 a = np.ones(10, dtype=[('k', object, 2)]) a.resize(15,) assert_equal(a.shape, (15,)) assert_array_equal(a['k'][-5:], 0) assert_array_equal(a['k'][:-5], 1) class TestRecord(TestCase): def test_field_rename(self): dt = np.dtype([('f', float), ('i', int)]) dt.names = ['p', 'q'] assert_equal(dt.names, ['p', 'q']) if 
sys.version_info[0] >= 3: def test_bytes_fields(self): # Bytes are not allowed in field names and not recognized in titles # on Py3 assert_raises(TypeError, np.dtype, [(asbytes('a'), int)]) assert_raises(TypeError, np.dtype, [(('b', asbytes('a')), int)]) dt = np.dtype([((asbytes('a'), 'b'), int)]) assert_raises(ValueError, dt.__getitem__, asbytes('a')) x = np.array([(1,), (2,), (3,)], dtype=dt) assert_raises(IndexError, x.__getitem__, asbytes('a')) y = x[0] assert_raises(IndexError, y.__getitem__, asbytes('a')) else: def test_unicode_field_titles(self): # Unicode field titles are added to field dict on Py2 title = unicode('b') dt = np.dtype([((title, 'a'), int)]) dt[title] dt['a'] x = np.array([(1,), (2,), (3,)], dtype=dt) x[title] x['a'] y = x[0] y[title] y['a'] def test_unicode_field_names(self): # Unicode field names are not allowed on Py2 title = unicode('b') assert_raises(TypeError, np.dtype, [(title, int)]) assert_raises(TypeError, np.dtype, [(('a', title), int)]) def test_field_names(self): # Test unicode and 8-bit / byte strings can be used a = np.zeros((1,), dtype=[('f1', 'i4'), ('f2', 'i4'), ('f3', [('sf1', 'i4')])]) is_py3 = sys.version_info[0] >= 3 if is_py3: funcs = (str,) # byte string indexing fails gracefully assert_raises(IndexError, a.__setitem__, asbytes('f1'), 1) assert_raises(IndexError, a.__getitem__, asbytes('f1')) assert_raises(IndexError, a['f1'].__setitem__, asbytes('sf1'), 1) assert_raises(IndexError, a['f1'].__getitem__, asbytes('sf1')) else: funcs = (str, unicode) for func in funcs: b = a.copy() fn1 = func('f1') b[fn1] = 1 assert_equal(b[fn1], 1) fnn = func('not at all') assert_raises(ValueError, b.__setitem__, fnn, 1) assert_raises(ValueError, b.__getitem__, fnn) b[0][fn1] = 2 assert_equal(b[fn1], 2) # Subfield assert_raises(IndexError, b[0].__setitem__, fnn, 1) assert_raises(IndexError, b[0].__getitem__, fnn) # Subfield fn3 = func('f3') sfn1 = func('sf1') b[fn3][sfn1] = 1 assert_equal(b[fn3][sfn1], 1) assert_raises(ValueError, 
b[fn3].__setitem__, fnn, 1) assert_raises(ValueError, b[fn3].__getitem__, fnn) # multiple Subfields fn2 = func('f2') b[fn2] = 3 assert_equal(b[['f1', 'f2']][0].tolist(), (2, 3)) assert_equal(b[['f2', 'f1']][0].tolist(), (3, 2)) assert_equal(b[['f1', 'f3']][0].tolist(), (2, (1,))) # view of subfield view/copy assert_equal(b[['f1', 'f2']][0].view(('i4', 2)).tolist(), (2, 3)) assert_equal(b[['f2', 'f1']][0].view(('i4', 2)).tolist(), (3, 2)) view_dtype = [('f1', 'i4'), ('f3', [('', 'i4')])] assert_equal(b[['f1', 'f3']][0].view(view_dtype).tolist(), (2, (1,))) # non-ascii unicode field indexing is well behaved if not is_py3: raise SkipTest('non ascii unicode field indexing skipped; ' 'raises segfault on python 2.x') else: assert_raises(ValueError, a.__setitem__, sixu('\u03e0'), 1) assert_raises(ValueError, a.__getitem__, sixu('\u03e0')) def test_field_names_deprecation(self): def collect_warnings(f, *args, **kwargs): with warnings.catch_warnings(record=True) as log: warnings.simplefilter("always") f(*args, **kwargs) return [w.category for w in log] a = np.zeros((1,), dtype=[('f1', 'i4'), ('f2', 'i4'), ('f3', [('sf1', 'i4')])]) a['f1'][0] = 1 a['f2'][0] = 2 a['f3'][0] = (3,) b = np.zeros((1,), dtype=[('f1', 'i4'), ('f2', 'i4'), ('f3', [('sf1', 'i4')])]) b['f1'][0] = 1 b['f2'][0] = 2 b['f3'][0] = (3,) # All the different functions raise a warning, but not an error, and # 'a' is not modified: assert_equal(collect_warnings(a[['f1', 'f2']].__setitem__, 0, (10, 20)), [FutureWarning]) assert_equal(a, b) # Views also warn subset = a[['f1', 'f2']] subset_view = subset.view() assert_equal(collect_warnings(subset_view['f1'].__setitem__, 0, 10), [FutureWarning]) # But the write goes through: assert_equal(subset['f1'][0], 10) # Only one warning per multiple field indexing, though (even if there # are multiple views involved): assert_equal(collect_warnings(subset['f1'].__setitem__, 0, 10), []) def test_record_hash(self): a = np.array([(1, 2), (1, 2)], dtype='i1,i2') a.flags.writeable 
= False b = np.array([(1, 2), (3, 4)], dtype=[('num1', 'i1'), ('num2', 'i2')]) b.flags.writeable = False c = np.array([(1, 2), (3, 4)], dtype='i1,i2') c.flags.writeable = False self.assertTrue(hash(a[0]) == hash(a[1])) self.assertTrue(hash(a[0]) == hash(b[0])) self.assertTrue(hash(a[0]) != hash(b[1])) self.assertTrue(hash(c[0]) == hash(a[0]) and c[0] == a[0]) def test_record_no_hash(self): a = np.array([(1, 2), (1, 2)], dtype='i1,i2') self.assertRaises(TypeError, hash, a[0]) def test_empty_structure_creation(self): # make sure these do not raise errors (gh-5631) np.array([()], dtype={'names': [], 'formats': [], 'offsets': [], 'itemsize': 12}) np.array([(), (), (), (), ()], dtype={'names': [], 'formats': [], 'offsets': [], 'itemsize': 12}) class TestView(TestCase): def test_basic(self): x = np.array([(1, 2, 3, 4), (5, 6, 7, 8)], dtype=[('r', np.int8), ('g', np.int8), ('b', np.int8), ('a', np.int8)]) # We must be specific about the endianness here: y = x.view(dtype='<i4') # ... and again without the keyword. 
z = x.view('<i4') assert_array_equal(y, z) assert_array_equal(y, [67305985, 134678021]) def _mean(a, **args): return a.mean(**args) def _var(a, **args): return a.var(**args) def _std(a, **args): return a.std(**args) class TestStats(TestCase): funcs = [_mean, _var, _std] def setUp(self): np.random.seed(range(3)) self.rmat = np.random.random((4, 5)) self.cmat = self.rmat + 1j * self.rmat self.omat = np.array([Decimal(repr(r)) for r in self.rmat.flat]) self.omat = self.omat.reshape(4, 5) def test_keepdims(self): mat = np.eye(3) for f in self.funcs: for axis in [0, 1]: res = f(mat, axis=axis, keepdims=True) assert_(res.ndim == mat.ndim) assert_(res.shape[axis] == 1) for axis in [None]: res = f(mat, axis=axis, keepdims=True) assert_(res.shape == (1, 1)) def test_out(self): mat = np.eye(3) for f in self.funcs: out = np.zeros(3) tgt = f(mat, axis=1) res = f(mat, axis=1, out=out) assert_almost_equal(res, out) assert_almost_equal(res, tgt) out = np.empty(2) assert_raises(ValueError, f, mat, axis=1, out=out) out = np.empty((2, 2)) assert_raises(ValueError, f, mat, axis=1, out=out) def test_dtype_from_input(self): icodes = np.typecodes['AllInteger'] fcodes = np.typecodes['AllFloat'] # object type for f in self.funcs: mat = np.array([[Decimal(1)]*3]*3) tgt = mat.dtype.type res = f(mat, axis=1).dtype.type assert_(res is tgt) # scalar case res = type(f(mat, axis=None)) assert_(res is Decimal) # integer types for f in self.funcs: for c in icodes: mat = np.eye(3, dtype=c) tgt = np.float64 res = f(mat, axis=1).dtype.type assert_(res is tgt) # scalar case res = f(mat, axis=None).dtype.type assert_(res is tgt) # mean for float types for f in [_mean]: for c in fcodes: mat = np.eye(3, dtype=c) tgt = mat.dtype.type res = f(mat, axis=1).dtype.type assert_(res is tgt) # scalar case res = f(mat, axis=None).dtype.type assert_(res is tgt) # var, std for float types for f in [_var, _std]: for c in fcodes: mat = np.eye(3, dtype=c) # deal with complex types tgt = mat.real.dtype.type res = 
f(mat, axis=1).dtype.type assert_(res is tgt) # scalar case res = f(mat, axis=None).dtype.type assert_(res is tgt) def test_dtype_from_dtype(self): mat = np.eye(3) # stats for integer types # FIXME: # this needs definition as there are lots places along the line # where type casting may take place. #for f in self.funcs: # for c in np.typecodes['AllInteger']: # tgt = np.dtype(c).type # res = f(mat, axis=1, dtype=c).dtype.type # assert_(res is tgt) # # scalar case # res = f(mat, axis=None, dtype=c).dtype.type # assert_(res is tgt) # stats for float types for f in self.funcs: for c in np.typecodes['AllFloat']: tgt = np.dtype(c).type res = f(mat, axis=1, dtype=c).dtype.type assert_(res is tgt) # scalar case res = f(mat, axis=None, dtype=c).dtype.type assert_(res is tgt) def test_ddof(self): for f in [_var]: for ddof in range(3): dim = self.rmat.shape[1] tgt = f(self.rmat, axis=1) * dim res = f(self.rmat, axis=1, ddof=ddof) * (dim - ddof) for f in [_std]: for ddof in range(3): dim = self.rmat.shape[1] tgt = f(self.rmat, axis=1) * np.sqrt(dim) res = f(self.rmat, axis=1, ddof=ddof) * np.sqrt(dim - ddof) assert_almost_equal(res, tgt) assert_almost_equal(res, tgt) def test_ddof_too_big(self): dim = self.rmat.shape[1] for f in [_var, _std]: for ddof in range(dim, dim + 2): with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') res = f(self.rmat, axis=1, ddof=ddof) assert_(not (res < 0).any()) assert_(len(w) > 0) assert_(issubclass(w[0].category, RuntimeWarning)) def test_empty(self): A = np.zeros((0, 3)) for f in self.funcs: for axis in [0, None]: with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') assert_(np.isnan(f(A, axis=axis)).all()) assert_(len(w) > 0) assert_(issubclass(w[0].category, RuntimeWarning)) for axis in [1]: with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') assert_equal(f(A, axis=axis), np.zeros([])) def test_mean_values(self): for mat in [self.rmat, self.cmat, self.omat]: for 
axis in [0, 1]: tgt = mat.sum(axis=axis) res = _mean(mat, axis=axis) * mat.shape[axis] assert_almost_equal(res, tgt) for axis in [None]: tgt = mat.sum(axis=axis) res = _mean(mat, axis=axis) * np.prod(mat.shape) assert_almost_equal(res, tgt) def test_var_values(self): for mat in [self.rmat, self.cmat, self.omat]: for axis in [0, 1, None]: msqr = _mean(mat * mat.conj(), axis=axis) mean = _mean(mat, axis=axis) tgt = msqr - mean * mean.conjugate() res = _var(mat, axis=axis) assert_almost_equal(res, tgt) def test_std_values(self): for mat in [self.rmat, self.cmat, self.omat]: for axis in [0, 1, None]: tgt = np.sqrt(_var(mat, axis=axis)) res = _std(mat, axis=axis) assert_almost_equal(res, tgt) def test_subclass(self): class TestArray(np.ndarray): def __new__(cls, data, info): result = np.array(data) result = result.view(cls) result.info = info return result def __array_finalize__(self, obj): self.info = getattr(obj, "info", '') dat = TestArray([[1, 2, 3, 4], [5, 6, 7, 8]], 'jubba') res = dat.mean(1) assert_(res.info == dat.info) res = dat.std(1) assert_(res.info == dat.info) res = dat.var(1) assert_(res.info == dat.info) class TestVdot(TestCase): def test_basic(self): dt_numeric = np.typecodes['AllFloat'] + np.typecodes['AllInteger'] dt_complex = np.typecodes['Complex'] # test real a = np.eye(3) for dt in dt_numeric + 'O': b = a.astype(dt) res = np.vdot(b, b) assert_(np.isscalar(res)) assert_equal(np.vdot(b, b), 3) # test complex a = np.eye(3) * 1j for dt in dt_complex + 'O': b = a.astype(dt) res = np.vdot(b, b) assert_(np.isscalar(res)) assert_equal(np.vdot(b, b), 3) # test boolean b = np.eye(3, dtype=np.bool) res = np.vdot(b, b) assert_(np.isscalar(res)) assert_equal(np.vdot(b, b), True) def test_vdot_array_order(self): a = np.array([[1, 2], [3, 4]], order='C') b = np.array([[1, 2], [3, 4]], order='F') res = np.vdot(a, a) # integer arrays are exact assert_equal(np.vdot(a, b), res) assert_equal(np.vdot(b, a), res) assert_equal(np.vdot(b, b), res) class 
TestDot(TestCase): def setUp(self): np.random.seed(128) self.A = np.random.rand(4, 2) self.b1 = np.random.rand(2, 1) self.b2 = np.random.rand(2) self.b3 = np.random.rand(1, 2) self.b4 = np.random.rand(4) self.N = 7 def test_dotmatmat(self): A = self.A res = np.dot(A.transpose(), A) tgt = np.array([[1.45046013, 0.86323640], [0.86323640, 0.84934569]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotmatvec(self): A, b1 = self.A, self.b1 res = np.dot(A, b1) tgt = np.array([[0.32114320], [0.04889721], [0.15696029], [0.33612621]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotmatvec2(self): A, b2 = self.A, self.b2 res = np.dot(A, b2) tgt = np.array([0.29677940, 0.04518649, 0.14468333, 0.31039293]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecmat(self): A, b4 = self.A, self.b4 res = np.dot(b4, A) tgt = np.array([1.23495091, 1.12222648]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecmat2(self): b3, A = self.b3, self.A res = np.dot(b3, A.transpose()) tgt = np.array([[0.58793804, 0.08957460, 0.30605758, 0.62716383]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecmat3(self): A, b4 = self.A, self.b4 res = np.dot(A.transpose(), b4) tgt = np.array([1.23495091, 1.12222648]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecvecouter(self): b1, b3 = self.b1, self.b3 res = np.dot(b1, b3) tgt = np.array([[0.20128610, 0.08400440], [0.07190947, 0.03001058]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecvecinner(self): b1, b3 = self.b1, self.b3 res = np.dot(b3, b1) tgt = np.array([[ 0.23129668]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotcolumnvect1(self): b1 = np.ones((3, 1)) b2 = [5.3] res = np.dot(b1, b2) tgt = np.array([5.3, 5.3, 5.3]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotcolumnvect2(self): b1 = np.ones((3, 1)).transpose() b2 = [6.2] res = np.dot(b2, b1) tgt = np.array([6.2, 6.2, 6.2]) assert_almost_equal(res, tgt, decimal=self.N) def 
test_dotvecscalar(self): np.random.seed(100) b1 = np.random.rand(1, 1) b2 = np.random.rand(1, 4) res = np.dot(b1, b2) tgt = np.array([[0.15126730, 0.23068496, 0.45905553, 0.00256425]]) assert_almost_equal(res, tgt, decimal=self.N) def test_dotvecscalar2(self): np.random.seed(100) b1 = np.random.rand(4, 1) b2 = np.random.rand(1, 1) res = np.dot(b1, b2) tgt = np.array([[0.00256425],[0.00131359],[0.00200324],[ 0.00398638]]) assert_almost_equal(res, tgt, decimal=self.N) def test_all(self): dims = [(), (1,), (1, 1)] dout = [(), (1,), (1, 1), (1,), (), (1,), (1, 1), (1,), (1, 1)] for dim, (dim1, dim2) in zip(dout, itertools.product(dims, dims)): b1 = np.zeros(dim1) b2 = np.zeros(dim2) res = np.dot(b1, b2) tgt = np.zeros(dim) assert_(res.shape == tgt.shape) assert_almost_equal(res, tgt, decimal=self.N) def test_vecobject(self): class Vec(object): def __init__(self, sequence=None): if sequence is None: sequence = [] self.array = np.array(sequence) def __add__(self, other): out = Vec() out.array = self.array + other.array return out def __sub__(self, other): out = Vec() out.array = self.array - other.array return out def __mul__(self, other): # with scalar out = Vec(self.array.copy()) out.array *= other return out def __rmul__(self, other): return self*other U_non_cont = np.transpose([[1., 1.], [1., 2.]]) U_cont = np.ascontiguousarray(U_non_cont) x = np.array([Vec([1., 0.]), Vec([0., 1.])]) zeros = np.array([Vec([0., 0.]), Vec([0., 0.])]) zeros_test = np.dot(U_cont, x) - np.dot(U_non_cont, x) assert_equal(zeros[0].array, zeros_test[0].array) assert_equal(zeros[1].array, zeros_test[1].array) def test_dot_2args(self): from numpy.core.multiarray import dot a = np.array([[1, 2], [3, 4]], dtype=float) b = np.array([[1, 0], [1, 1]], dtype=float) c = np.array([[3, 2], [7, 4]], dtype=float) d = dot(a, b) assert_allclose(c, d) def test_dot_3args(self): from numpy.core.multiarray import dot np.random.seed(22) f = np.random.random_sample((1024, 16)) v = np.random.random_sample((16, 
32)) r = np.empty((1024, 32)) for i in range(12): dot(f, v, r) assert_equal(sys.getrefcount(r), 2) r2 = dot(f, v, out=None) assert_array_equal(r2, r) assert_(r is dot(f, v, out=r)) v = v[:, 0].copy() # v.shape == (16,) r = r[:, 0].copy() # r.shape == (1024,) r2 = dot(f, v) assert_(r is dot(f, v, r)) assert_array_equal(r2, r) def test_dot_3args_errors(self): from numpy.core.multiarray import dot np.random.seed(22) f = np.random.random_sample((1024, 16)) v = np.random.random_sample((16, 32)) r = np.empty((1024, 31)) assert_raises(ValueError, dot, f, v, r) r = np.empty((1024,)) assert_raises(ValueError, dot, f, v, r) r = np.empty((32,)) assert_raises(ValueError, dot, f, v, r) r = np.empty((32, 1024)) assert_raises(ValueError, dot, f, v, r) assert_raises(ValueError, dot, f, v, r.T) r = np.empty((1024, 64)) assert_raises(ValueError, dot, f, v, r[:, ::2]) assert_raises(ValueError, dot, f, v, r[:, :32]) r = np.empty((1024, 32), dtype=np.float32) assert_raises(ValueError, dot, f, v, r) r = np.empty((1024, 32), dtype=int) assert_raises(ValueError, dot, f, v, r) def test_dot_array_order(self): a = np.array([[1, 2], [3, 4]], order='C') b = np.array([[1, 2], [3, 4]], order='F') res = np.dot(a, a) # integer arrays are exact assert_equal(np.dot(a, b), res) assert_equal(np.dot(b, a), res) assert_equal(np.dot(b, b), res) def test_dot_scalar_and_matrix_of_objects(self): # Ticket #2469 arr = np.matrix([1, 2], dtype=object) desired = np.matrix([[3, 6]], dtype=object) assert_equal(np.dot(arr, 3), desired) assert_equal(np.dot(3, arr), desired) def test_dot_override(self): class A(object): def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs): return "A" class B(object): def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs): return NotImplemented a = A() b = B() c = np.array([[1]]) assert_equal(np.dot(a, b), "A") assert_equal(c.dot(a), "A") assert_raises(TypeError, np.dot, b, c) assert_raises(TypeError, c.dot, b) def test_accelerate_framework_sgemv_fix(self): def 
aligned_array(shape, align, dtype, order='C'): d = dtype(0) N = np.prod(shape) tmp = np.zeros(N * d.nbytes + align, dtype=np.uint8) address = tmp.__array_interface__["data"][0] for offset in range(align): if (address + offset) % align == 0: break tmp = tmp[offset:offset+N*d.nbytes].view(dtype=dtype) return tmp.reshape(shape, order=order) def as_aligned(arr, align, dtype, order='C'): aligned = aligned_array(arr.shape, align, dtype, order) aligned[:] = arr[:] return aligned def assert_dot_close(A, X, desired): assert_allclose(np.dot(A, X), desired, rtol=1e-5, atol=1e-7) m = aligned_array(100, 15, np.float32) s = aligned_array((100, 100), 15, np.float32) np.dot(s, m) # this will always segfault if the bug is present testdata = itertools.product((15,32), (10000,), (200,89), ('C','F')) for align, m, n, a_order in testdata: # Calculation in double precision A_d = np.random.rand(m, n) X_d = np.random.rand(n) desired = np.dot(A_d, X_d) # Calculation with aligned single precision A_f = as_aligned(A_d, align, np.float32, order=a_order) X_f = as_aligned(X_d, align, np.float32) assert_dot_close(A_f, X_f, desired) # Strided A rows A_d_2 = A_d[::2] desired = np.dot(A_d_2, X_d) A_f_2 = A_f[::2] assert_dot_close(A_f_2, X_f, desired) # Strided A columns, strided X vector A_d_22 = A_d_2[:, ::2] X_d_2 = X_d[::2] desired = np.dot(A_d_22, X_d_2) A_f_22 = A_f_2[:, ::2] X_f_2 = X_f[::2] assert_dot_close(A_f_22, X_f_2, desired) # Check the strides are as expected if a_order == 'F': assert_equal(A_f_22.strides, (8, 8 * m)) else: assert_equal(A_f_22.strides, (8 * n, 8)) assert_equal(X_f_2.strides, (8,)) # Strides in A rows + cols only X_f_2c = as_aligned(X_f_2, align, np.float32) assert_dot_close(A_f_22, X_f_2c, desired) # Strides just in A cols A_d_12 = A_d[:, ::2] desired = np.dot(A_d_12, X_d_2) A_f_12 = A_f[:, ::2] assert_dot_close(A_f_12, X_f_2c, desired) # Strides in A cols and X assert_dot_close(A_f_12, X_f_2, desired) class MatmulCommon(): """Common tests for '@' operator and 
numpy.matmul. Do not derive from TestCase to avoid nose running it. """ # Should work with these types. Will want to add # "O" at some point types = "?bhilqBHILQefdgFDG" def test_exceptions(self): dims = [ ((1,), (2,)), # mismatched vector vector ((2, 1,), (2,)), # mismatched matrix vector ((2,), (1, 2)), # mismatched vector matrix ((1, 2), (3, 1)), # mismatched matrix matrix ((1,), ()), # vector scalar ((), (1)), # scalar vector ((1, 1), ()), # matrix scalar ((), (1, 1)), # scalar matrix ((2, 2, 1), (3, 1, 2)), # cannot broadcast ] for dt, (dm1, dm2) in itertools.product(self.types, dims): a = np.ones(dm1, dtype=dt) b = np.ones(dm2, dtype=dt) assert_raises(ValueError, self.matmul, a, b) def test_shapes(self): dims = [ ((1, 1), (2, 1, 1)), # broadcast first argument ((2, 1, 1), (1, 1)), # broadcast second argument ((2, 1, 1), (2, 1, 1)), # matrix stack sizes match ] for dt, (dm1, dm2) in itertools.product(self.types, dims): a = np.ones(dm1, dtype=dt) b = np.ones(dm2, dtype=dt) res = self.matmul(a, b) assert_(res.shape == (2, 1, 1)) # vector vector returns scalars. 
for dt in self.types: a = np.ones((2,), dtype=dt) b = np.ones((2,), dtype=dt) c = self.matmul(a, b) assert_(np.array(c).shape == ()) def test_result_types(self): mat = np.ones((1,1)) vec = np.ones((1,)) for dt in self.types: m = mat.astype(dt) v = vec.astype(dt) for arg in [(m, v), (v, m), (m, m)]: res = self.matmul(*arg) assert_(res.dtype == dt) # vector vector returns scalars res = self.matmul(v, v) assert_(type(res) is np.dtype(dt).type) def test_vector_vector_values(self): vec = np.array([1, 2]) tgt = 5 for dt in self.types[1:]: v1 = vec.astype(dt) res = self.matmul(v1, v1) assert_equal(res, tgt) # boolean type vec = np.array([True, True], dtype='?') res = self.matmul(vec, vec) assert_equal(res, True) def test_vector_matrix_values(self): vec = np.array([1, 2]) mat1 = np.array([[1, 2], [3, 4]]) mat2 = np.stack([mat1]*2, axis=0) tgt1 = np.array([7, 10]) tgt2 = np.stack([tgt1]*2, axis=0) for dt in self.types[1:]: v = vec.astype(dt) m1 = mat1.astype(dt) m2 = mat2.astype(dt) res = self.matmul(v, m1) assert_equal(res, tgt1) res = self.matmul(v, m2) assert_equal(res, tgt2) # boolean type vec = np.array([True, False]) mat1 = np.array([[True, False], [False, True]]) mat2 = np.stack([mat1]*2, axis=0) tgt1 = np.array([True, False]) tgt2 = np.stack([tgt1]*2, axis=0) res = self.matmul(vec, mat1) assert_equal(res, tgt1) res = self.matmul(vec, mat2) assert_equal(res, tgt2) def test_matrix_vector_values(self): vec = np.array([1, 2]) mat1 = np.array([[1, 2], [3, 4]]) mat2 = np.stack([mat1]*2, axis=0) tgt1 = np.array([5, 11]) tgt2 = np.stack([tgt1]*2, axis=0) for dt in self.types[1:]: v = vec.astype(dt) m1 = mat1.astype(dt) m2 = mat2.astype(dt) res = self.matmul(m1, v) assert_equal(res, tgt1) res = self.matmul(m2, v) assert_equal(res, tgt2) # boolean type vec = np.array([True, False]) mat1 = np.array([[True, False], [False, True]]) mat2 = np.stack([mat1]*2, axis=0) tgt1 = np.array([True, False]) tgt2 = np.stack([tgt1]*2, axis=0) res = self.matmul(vec, mat1) assert_equal(res, 
tgt1) res = self.matmul(vec, mat2) assert_equal(res, tgt2) def test_matrix_matrix_values(self): mat1 = np.array([[1, 2], [3, 4]]) mat2 = np.array([[1, 0], [1, 1]]) mat12 = np.stack([mat1, mat2], axis=0) mat21 = np.stack([mat2, mat1], axis=0) tgt11 = np.array([[7, 10], [15, 22]]) tgt12 = np.array([[3, 2], [7, 4]]) tgt21 = np.array([[1, 2], [4, 6]]) tgt12_21 = np.stack([tgt12, tgt21], axis=0) tgt11_12 = np.stack((tgt11, tgt12), axis=0) tgt11_21 = np.stack((tgt11, tgt21), axis=0) for dt in self.types[1:]: m1 = mat1.astype(dt) m2 = mat2.astype(dt) m12 = mat12.astype(dt) m21 = mat21.astype(dt) # matrix @ matrix res = self.matmul(m1, m2) assert_equal(res, tgt12) res = self.matmul(m2, m1) assert_equal(res, tgt21) # stacked @ matrix res = self.matmul(m12, m1) assert_equal(res, tgt11_21) # matrix @ stacked res = self.matmul(m1, m12) assert_equal(res, tgt11_12) # stacked @ stacked res = self.matmul(m12, m21) assert_equal(res, tgt12_21) # boolean type m1 = np.array([[1, 1], [0, 0]], dtype=np.bool_) m2 = np.array([[1, 0], [1, 1]], dtype=np.bool_) m12 = np.stack([m1, m2], axis=0) m21 = np.stack([m2, m1], axis=0) tgt11 = m1 tgt12 = m1 tgt21 = np.array([[1, 1], [1, 1]], dtype=np.bool_) tgt12_21 = np.stack([tgt12, tgt21], axis=0) tgt11_12 = np.stack((tgt11, tgt12), axis=0) tgt11_21 = np.stack((tgt11, tgt21), axis=0) # matrix @ matrix res = self.matmul(m1, m2) assert_equal(res, tgt12) res = self.matmul(m2, m1) assert_equal(res, tgt21) # stacked @ matrix res = self.matmul(m12, m1) assert_equal(res, tgt11_21) # matrix @ stacked res = self.matmul(m1, m12) assert_equal(res, tgt11_12) # stacked @ stacked res = self.matmul(m12, m21) assert_equal(res, tgt12_21) def test_numpy_ufunc_override(self): class A(np.ndarray): def __new__(cls, *args, **kwargs): return np.array(*args, **kwargs).view(cls) def __numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs): return "A" class B(np.ndarray): def __new__(cls, *args, **kwargs): return np.array(*args, **kwargs).view(cls) def 
# NOTE(review): this chunk of the file has had its newlines collapsed; the
# fragment on the next line is the tail of `B.__numpy_ufunc__`, whose `def`
# keyword sits at the end of the previous (still-collapsed) physical line.
__numpy_ufunc__(self, ufunc, method, pos, inputs, **kwargs):
                return NotImplemented

        a = A([1, 2])
        b = B([1, 2])
        c = np.ones(2)
        # A's __numpy_ufunc__ override wins regardless of operand order,
        # while B returns NotImplemented and so defers to the other operand.
        assert_equal(self.matmul(a, b), "A")
        assert_equal(self.matmul(b, a), "A")
        # B defers and a plain ndarray cannot handle B, so matmul must fail.
        assert_raises(TypeError, self.matmul, b, c)


class TestMatmul(MatmulCommon, TestCase):
    # Run the shared MatmulCommon checks against the np.matmul function.
    matmul = np.matmul

    def test_out_arg(self):
        # Check that the `out` argument of np.matmul is honoured both as a
        # positional and as a keyword argument, and that an output buffer
        # requiring an unsafe cast is rejected.
        # NOTE(review): np.float is the long-deprecated alias of the builtin
        # float (removed in numpy 1.24); kept as-is to match the file.
        a = np.ones((2, 2), dtype=np.float)
        b = np.ones((2, 2), dtype=np.float)
        tgt = np.full((2,2), 2, dtype=np.float)

        # test as positional argument
        msg = "out positional argument"
        out = np.zeros((2, 2), dtype=np.float)
        self.matmul(a, b, out)
        assert_array_equal(out, tgt, err_msg=msg)

        # test as keyword argument
        msg = "out keyword argument"
        out = np.zeros((2, 2), dtype=np.float)
        self.matmul(a, b, out=out)
        assert_array_equal(out, tgt, err_msg=msg)

        # test out with not allowed type cast (safe casting)
        # einsum and cblas raise different error types, so
        # use Exception.
        msg = "out argument with illegal cast"
        out = np.zeros((2, 2), dtype=np.int32)
        assert_raises(Exception, self.matmul, a, b, out=out)

        # skip following tests for now, cblas does not allow non-contiguous
        # outputs and consistency with dot would require same type,
        # dimensions, subtype, and c_contiguous.

        # test out with allowed type cast
        # msg = "out argument with allowed cast"
        # out = np.zeros((2, 2), dtype=np.complex128)
        # self.matmul(a, b, out=out)
        # assert_array_equal(out, tgt, err_msg=msg)

        # test out non-contiguous
        # msg = "out argument with non-contiguous layout"
        # c = np.zeros((2, 2, 2), dtype=np.float)
        # self.matmul(a, b, out=c[..., 0])
        # assert_array_equal(c, tgt, err_msg=msg)


if sys.version_info[:2] >= (3, 5):
    # The `@` operator only exists on Python 3.5+, so the operator-based
    # variant of the shared matmul tests is gated on the interpreter version.
    class TestMatmulOperator(MatmulCommon, TestCase):
        import operator
        matmul = operator.matmul

        def test_array_priority_override(self):
            # A non-ndarray with a high __array_priority__ and both
            # __matmul__/__rmatmul__ must win on either side of `@`.

            class A(object):
                __array_priority__ = 1000

                def __matmul__(self, other):
                    return "A"

                def __rmatmul__(self, other):
                    return "A"

            a = A()
            b = np.ones(2)
            assert_equal(self.matmul(a, b), "A")
            assert_equal(self.matmul(b, a), "A")


def test_matmul_inplace():
    # It would be nice to support in-place matmul eventually, but for now
    # we don't have a working implementation, so better just to error out
    # and nudge people to writing "a = a @ b".
    a = np.eye(3)
    b = np.eye(3)
    assert_raises(TypeError, a.__imatmul__, b)
    import operator
    assert_raises(TypeError, operator.imatmul, a, b)
    # we avoid writing the token `exec` so as not to crash python 2's
    # parser
    exec_ = getattr(builtins, "exec")
    assert_raises(TypeError, exec_, "a @= b", globals(), locals())


class TestInner(TestCase):

    def test_inner_scalar_and_matrix_of_objects(self):
        # Ticket #4482: np.inner of a scalar with an object-dtype matrix.
        arr = np.matrix([1, 2], dtype=object)
        desired = np.matrix([[3, 6]], dtype=object)
        assert_equal(np.inner(arr, 3), desired)
        assert_equal(np.inner(3, arr), desired)

    def test_vecself(self):
        # Ticket 844.
# NOTE(review): the first statements below continue TestInner.test_vecself,
# whose `def` line sits at the end of the previous (still-collapsed)
# physical line.
        # Inner product of a vector with itself segfaults or give
        # meaningless result
        a = np.zeros(shape=(1, 80), dtype=np.float64)
        p = np.inner(a, a)
        assert_almost_equal(p, 0, decimal=14)


class TestSummarization(TestCase):
    # Checks of str()/repr() summarization ("...") for arrays above the
    # print threshold.
    # NOTE(review): the expected strings below look whitespace-mangled (the
    # newline-collapse of this file also squeezed the multi-space alignment
    # inside the literals); confirm them against the pristine file before
    # relying on them.

    def test_1d(self):
        A = np.arange(1001)
        strA = '[ 0 1 2 ..., 998 999 1000]'
        assert_(str(A) == strA)
        reprA = 'array([ 0, 1, 2, ..., 998, 999, 1000])'
        assert_(repr(A) == reprA)

    def test_2d(self):
        A = np.arange(1002).reshape(2, 501)
        strA = '[[ 0 1 2 ..., 498 499 500]\n' \
               ' [ 501 502 503 ..., 999 1000 1001]]'
        assert_(str(A) == strA)
        reprA = 'array([[ 0, 1, 2, ..., 498, 499, 500],\n' \
                ' [ 501, 502, 503, ..., 999, 1000, 1001]])'
        assert_(repr(A) == reprA)


class TestChoose(TestCase):
    def setUp(self):
        # Two constant choice arrays (1-d and 2-d) and an index vector.
        self.x = 2*np.ones((3,), dtype=int)
        self.y = 3*np.ones((3,), dtype=int)
        self.x2 = 2*np.ones((2, 3), dtype=int)
        self.y2 = 3*np.ones((2, 3), dtype=int)
        self.ind = [0, 0, 1]

    def test_basic(self):
        A = np.choose(self.ind, (self.x, self.y))
        assert_equal(A, [2, 2, 3])

    def test_broadcast1(self):
        # 1-d index broadcasts against the 2-d choice arrays
        A = np.choose(self.ind, (self.x2, self.y2))
        assert_equal(A, [[2, 2, 3], [2, 2, 3]])

    def test_broadcast2(self):
        # mixed 1-d and 2-d choice arrays broadcast together
        A = np.choose(self.ind, (self.x, self.y2))
        assert_equal(A, [[2, 2, 3], [2, 2, 3]])


# TODO: test for multidimensional
NEIGH_MODE = {'zero': 0, 'one': 1, 'constant': 2, 'circular': 3, 'mirror': 4}


class TestNeighborhoodIter(TestCase):
    # Exercises the C-level neighborhood iterator through the compiled
    # helper `test_neighborhood_iterator` (from numpy's multiarray test
    # extension module); each `r` lists the expected neighborhood around
    # every element for the given bounds and padding mode.

    # Simple, 2d tests
    def _test_simple2d(self, dt):
        # Test zero and one padding for simple data type
        x = np.array([[0, 1], [2, 3]], dtype=dt)
        r = [np.array([[0, 0, 0], [0, 0, 1]], dtype=dt),
             np.array([[0, 0, 0], [0, 1, 0]], dtype=dt),
             np.array([[0, 0, 1], [0, 2, 3]], dtype=dt),
             np.array([[0, 1, 0], [2, 3, 0]], dtype=dt)]
        l = test_neighborhood_iterator(x, [-1, 0, -1, 1], x[0],
                NEIGH_MODE['zero'])
        assert_array_equal(l, r)

        r = [np.array([[1, 1, 1], [1, 0, 1]], dtype=dt),
             np.array([[1, 1, 1], [0, 1, 1]], dtype=dt),
             np.array([[1, 0, 1], [1, 2, 3]], dtype=dt),
             np.array([[0, 1, 1], [2, 3, 1]], dtype=dt)]
        l = test_neighborhood_iterator(x, [-1, 0, -1, 1], x[0],
                NEIGH_MODE['one'])
        assert_array_equal(l, r)

        # constant padding with the fill value 4
        r = [np.array([[4, 4, 4], [4, 0, 1]], dtype=dt),
             np.array([[4, 4, 4], [0, 1, 4]], dtype=dt),
             np.array([[4, 0, 1], [4, 2, 3]], dtype=dt),
             np.array([[0, 1, 4], [2, 3, 4]], dtype=dt)]
        l = test_neighborhood_iterator(x, [-1, 0, -1, 1], 4,
                NEIGH_MODE['constant'])
        assert_array_equal(l, r)

    def test_simple2d(self):
        self._test_simple2d(np.float)

    def test_simple2d_object(self):
        # same padding checks with an object dtype (Decimal elements)
        self._test_simple2d(Decimal)

    def _test_mirror2d(self, dt):
        x = np.array([[0, 1], [2, 3]], dtype=dt)
        r = [np.array([[0, 0, 1], [0, 0, 1]], dtype=dt),
             np.array([[0, 1, 1], [0, 1, 1]], dtype=dt),
             np.array([[0, 0, 1], [2, 2, 3]], dtype=dt),
             np.array([[0, 1, 1], [2, 3, 3]], dtype=dt)]
        l = test_neighborhood_iterator(x, [-1, 0, -1, 1], x[0],
                NEIGH_MODE['mirror'])
        assert_array_equal(l, r)

    def test_mirror2d(self):
        self._test_mirror2d(np.float)

    def test_mirror2d_object(self):
        self._test_mirror2d(Decimal)

    # Simple, 1d tests
    def _test_simple(self, dt):
        # Test padding with constant values
        x = np.linspace(1, 5, 5).astype(dt)
        r = [[0, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 0]]
        l = test_neighborhood_iterator(x, [-1, 1], x[0], NEIGH_MODE['zero'])
        assert_array_equal(l, r)

        r = [[1, 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 1]]
        l = test_neighborhood_iterator(x, [-1, 1], x[0], NEIGH_MODE['one'])
        assert_array_equal(l, r)

        # constant mode filled with the last element of x
        r = [[x[4], 1, 2], [1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, x[4]]]
        l = test_neighborhood_iterator(x, [-1, 1], x[4], NEIGH_MODE['constant'])
        assert_array_equal(l, r)

    def test_simple_float(self):
        self._test_simple(np.float)

    def test_simple_object(self):
        self._test_simple(Decimal)

    # Test mirror modes
    def _test_mirror(self, dt):
        x = np.linspace(1, 5, 5).astype(dt)
        r = np.array([[2, 1, 1, 2, 3], [1, 1, 2, 3, 4], [1, 2, 3, 4, 5],
                [2, 3, 4, 5, 5], [3, 4, 5, 5, 4]], dtype=dt)
        l = test_neighborhood_iterator(x, [-2, 2], x[1], NEIGH_MODE['mirror'])
        self.assertTrue([i.dtype == dt for i in l])
        assert_array_equal(l, r)

    def test_mirror(self):
self._test_mirror(np.float) def test_mirror_object(self): self._test_mirror(Decimal) # Circular mode def _test_circular(self, dt): x = np.linspace(1, 5, 5).astype(dt) r = np.array([[4, 5, 1, 2, 3], [5, 1, 2, 3, 4], [1, 2, 3, 4, 5], [2, 3, 4, 5, 1], [3, 4, 5, 1, 2]], dtype=dt) l = test_neighborhood_iterator(x, [-2, 2], x[0], NEIGH_MODE['circular']) assert_array_equal(l, r) def test_circular(self): self._test_circular(np.float) def test_circular_object(self): self._test_circular(Decimal) # Test stacking neighborhood iterators class TestStackedNeighborhoodIter(TestCase): # Simple, 1d test: stacking 2 constant-padded neigh iterators def test_simple_const(self): dt = np.float64 # Test zero and one padding for simple data type x = np.array([1, 2, 3], dtype=dt) r = [np.array([0], dtype=dt), np.array([0], dtype=dt), np.array([1], dtype=dt), np.array([2], dtype=dt), np.array([3], dtype=dt), np.array([0], dtype=dt), np.array([0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-2, 4], NEIGH_MODE['zero'], [0, 0], NEIGH_MODE['zero']) assert_array_equal(l, r) r = [np.array([1, 0, 1], dtype=dt), np.array([0, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 0], dtype=dt), np.array([3, 0, 1], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [-1, 1], NEIGH_MODE['one']) assert_array_equal(l, r) # 2nd simple, 1d test: stacking 2 neigh iterators, mixing const padding and # mirror padding def test_simple_mirror(self): dt = np.float64 # Stacking zero on top of mirror x = np.array([1, 2, 3], dtype=dt) r = [np.array([0, 1, 1], dtype=dt), np.array([1, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 3], dtype=dt), np.array([3, 3, 0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['mirror'], [-1, 1], NEIGH_MODE['zero']) assert_array_equal(l, r) # Stacking mirror on top of zero x = np.array([1, 2, 3], dtype=dt) r = [np.array([1, 0, 0], dtype=dt), np.array([0, 0, 1], dtype=dt), np.array([0, 1, 2], dtype=dt), 
np.array([1, 2, 3], dtype=dt), np.array([2, 3, 0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [-2, 0], NEIGH_MODE['mirror']) assert_array_equal(l, r) # Stacking mirror on top of zero: 2nd x = np.array([1, 2, 3], dtype=dt) r = [np.array([0, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 0], dtype=dt), np.array([3, 0, 0], dtype=dt), np.array([0, 0, 3], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [0, 2], NEIGH_MODE['mirror']) assert_array_equal(l, r) # Stacking mirror on top of zero: 3rd x = np.array([1, 2, 3], dtype=dt) r = [np.array([1, 0, 0, 1, 2], dtype=dt), np.array([0, 0, 1, 2, 3], dtype=dt), np.array([0, 1, 2, 3, 0], dtype=dt), np.array([1, 2, 3, 0, 0], dtype=dt), np.array([2, 3, 0, 0, 3], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [-2, 2], NEIGH_MODE['mirror']) assert_array_equal(l, r) # 3rd simple, 1d test: stacking 2 neigh iterators, mixing const padding and # circular padding def test_simple_circular(self): dt = np.float64 # Stacking zero on top of mirror x = np.array([1, 2, 3], dtype=dt) r = [np.array([0, 3, 1], dtype=dt), np.array([3, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 1], dtype=dt), np.array([3, 1, 0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['circular'], [-1, 1], NEIGH_MODE['zero']) assert_array_equal(l, r) # Stacking mirror on top of zero x = np.array([1, 2, 3], dtype=dt) r = [np.array([3, 0, 0], dtype=dt), np.array([0, 0, 1], dtype=dt), np.array([0, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [-2, 0], NEIGH_MODE['circular']) assert_array_equal(l, r) # Stacking mirror on top of zero: 2nd x = np.array([1, 2, 3], dtype=dt) r = [np.array([0, 1, 2], dtype=dt), np.array([1, 2, 3], dtype=dt), np.array([2, 3, 0], dtype=dt), np.array([3, 0, 0], dtype=dt), np.array([0, 0, 1], 
dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [0, 2], NEIGH_MODE['circular']) assert_array_equal(l, r) # Stacking mirror on top of zero: 3rd x = np.array([1, 2, 3], dtype=dt) r = [np.array([3, 0, 0, 1, 2], dtype=dt), np.array([0, 0, 1, 2, 3], dtype=dt), np.array([0, 1, 2, 3, 0], dtype=dt), np.array([1, 2, 3, 0, 0], dtype=dt), np.array([2, 3, 0, 0, 1], dtype=dt)] l = test_neighborhood_iterator_oob(x, [-1, 3], NEIGH_MODE['zero'], [-2, 2], NEIGH_MODE['circular']) assert_array_equal(l, r) # 4th simple, 1d test: stacking 2 neigh iterators, but with lower iterator # being strictly within the array def test_simple_strict_within(self): dt = np.float64 # Stacking zero on top of zero, first neighborhood strictly inside the # array x = np.array([1, 2, 3], dtype=dt) r = [np.array([1, 2, 3, 0], dtype=dt)] l = test_neighborhood_iterator_oob(x, [1, 1], NEIGH_MODE['zero'], [-1, 2], NEIGH_MODE['zero']) assert_array_equal(l, r) # Stacking mirror on top of zero, first neighborhood strictly inside the # array x = np.array([1, 2, 3], dtype=dt) r = [np.array([1, 2, 3, 3], dtype=dt)] l = test_neighborhood_iterator_oob(x, [1, 1], NEIGH_MODE['zero'], [-1, 2], NEIGH_MODE['mirror']) assert_array_equal(l, r) # Stacking mirror on top of zero, first neighborhood strictly inside the # array x = np.array([1, 2, 3], dtype=dt) r = [np.array([1, 2, 3, 1], dtype=dt)] l = test_neighborhood_iterator_oob(x, [1, 1], NEIGH_MODE['zero'], [-1, 2], NEIGH_MODE['circular']) assert_array_equal(l, r) class TestWarnings(object): def test_complex_warning(self): x = np.array([1, 2]) y = np.array([1-2j, 1+2j]) with warnings.catch_warnings(): warnings.simplefilter("error", np.ComplexWarning) assert_raises(np.ComplexWarning, x.__setitem__, slice(None), y) assert_equal(x, [1, 2]) class TestMinScalarType(object): def test_usigned_shortshort(self): dt = np.min_scalar_type(2**8-1) wanted = np.dtype('uint8') assert_equal(wanted, dt) def test_usigned_short(self): dt = 
np.min_scalar_type(2**16-1) wanted = np.dtype('uint16') assert_equal(wanted, dt) def test_usigned_int(self): dt = np.min_scalar_type(2**32-1) wanted = np.dtype('uint32') assert_equal(wanted, dt) def test_usigned_longlong(self): dt = np.min_scalar_type(2**63-1) wanted = np.dtype('uint64') assert_equal(wanted, dt) def test_object(self): dt = np.min_scalar_type(2**64) wanted = np.dtype('O') assert_equal(wanted, dt) if sys.version_info[:2] == (2, 6): from numpy.core.multiarray import memorysimpleview as memoryview from numpy.core._internal import _dtype_from_pep3118 class TestPEP3118Dtype(object): def _check(self, spec, wanted): dt = np.dtype(wanted) if isinstance(wanted, list) and isinstance(wanted[-1], tuple): if wanted[-1][0] == '': names = list(dt.names) names[-1] = '' dt.names = tuple(names) assert_equal(_dtype_from_pep3118(spec), dt, err_msg="spec %r != dtype %r" % (spec, wanted)) def test_native_padding(self): align = np.dtype('i').alignment for j in range(8): if j == 0: s = 'bi' else: s = 'b%dxi' % j self._check('@'+s, {'f0': ('i1', 0), 'f1': ('i', align*(1 + j//align))}) self._check('='+s, {'f0': ('i1', 0), 'f1': ('i', 1+j)}) def test_native_padding_2(self): # Native padding should work also for structs and sub-arrays self._check('x3T{xi}', {'f0': (({'f0': ('i', 4)}, (3,)), 4)}) self._check('^x3T{xi}', {'f0': (({'f0': ('i', 1)}, (3,)), 1)}) def test_trailing_padding(self): # Trailing padding should be included, *and*, the item size # should match the alignment if in aligned mode align = np.dtype('i').alignment def VV(n): return 'V%d' % (align*(1 + (n-1)//align)) self._check('ix', [('f0', 'i'), ('', VV(1))]) self._check('ixx', [('f0', 'i'), ('', VV(2))]) self._check('ixxx', [('f0', 'i'), ('', VV(3))]) self._check('ixxxx', [('f0', 'i'), ('', VV(4))]) self._check('i7x', [('f0', 'i'), ('', VV(7))]) self._check('^ix', [('f0', 'i'), ('', 'V1')]) self._check('^ixx', [('f0', 'i'), ('', 'V2')]) self._check('^ixxx', [('f0', 'i'), ('', 'V3')]) self._check('^ixxxx', 
[('f0', 'i'), ('', 'V4')]) self._check('^i7x', [('f0', 'i'), ('', 'V7')]) def test_native_padding_3(self): dt = np.dtype( [('a', 'b'), ('b', 'i'), ('sub', np.dtype('b,i')), ('c', 'i')], align=True) self._check("T{b:a:xxxi:b:T{b:f0:=i:f1:}:sub:xxxi:c:}", dt) dt = np.dtype( [('a', 'b'), ('b', 'i'), ('c', 'b'), ('d', 'b'), ('e', 'b'), ('sub', np.dtype('b,i', align=True))]) self._check("T{b:a:=i:b:b:c:b:d:b:e:T{b:f0:xxxi:f1:}:sub:}", dt) def test_padding_with_array_inside_struct(self): dt = np.dtype( [('a', 'b'), ('b', 'i'), ('c', 'b', (3,)), ('d', 'i')], align=True) self._check("T{b:a:xxxi:b:3b:c:xi:d:}", dt) def test_byteorder_inside_struct(self): # The byte order after @T{=i} should be '=', not '@'. # Check this by noting the absence of native alignment. self._check('@T{^i}xi', {'f0': ({'f0': ('i', 0)}, 0), 'f1': ('i', 5)}) def test_intra_padding(self): # Natively aligned sub-arrays may require some internal padding align = np.dtype('i').alignment def VV(n): return 'V%d' % (align*(1 + (n-1)//align)) self._check('(3)T{ix}', ({'f0': ('i', 0), '': (VV(1), 4)}, (3,))) class TestNewBufferProtocol(object): def _check_roundtrip(self, obj): obj = np.asarray(obj) x = memoryview(obj) y = np.asarray(x) y2 = np.array(x) assert_(not y.flags.owndata) assert_(y2.flags.owndata) assert_equal(y.dtype, obj.dtype) assert_equal(y.shape, obj.shape) assert_array_equal(obj, y) assert_equal(y2.dtype, obj.dtype) assert_equal(y2.shape, obj.shape) assert_array_equal(obj, y2) def test_roundtrip(self): x = np.array([1, 2, 3, 4, 5], dtype='i4') self._check_roundtrip(x) x = np.array([[1, 2], [3, 4]], dtype=np.float64) self._check_roundtrip(x) x = np.zeros((3, 3, 3), dtype=np.float32)[:, 0,:] self._check_roundtrip(x) dt = [('a', 'b'), ('b', 'h'), ('c', 'i'), ('d', 'l'), ('dx', 'q'), ('e', 'B'), ('f', 'H'), ('g', 'I'), ('h', 'L'), ('hx', 'Q'), ('i', np.single), ('j', np.double), ('k', np.longdouble), ('ix', np.csingle), ('jx', np.cdouble), ('kx', np.clongdouble), ('l', 'S4'), ('m', 'U4'), ('n', 
'V3'), ('o', '?'), ('p', np.half), ] x = np.array( [(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, asbytes('aaaa'), 'bbbb', asbytes('xxx'), True, 1.0)], dtype=dt) self._check_roundtrip(x) x = np.array(([[1, 2], [3, 4]],), dtype=[('a', (int, (2, 2)))]) self._check_roundtrip(x) x = np.array([1, 2, 3], dtype='>i2') self._check_roundtrip(x) x = np.array([1, 2, 3], dtype='<i2') self._check_roundtrip(x) x = np.array([1, 2, 3], dtype='>i4') self._check_roundtrip(x) x = np.array([1, 2, 3], dtype='<i4') self._check_roundtrip(x) # check long long can be represented as non-native x = np.array([1, 2, 3], dtype='>q') self._check_roundtrip(x) # Native-only data types can be passed through the buffer interface # only in native byte order if sys.byteorder == 'little': x = np.array([1, 2, 3], dtype='>g') assert_raises(ValueError, self._check_roundtrip, x) x = np.array([1, 2, 3], dtype='<g') self._check_roundtrip(x) else: x = np.array([1, 2, 3], dtype='>g') self._check_roundtrip(x) x = np.array([1, 2, 3], dtype='<g') assert_raises(ValueError, self._check_roundtrip, x) def test_roundtrip_half(self): half_list = [ 1.0, -2.0, 6.5504 * 10**4, # (max half precision) 2**-14, # ~= 6.10352 * 10**-5 (minimum positive normal) 2**-24, # ~= 5.96046 * 10**-8 (minimum strictly positive subnormal) 0.0, -0.0, float('+inf'), float('-inf'), 0.333251953125, # ~= 1/3 ] x = np.array(half_list, dtype='>e') self._check_roundtrip(x) x = np.array(half_list, dtype='<e') self._check_roundtrip(x) def test_roundtrip_single_types(self): for typ in np.typeDict.values(): dtype = np.dtype(typ) if dtype.char in 'Mm': # datetimes cannot be used in buffers continue if dtype.char == 'V': # skip void continue x = np.zeros(4, dtype=dtype) self._check_roundtrip(x) if dtype.char not in 'qQgG': dt = dtype.newbyteorder('<') x = np.zeros(4, dtype=dt) self._check_roundtrip(x) dt = dtype.newbyteorder('>') x = np.zeros(4, dtype=dt) self._check_roundtrip(x) def test_roundtrip_scalar(self): # Issue #4015. 
self._check_roundtrip(0) def test_export_simple_1d(self): x = np.array([1, 2, 3, 4, 5], dtype='i') y = memoryview(x) assert_equal(y.format, 'i') assert_equal(y.shape, (5,)) assert_equal(y.ndim, 1) assert_equal(y.strides, (4,)) assert_equal(y.suboffsets, EMPTY) assert_equal(y.itemsize, 4) def test_export_simple_nd(self): x = np.array([[1, 2], [3, 4]], dtype=np.float64) y = memoryview(x) assert_equal(y.format, 'd') assert_equal(y.shape, (2, 2)) assert_equal(y.ndim, 2) assert_equal(y.strides, (16, 8)) assert_equal(y.suboffsets, EMPTY) assert_equal(y.itemsize, 8) def test_export_discontiguous(self): x = np.zeros((3, 3, 3), dtype=np.float32)[:, 0,:] y = memoryview(x) assert_equal(y.format, 'f') assert_equal(y.shape, (3, 3)) assert_equal(y.ndim, 2) assert_equal(y.strides, (36, 4)) assert_equal(y.suboffsets, EMPTY) assert_equal(y.itemsize, 4) def test_export_record(self): dt = [('a', 'b'), ('b', 'h'), ('c', 'i'), ('d', 'l'), ('dx', 'q'), ('e', 'B'), ('f', 'H'), ('g', 'I'), ('h', 'L'), ('hx', 'Q'), ('i', np.single), ('j', np.double), ('k', np.longdouble), ('ix', np.csingle), ('jx', np.cdouble), ('kx', np.clongdouble), ('l', 'S4'), ('m', 'U4'), ('n', 'V3'), ('o', '?'), ('p', np.half), ] x = np.array( [(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, asbytes('aaaa'), 'bbbb', asbytes(' '), True, 1.0)], dtype=dt) y = memoryview(x) assert_equal(y.shape, (1,)) assert_equal(y.ndim, 1) assert_equal(y.suboffsets, EMPTY) sz = sum([np.dtype(b).itemsize for a, b in dt]) if np.dtype('l').itemsize == 4: assert_equal(y.format, 'T{b:a:=h:b:i:c:l:d:q:dx:B:e:@H:f:=I:g:L:h:Q:hx:f:i:d:j:^g:k:=Zf:ix:Zd:jx:^Zg:kx:4s:l:=4w:m:3x:n:?:o:@e:p:}') else: assert_equal(y.format, 'T{b:a:=h:b:i:c:q:d:q:dx:B:e:@H:f:=I:g:Q:h:Q:hx:f:i:d:j:^g:k:=Zf:ix:Zd:jx:^Zg:kx:4s:l:=4w:m:3x:n:?:o:@e:p:}') # Cannot test if NPY_RELAXED_STRIDES_CHECKING changes the strides if not (np.ones(1).strides[0] == np.iinfo(np.intp).max): assert_equal(y.strides, (sz,)) assert_equal(y.itemsize, sz) def test_export_subarray(self): x = 
np.array(([[1, 2], [3, 4]],), dtype=[('a', ('i', (2, 2)))]) y = memoryview(x) assert_equal(y.format, 'T{(2,2)i:a:}') assert_equal(y.shape, EMPTY) assert_equal(y.ndim, 0) assert_equal(y.strides, EMPTY) assert_equal(y.suboffsets, EMPTY) assert_equal(y.itemsize, 16) def test_export_endian(self): x = np.array([1, 2, 3], dtype='>i') y = memoryview(x) if sys.byteorder == 'little': assert_equal(y.format, '>i') else: assert_equal(y.format, 'i') x = np.array([1, 2, 3], dtype='<i') y = memoryview(x) if sys.byteorder == 'little': assert_equal(y.format, 'i') else: assert_equal(y.format, '<i') def test_export_flags(self): # Check SIMPLE flag, see also gh-3613 (exception should be BufferError) assert_raises(ValueError, get_buffer_info, np.arange(5)[::2], ('SIMPLE',)) def test_padding(self): for j in range(8): x = np.array([(1,), (2,)], dtype={'f0': (int, j)}) self._check_roundtrip(x) def test_reference_leak(self): count_1 = sys.getrefcount(np.core._internal) a = np.zeros(4) b = memoryview(a) c = np.asarray(b) count_2 = sys.getrefcount(np.core._internal) assert_equal(count_1, count_2) del c # avoid pyflakes unused variable warning. 
# NOTE(review): the first two methods below still belong to the
# TestNewBufferProtocol class begun on an earlier (still-collapsed) line;
# they reuse its _check_roundtrip helper.
    def test_padded_struct_array(self):
        # Round-trip aligned/padded structured dtypes through memoryview.
        dt1 = np.dtype(
                [('a', 'b'), ('b', 'i'), ('sub', np.dtype('b,i')), ('c', 'i')],
                align=True)
        x1 = np.arange(dt1.itemsize, dtype=np.int8).view(dt1)
        self._check_roundtrip(x1)

        dt2 = np.dtype(
                [('a', 'b'), ('b', 'i'), ('c', 'b', (3,)), ('d', 'i')],
                align=True)
        x2 = np.arange(dt2.itemsize, dtype=np.int8).view(dt2)
        self._check_roundtrip(x2)

        dt3 = np.dtype(
                [('a', 'b'), ('b', 'i'), ('c', 'b'), ('d', 'b'), ('e', 'b'),
                ('sub', np.dtype('b,i', align=True))])
        x3 = np.arange(dt3.itemsize, dtype=np.int8).view(dt3)
        self._check_roundtrip(x3)

    def test_relaxed_strides(self):
        # Test that relaxed strides are converted to non-relaxed
        c = np.ones((1, 10, 10), dtype='i8')

        # Check for NPY_RELAXED_STRIDES_CHECKING:
        # NOTE(review): the statement grouping under the two `if`s below had
        # to be reconstructed from the collapsed text; confirm the nesting
        # against the pristine file.
        if np.ones((10, 1), order="C").flags.f_contiguous:
            c.strides = (-1, 80, 8)
            assert memoryview(c).strides == (800, 80, 8)

            # Writing C-contiguous data to a BytesIO buffer should work
            fd = io.BytesIO()
            fd.write(c.data)

            fortran = c.T
            assert memoryview(fortran).strides == (8, 80, 800)

            arr = np.ones((1, 10))
            if arr.flags.f_contiguous:
                shape, strides = get_buffer_info(arr, ['F_CONTIGUOUS'])
                assert_(strides[0] == 8)
                arr = np.ones((10, 1), order='F')
                shape, strides = get_buffer_info(arr, ['C_CONTIGUOUS'])
                assert_(strides[-1] == 8)


class TestArrayAttributeDeletion(object):
    # delattr on ndarray attributes must raise AttributeError, never crash.

    def test_multiarray_writable_attributes_deletion(self):
        """ticket #2046, should not seqfault, raise AttributeError"""
        a = np.ones(2)
        attr = ['shape', 'strides', 'data', 'dtype', 'real', 'imag', 'flat']
        for s in attr:
            assert_raises(AttributeError, delattr, a, s)

    def test_multiarray_not_writable_attributes_deletion(self):
        a = np.ones(2)
        attr = ["ndim", "flags", "itemsize", "size", "nbytes", "base",
                "ctypes", "T", "__array_interface__", "__array_struct__",
                "__array_priority__", "__array_finalize__"]
        for s in attr:
            assert_raises(AttributeError, delattr, a, s)

    def test_multiarray_flags_writable_attribute_deletion(self):
        a = np.ones(2).flags
        attr = ['updateifcopy', 'aligned', 'writeable']
        for s in attr:
            assert_raises(AttributeError, delattr, a, s)

    def test_multiarray_flags_not_writable_attribute_deletion(self):
        a = np.ones(2).flags
        attr = ["contiguous", "c_contiguous", "f_contiguous", "fortran",
                "owndata", "fnc", "forc", "behaved", "carray", "farray",
                "num"]
        for s in attr:
            assert_raises(AttributeError, delattr, a, s)


def test_array_interface():
    # Test scalar coercion within the array interface
    class Foo(object):
        def __init__(self, value):
            self.value = value
            self.iface = {'typestr': '=f8'}

        def __float__(self):
            return float(self.value)

        @property
        def __array_interface__(self):
            return self.iface

    f = Foo(0.5)
    assert_equal(np.array(f), 0.5)
    assert_equal(np.array([f]), [0.5])
    assert_equal(np.array([f, f]), [0.5, 0.5])
    assert_equal(np.array(f).dtype, np.dtype('=f8'))
    # Test various shape definitions
    f.iface['shape'] = ()
    assert_equal(np.array(f), 0.5)
    f.iface['shape'] = None
    assert_raises(TypeError, np.array, f)
    f.iface['shape'] = (1, 1)
    assert_equal(np.array(f), [[0.5]])
    f.iface['shape'] = (2,)
    assert_raises(ValueError, np.array, f)

    # test scalar with no shape
    class ArrayLike(object):
        array = np.array(1)
        __array_interface__ = array.__array_interface__
    assert_equal(np.array(ArrayLike()), 1)


def test_flat_element_deletion():
    # Deleting from a flatiter is unsupported: TypeError is the expected
    # outcome; anything else (e.g. a crash-turned-exception) is a failure.
    it = np.ones(3).flat
    try:
        del it[1]
        del it[1:2]
    except TypeError:
        pass
    except:
        raise AssertionError


def test_scalar_element_deletion():
    # Field deletion on a structured scalar (void) must raise ValueError.
    a = np.zeros(2, dtype=[('x', 'int'), ('y', 'int')])
    assert_raises(ValueError, a[0].__delitem__, 'x')


class TestMemEventHook(TestCase):
    def test_mem_seteventhook(self):
        # The actual tests are within the C code in
        # multiarray/multiarray_tests.c.src
        test_pydatamem_seteventhook_start()
        # force an allocation and free of a numpy array
        # needs to be larger then limit of small memory cacher in ctors.c
        a = np.zeros(1000)
        del a
        test_pydatamem_seteventhook_end()


class TestMapIter(TestCase):
    def test_mapiter(self):
        # The actual tests are within the C code in
        # multiarray/multiarray_tests.c.src
        a =
np.arange(12).reshape((3, 4)).astype(float) index = ([1, 1, 2, 0], [0, 0, 2, 3]) vals = [50, 50, 30, 16] test_inplace_increment(a, index, vals) assert_equal(a, [[0.00, 1., 2.0, 19.], [104., 5., 6.0, 7.0], [8.00, 9., 40., 11.]]) b = np.arange(6).astype(float) index = (np.array([1, 2, 0]),) vals = [50, 4, 100.1] test_inplace_increment(b, index, vals) assert_equal(b, [100.1, 51., 6., 3., 4., 5.]) class TestAsCArray(TestCase): def test_1darray(self): array = np.arange(24, dtype=np.double) from_c = test_as_c_array(array, 3) assert_equal(array[3], from_c) def test_2darray(self): array = np.arange(24, dtype=np.double).reshape(3, 8) from_c = test_as_c_array(array, 2, 4) assert_equal(array[2, 4], from_c) def test_3darray(self): array = np.arange(24, dtype=np.double).reshape(2, 3, 4) from_c = test_as_c_array(array, 1, 2, 3) assert_equal(array[1, 2, 3], from_c) class TestConversion(TestCase): def test_array_scalar_relational_operation(self): #All integer for dt1 in np.typecodes['AllInteger']: assert_(1 > np.array(0, dtype=dt1), "type %s failed" % (dt1,)) assert_(not 1 < np.array(0, dtype=dt1), "type %s failed" % (dt1,)) for dt2 in np.typecodes['AllInteger']: assert_(np.array(1, dtype=dt1) > np.array(0, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) assert_(not np.array(1, dtype=dt1) < np.array(0, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) #Unsigned integers for dt1 in 'BHILQP': assert_(-1 < np.array(1, dtype=dt1), "type %s failed" % (dt1,)) assert_(not -1 > np.array(1, dtype=dt1), "type %s failed" % (dt1,)) assert_(-1 != np.array(1, dtype=dt1), "type %s failed" % (dt1,)) #unsigned vs signed for dt2 in 'bhilqp': assert_(np.array(1, dtype=dt1) > np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) assert_(not np.array(1, dtype=dt1) < np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) assert_(np.array(1, dtype=dt1) != np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) #Signed integers and floats for dt1 in 'bhlqp' + 
np.typecodes['Float']: assert_(1 > np.array(-1, dtype=dt1), "type %s failed" % (dt1,)) assert_(not 1 < np.array(-1, dtype=dt1), "type %s failed" % (dt1,)) assert_(-1 == np.array(-1, dtype=dt1), "type %s failed" % (dt1,)) for dt2 in 'bhlqp' + np.typecodes['Float']: assert_(np.array(1, dtype=dt1) > np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) assert_(not np.array(1, dtype=dt1) < np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) assert_(np.array(-1, dtype=dt1) == np.array(-1, dtype=dt2), "type %s and %s failed" % (dt1, dt2)) class TestWhere(TestCase): def test_basic(self): dts = [np.bool, np.int16, np.int32, np.int64, np.double, np.complex128, np.longdouble, np.clongdouble] for dt in dts: c = np.ones(53, dtype=np.bool) assert_equal(np.where( c, dt(0), dt(1)), dt(0)) assert_equal(np.where(~c, dt(0), dt(1)), dt(1)) assert_equal(np.where(True, dt(0), dt(1)), dt(0)) assert_equal(np.where(False, dt(0), dt(1)), dt(1)) d = np.ones_like(c).astype(dt) e = np.zeros_like(d) r = d.astype(dt) c[7] = False r[7] = e[7] assert_equal(np.where(c, e, e), e) assert_equal(np.where(c, d, e), r) assert_equal(np.where(c, d, e[0]), r) assert_equal(np.where(c, d[0], e), r) assert_equal(np.where(c[::2], d[::2], e[::2]), r[::2]) assert_equal(np.where(c[1::2], d[1::2], e[1::2]), r[1::2]) assert_equal(np.where(c[::3], d[::3], e[::3]), r[::3]) assert_equal(np.where(c[1::3], d[1::3], e[1::3]), r[1::3]) assert_equal(np.where(c[::-2], d[::-2], e[::-2]), r[::-2]) assert_equal(np.where(c[::-3], d[::-3], e[::-3]), r[::-3]) assert_equal(np.where(c[1::-3], d[1::-3], e[1::-3]), r[1::-3]) def test_exotic(self): # object assert_array_equal(np.where(True, None, None), np.array(None)) # zero sized m = np.array([], dtype=bool).reshape(0, 3) b = np.array([], dtype=np.float64).reshape(0, 3) assert_array_equal(np.where(m, 0, b), np.array([]).reshape(0, 3)) # object cast d = np.array([-1.34, -0.16, -0.54, -0.31, -0.08, -0.95, 0.000, 0.313, 0.547, -0.18, 0.876, 0.236, 1.969, 0.310, 
0.699, 1.013, 1.267, 0.229, -1.39, 0.487]) nan = float('NaN') e = np.array(['5z', '0l', nan, 'Wz', nan, nan, 'Xq', 'cs', nan, nan, 'QN', nan, nan, 'Fd', nan, nan, 'kp', nan, '36', 'i1'], dtype=object) m = np.array([0,0,1,0,1,1,0,0,1,1,0,1,1,0,1,1,0,1,0,0], dtype=bool) r = e[:] r[np.where(m)] = d[np.where(m)] assert_array_equal(np.where(m, d, e), r) r = e[:] r[np.where(~m)] = d[np.where(~m)] assert_array_equal(np.where(m, e, d), r) assert_array_equal(np.where(m, e, e), e) # minimal dtype result with NaN scalar (e.g required by pandas) d = np.array([1., 2.], dtype=np.float32) e = float('NaN') assert_equal(np.where(True, d, e).dtype, np.float32) e = float('Infinity') assert_equal(np.where(True, d, e).dtype, np.float32) e = float('-Infinity') assert_equal(np.where(True, d, e).dtype, np.float32) # also check upcast e = float(1e150) assert_equal(np.where(True, d, e).dtype, np.float64) def test_ndim(self): c = [True, False] a = np.zeros((2, 25)) b = np.ones((2, 25)) r = np.where(np.array(c)[:,np.newaxis], a, b) assert_array_equal(r[0], a[0]) assert_array_equal(r[1], b[0]) a = a.T b = b.T r = np.where(c, a, b) assert_array_equal(r[:,0], a[:,0]) assert_array_equal(r[:,1], b[:,0]) def test_dtype_mix(self): c = np.array([False, True, False, False, False, False, True, False, False, False, True, False]) a = np.uint32(1) b = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.], dtype=np.float64) r = np.array([5., 1., 3., 2., -1., -4., 1., -10., 10., 1., 1., 3.], dtype=np.float64) assert_equal(np.where(c, a, b), r) a = a.astype(np.float32) b = b.astype(np.int64) assert_equal(np.where(c, a, b), r) # non bool mask c = c.astype(np.int) c[c != 0] = 34242324 assert_equal(np.where(c, a, b), r) # invert tmpmask = c != 0 c[c == 0] = 41247212 c[tmpmask] = 0 assert_equal(np.where(c, b, a), r) def test_foreign(self): c = np.array([False, True, False, False, False, False, True, False, False, False, True, False]) r = np.array([5., 1., 3., 2., -1., -4., 1., -10., 10., 1., 1., 3.], 
dtype=np.float64) a = np.ones(1, dtype='>i4') b = np.array([5., 0., 3., 2., -1., -4., 0., -10., 10., 1., 0., 3.], dtype=np.float64) assert_equal(np.where(c, a, b), r) b = b.astype('>f8') assert_equal(np.where(c, a, b), r) a = a.astype('<i4') assert_equal(np.where(c, a, b), r) c = c.astype('>i4') assert_equal(np.where(c, a, b), r) def test_error(self): c = [True, True] a = np.ones((4, 5)) b = np.ones((5, 5)) assert_raises(ValueError, np.where, c, a, a) assert_raises(ValueError, np.where, c[0], a, b) def test_string(self): # gh-4778 check strings are properly filled with nulls a = np.array("abc") b = np.array("x" * 753) assert_equal(np.where(True, a, b), "abc") assert_equal(np.where(False, b, a), "abc") # check native datatype sized strings a = np.array("abcd") b = np.array("x" * 8) assert_equal(np.where(True, a, b), "abcd") assert_equal(np.where(False, b, a), "abcd") class TestSizeOf(TestCase): def test_empty_array(self): x = np.array([]) assert_(sys.getsizeof(x) > 0) def check_array(self, dtype): elem_size = dtype(0).itemsize for length in [10, 50, 100, 500]: x = np.arange(length, dtype=dtype) assert_(sys.getsizeof(x) > length * elem_size) def test_array_int32(self): self.check_array(np.int32) def test_array_int64(self): self.check_array(np.int64) def test_array_float32(self): self.check_array(np.float32) def test_array_float64(self): self.check_array(np.float64) def test_view(self): d = np.ones(100) assert_(sys.getsizeof(d[...]) < sys.getsizeof(d)) def test_reshape(self): d = np.ones(100) assert_(sys.getsizeof(d) < sys.getsizeof(d.reshape(100, 1, 1).copy())) def test_resize(self): d = np.ones(100) old = sys.getsizeof(d) d.resize(50) assert_(old > sys.getsizeof(d)) d.resize(150) assert_(old < sys.getsizeof(d)) def test_error(self): d = np.ones(100) assert_raises(TypeError, d.__sizeof__, "a") class TestHashing(TestCase): def test_collections_hashable(self): x = np.array([]) self.assertFalse(isinstance(x, collections.Hashable)) from numpy.core._internal import 
_view_is_safe class TestObjViewSafetyFuncs(TestCase): def test_view_safety(self): psize = np.dtype('p').itemsize # creates dtype but with extra character code - for missing 'p' fields def mtype(s): n, offset, fields = 0, 0, [] for c in s.split(','): # subarrays won't work if c != '-': fields.append(('f{0}'.format(n), c, offset)) n += 1 offset += np.dtype(c).itemsize if c != '-' else psize names, formats, offsets = zip(*fields) return np.dtype({'names': names, 'formats': formats, 'offsets': offsets, 'itemsize': offset}) # test nonequal itemsizes with objects: # these should succeed: _view_is_safe(np.dtype('O,p,O,p'), np.dtype('O,p,O,p,O,p')) _view_is_safe(np.dtype('O,O'), np.dtype('O,O,O')) # these should fail: assert_raises(TypeError, _view_is_safe, np.dtype('O,O,p'), np.dtype('O,O')) assert_raises(TypeError, _view_is_safe, np.dtype('O,O,p'), np.dtype('O,p')) assert_raises(TypeError, _view_is_safe, np.dtype('O,O,p'), np.dtype('p,O')) # test nonequal itemsizes with missing fields: # these should succeed: _view_is_safe(mtype('-,p,-,p'), mtype('-,p,-,p,-,p')) _view_is_safe(np.dtype('p,p'), np.dtype('p,p,p')) # these should fail: assert_raises(TypeError, _view_is_safe, mtype('p,p,-'), mtype('p,p')) assert_raises(TypeError, _view_is_safe, mtype('p,p,-'), mtype('p,-')) assert_raises(TypeError, _view_is_safe, mtype('p,p,-'), mtype('-,p')) # scans through positions at which we can view a type def scanView(d1, otype): goodpos = [] for shift in range(d1.itemsize - np.dtype(otype).itemsize+1): d2 = np.dtype({'names': ['f0'], 'formats': [otype], 'offsets': [shift], 'itemsize': d1.itemsize}) try: _view_is_safe(d1, d2) except TypeError: pass else: goodpos.append(shift) return goodpos # test partial overlap with object field assert_equal(scanView(np.dtype('p,O,p,p,O,O'), 'p'), [0] + list(range(2*psize, 3*psize+1))) assert_equal(scanView(np.dtype('p,O,p,p,O,O'), 'O'), [psize, 4*psize, 5*psize]) # test partial overlap with missing field assert_equal(scanView(mtype('p,-,p,p,-,-'), 
'p'), [0] + list(range(2*psize, 3*psize+1))) # test nested structures with objects: nestedO = np.dtype([('f0', 'p'), ('f1', 'p,O,p')]) assert_equal(scanView(nestedO, 'p'), list(range(psize+1)) + [3*psize]) assert_equal(scanView(nestedO, 'O'), [2*psize]) # test nested structures with missing fields: nestedM = np.dtype([('f0', 'p'), ('f1', mtype('p,-,p'))]) assert_equal(scanView(nestedM, 'p'), list(range(psize+1)) + [3*psize]) # test subarrays with objects subarrayO = np.dtype('p,(2,3)O,p') assert_equal(scanView(subarrayO, 'p'), [0, 7*psize]) assert_equal(scanView(subarrayO, 'O'), list(range(psize, 6*psize+1, psize))) #test dtype with overlapping fields overlapped = np.dtype({'names': ['f0', 'f1', 'f2', 'f3'], 'formats': ['p', 'p', 'p', 'p'], 'offsets': [0, 1, 3*psize-1, 3*psize], 'itemsize': 4*psize}) assert_equal(scanView(overlapped, 'p'), [0, 1, 3*psize-1, 3*psize]) class TestArrayPriority(TestCase): # This will go away when __array_priority__ is settled, meanwhile # it serves to check unintended changes. op = operator binary_ops = [ op.pow, op.add, op.sub, op.mul, op.floordiv, op.truediv, op.mod, op.and_, op.or_, op.xor, op.lshift, op.rshift, op.mod, op.gt, op.ge, op.lt, op.le, op.ne, op.eq ] if sys.version_info[0] < 3: binary_ops.append(op.div) class Foo(np.ndarray): __array_priority__ = 100. def __new__(cls, *args, **kwargs): return np.array(*args, **kwargs).view(cls) class Bar(np.ndarray): __array_priority__ = 101. def __new__(cls, *args, **kwargs): return np.array(*args, **kwargs).view(cls) class Other(object): __array_priority__ = 1000. 
def _all(self, other): return self.__class__() __add__ = __radd__ = _all __sub__ = __rsub__ = _all __mul__ = __rmul__ = _all __pow__ = __rpow__ = _all __div__ = __rdiv__ = _all __mod__ = __rmod__ = _all __truediv__ = __rtruediv__ = _all __floordiv__ = __rfloordiv__ = _all __and__ = __rand__ = _all __xor__ = __rxor__ = _all __or__ = __ror__ = _all __lshift__ = __rlshift__ = _all __rshift__ = __rrshift__ = _all __eq__ = _all __ne__ = _all __gt__ = _all __ge__ = _all __lt__ = _all __le__ = _all def test_ndarray_subclass(self): a = np.array([1, 2]) b = self.Bar([1, 2]) for f in self.binary_ops: msg = repr(f) assert_(isinstance(f(a, b), self.Bar), msg) assert_(isinstance(f(b, a), self.Bar), msg) def test_ndarray_other(self): a = np.array([1, 2]) b = self.Other() for f in self.binary_ops: msg = repr(f) assert_(isinstance(f(a, b), self.Other), msg) assert_(isinstance(f(b, a), self.Other), msg) def test_subclass_subclass(self): a = self.Foo([1, 2]) b = self.Bar([1, 2]) for f in self.binary_ops: msg = repr(f) assert_(isinstance(f(a, b), self.Bar), msg) assert_(isinstance(f(b, a), self.Bar), msg) def test_subclass_other(self): a = self.Foo([1, 2]) b = self.Other() for f in self.binary_ops: msg = repr(f) assert_(isinstance(f(a, b), self.Other), msg) assert_(isinstance(f(b, a), self.Other), msg) class TestBytestringArrayNonzero(TestCase): def test_empty_bstring_array_is_falsey(self): self.assertFalse(np.array([''], dtype=np.str)) def test_whitespace_bstring_array_is_falsey(self): a = np.array(['spam'], dtype=np.str) a[0] = ' \0\0' self.assertFalse(a) def test_all_null_bstring_array_is_falsey(self): a = np.array(['spam'], dtype=np.str) a[0] = '\0\0\0\0' self.assertFalse(a) def test_null_inside_bstring_array_is_truthy(self): a = np.array(['spam'], dtype=np.str) a[0] = ' \0 \0' self.assertTrue(a) class TestUnicodeArrayNonzero(TestCase): def test_empty_ustring_array_is_falsey(self): self.assertFalse(np.array([''], dtype=np.unicode)) def 
test_whitespace_ustring_array_is_falsey(self): a = np.array(['eggs'], dtype=np.unicode) a[0] = ' \0\0' self.assertFalse(a) def test_all_null_ustring_array_is_falsey(self): a = np.array(['eggs'], dtype=np.unicode) a[0] = '\0\0\0\0' self.assertFalse(a) def test_null_inside_ustring_array_is_truthy(self): a = np.array(['eggs'], dtype=np.unicode) a[0] = ' \0 \0' self.assertTrue(a) if __name__ == "__main__": run_module_suite()
MichaelAquilina/numpy
numpy/core/tests/test_multiarray.py
Python
bsd-3-clause
220,691
import urllib

from canvas import util


def make_cookie_key(key):
    """Build the namespaced cookie name used for after-signup values."""
    return 'after_signup_' + str(key)


def _get(request, key):
    """Fetch an after-signup cookie.

    Returns a ``(cookie_key, value)`` pair where ``value`` is the
    JSON-decoded cookie payload, or ``None`` when the cookie is absent.
    """
    cookie_key = make_cookie_key(key)
    raw = request.COOKIES.get(cookie_key)
    if raw is None:
        return (cookie_key, None)
    return (cookie_key, util.loads(urllib.unquote(raw)))


def get_posted_comment(request):
    '''
    Gets a comment waiting to be posted, if one exists.

    Returns a pair containing the cookie key used to retrieve it and its deserialized JSON.
    '''
    #TODO use dcramer's django-cookies so that we don't rely on having the response object to mutate cookies.
    # That would make this API much cleaner and isolated.
    return _get(request, 'post_comment')
canvasnetworks/canvas
website/canvas/after_signup.py
Python
bsd-3-clause
698
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/chromeos/bluetooth/bluetooth_adapter.h"

#include "base/bind.h"
#include "base/lazy_instance.h"
#include "base/logging.h"
#include "base/stl_util.h"
#include "base/values.h"
#include "chrome/browser/chromeos/bluetooth/bluetooth_device.h"
#include "chromeos/dbus/bluetooth_adapter_client.h"
#include "chromeos/dbus/bluetooth_device_client.h"
#include "chromeos/dbus/bluetooth_manager_client.h"
#include "chromeos/dbus/bluetooth_out_of_band_client.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "dbus/object_path.h"

namespace {

// Shared default adapter instance, we don't want to keep this class around
// if nobody is using it so use a WeakPtr and create the object when needed;
// since Google C++ Style (and clang's static analyzer) forbids us having
// exit-time destructors we use a leaky lazy instance for it.
base::LazyInstance<base::WeakPtr<chromeos::BluetoothAdapter> >::Leaky
    default_adapter = LAZY_INSTANCE_INITIALIZER;

}  // namespace

namespace chromeos {

// Registers this object as an observer of the manager, adapter and device
// D-Bus clients so their signals are routed to the handlers below.
BluetoothAdapter::BluetoothAdapter() : weak_ptr_factory_(this),
                                       track_default_(false),
                                       powered_(false),
                                       discovering_(false) {
  DBusThreadManager::Get()->GetBluetoothManagerClient()->
      AddObserver(this);
  DBusThreadManager::Get()->GetBluetoothAdapterClient()->
      AddObserver(this);
  DBusThreadManager::Get()->GetBluetoothDeviceClient()->
      AddObserver(this);
}

// Unregisters from the D-Bus clients (reverse order of registration) and
// frees all owned BluetoothDevice objects in devices_.
BluetoothAdapter::~BluetoothAdapter() {
  DBusThreadManager::Get()->GetBluetoothDeviceClient()->
      RemoveObserver(this);
  DBusThreadManager::Get()->GetBluetoothAdapterClient()->
      RemoveObserver(this);
  DBusThreadManager::Get()->GetBluetoothManagerClient()->
      RemoveObserver(this);

  STLDeleteValues(&devices_);
}

void BluetoothAdapter::AddObserver(Observer* observer) {
  DCHECK(observer);
  observers_.AddObserver(observer);
}

void BluetoothAdapter::RemoveObserver(Observer* observer) {
  DCHECK(observer);
  observers_.RemoveObserver(observer);
}

// An adapter is "present" iff we currently hold a non-empty D-Bus object
// path for it.
bool BluetoothAdapter::IsPresent() const {
  return !object_path_.value().empty();
}

bool BluetoothAdapter::IsPowered() const {
  return powered_;
}

// Asynchronously sets the adapter's "powered" D-Bus property; exactly one
// of |callback| / |error_callback| is run later via OnSetPowered().
void BluetoothAdapter::SetPowered(bool powered,
                                  const base::Closure& callback,
                                  const ErrorCallback& error_callback) {
  DBusThreadManager::Get()->GetBluetoothAdapterClient()->
      GetProperties(object_path_)->powered.Set(
          powered,
          base::Bind(&BluetoothAdapter::OnSetPowered,
                     weak_ptr_factory_.GetWeakPtr(),
                     callback,
                     error_callback));
}

bool BluetoothAdapter::IsDiscovering() const {
  return discovering_;
}

// Starts or stops device discovery over D-Bus; the result is reported via
// OnStartDiscovery()/OnStopDiscovery().
void BluetoothAdapter::SetDiscovering(bool discovering,
                                      const base::Closure& callback,
                                      const ErrorCallback& error_callback) {
  if (discovering) {
    DBusThreadManager::Get()->GetBluetoothAdapterClient()->
        StartDiscovery(object_path_,
                       base::Bind(&BluetoothAdapter::OnStartDiscovery,
                                  weak_ptr_factory_.GetWeakPtr(),
                                  callback, error_callback));
  } else {
    DBusThreadManager::Get()->GetBluetoothAdapterClient()->
        StopDiscovery(object_path_,
                      base::Bind(&BluetoothAdapter::OnStopDiscovery,
                                 weak_ptr_factory_.GetWeakPtr(),
                                 callback, error_callback));
  }
}

// Non-const overload implemented in terms of the const one; strips the
// constness from each returned pointer.
BluetoothAdapter::DeviceList BluetoothAdapter::GetDevices() {
  ConstDeviceList const_devices =
      const_cast<const BluetoothAdapter *>(this)->GetDevices();

  DeviceList devices;
  for (ConstDeviceList::const_iterator i = const_devices.begin();
       i != const_devices.end(); ++i)
    devices.push_back(const_cast<BluetoothDevice *>(*i));

  return devices;
}

// Returns every device currently tracked in devices_ (pointers remain
// owned by this adapter).
BluetoothAdapter::ConstDeviceList BluetoothAdapter::GetDevices() const {
  ConstDeviceList devices;
  for (DevicesMap::const_iterator iter = devices_.begin();
       iter != devices_.end();
       ++iter)
    devices.push_back(iter->second);

  return devices;
}

// Non-const overload implemented in terms of the const one.
BluetoothDevice* BluetoothAdapter::GetDevice(const std::string& address) {
  return const_cast<BluetoothDevice *>(
      const_cast<const BluetoothAdapter *>(this)->GetDevice(address));
}

// Looks up a device by Bluetooth address; returns NULL if unknown.
const BluetoothDevice* BluetoothAdapter::GetDevice(
    const std::string& address) const {
  DevicesMap::const_iterator iter = devices_.find(address);
  if (iter != devices_.end())
    return iter->second;

  return NULL;
}

// Asynchronously reads the local out-of-band pairing data; the result is
// delivered via OnReadLocalData().
void BluetoothAdapter::ReadLocalOutOfBandPairingData(
    const BluetoothOutOfBandPairingDataCallback& callback,
    const ErrorCallback& error_callback) {
  DBusThreadManager::Get()->GetBluetoothOutOfBandClient()->
      ReadLocalData(object_path_,
                    base::Bind(&BluetoothAdapter::OnReadLocalData,
                               weak_ptr_factory_.GetWeakPtr(),
                               callback, error_callback));
}

// Switches this object to follow whichever adapter the manager reports as
// default; the answer arrives in AdapterCallback().
void BluetoothAdapter::TrackDefaultAdapter() {
  DVLOG(1) << "Tracking default adapter";
  track_default_ = true;
  DBusThreadManager::Get()->GetBluetoothManagerClient()->
      DefaultAdapter(base::Bind(&BluetoothAdapter::AdapterCallback,
                                weak_ptr_factory_.GetWeakPtr()));
}

// Binds this object to the specific adapter with |address| instead of the
// default one; the answer arrives in AdapterCallback().
void BluetoothAdapter::FindAdapter(const std::string& address) {
  DVLOG(1) << "Using adapter " << address;
  track_default_ = false;
  DBusThreadManager::Get()->GetBluetoothManagerClient()->
      FindAdapter(address,
                  base::Bind(&BluetoothAdapter::AdapterCallback,
                             weak_ptr_factory_.GetWeakPtr()));
}

// Result of DefaultAdapter()/FindAdapter(): adopt the returned path, or
// drop the current adapter if the lookup failed while we had one.
void BluetoothAdapter::AdapterCallback(const dbus::ObjectPath& adapter_path,
                                       bool success) {
  if (success) {
    ChangeAdapter(adapter_path);
  } else if (!object_path_.value().empty()) {
    RemoveAdapter();
  }
}

// Manager signal: only honored when we are tracking the default adapter.
void BluetoothAdapter::DefaultAdapterChanged(
    const dbus::ObjectPath& adapter_path) {
  if (track_default_)
    ChangeAdapter(adapter_path);
}

// Manager signal: the adapter we are bound to has gone away.
void BluetoothAdapter::AdapterRemoved(const dbus::ObjectPath& adapter_path) {
  if (adapter_path == object_path_)
    RemoveAdapter();
}

// Switches to a new adapter object path, re-reading all properties and
// notifying observers when this is the first adapter we have seen.
void BluetoothAdapter::ChangeAdapter(const dbus::ObjectPath& adapter_path) {
  if (adapter_path == object_path_)
    return;

  // Determine whether this is a change of adapter or gaining an adapter,
  // remember for later so we can send the right notification.
  const bool new_adapter = object_path_.value().empty();
  if (new_adapter) {
    DVLOG(1) << "Adapter path initialized to " << adapter_path.value();
  } else {
    DVLOG(1) << "Adapter path changed from " << object_path_.value()
             << " to " << adapter_path.value();

    // Invalidate the devices list, since the property update does not
    // remove them.
    ClearDevices();
  }

  object_path_ = adapter_path;

  // Update properties to their new values.
  BluetoothAdapterClient::Properties* properties =
      DBusThreadManager::Get()->GetBluetoothAdapterClient()->
          GetProperties(object_path_);

  address_ = properties->address.value();

  PoweredChanged(properties->powered.value());
  DiscoveringChanged(properties->discovering.value());
  DevicesChanged(properties->devices.value());

  // Notify observers if we did not have an adapter before, the case of
  // moving from one to another is hidden from layers above.
  if (new_adapter)
    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      AdapterPresentChanged(this, true));
}

// Resets all state (powered, discovering, devices, path, address) and
// tells observers the adapter is no longer present.
void BluetoothAdapter::RemoveAdapter() {
  DVLOG(1) << "Adapter lost.";
  PoweredChanged(false);
  DiscoveringChanged(false);
  ClearDevices();

  object_path_ = dbus::ObjectPath("");
  address_.clear();

  FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                    AdapterPresentChanged(this, false));
}

// Completion of SetPowered(): dispatch to exactly one of the callbacks.
void BluetoothAdapter::OnSetPowered(const base::Closure& callback,
                                    const ErrorCallback& error_callback,
                                    bool success) {
  if (success)
    callback.Run();
  else
    error_callback.Run();
}

// Updates the cached powered flag and notifies observers on change.
void BluetoothAdapter::PoweredChanged(bool powered) {
  if (powered == powered_)
    return;

  powered_ = powered;

  FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                    AdapterPoweredChanged(this, powered_));
}

// Completion of StartDiscovery(): on success, drop stale discovery results
// before reporting back to the caller.
void BluetoothAdapter::OnStartDiscovery(const base::Closure& callback,
                                        const ErrorCallback& error_callback,
                                        const dbus::ObjectPath& adapter_path,
                                        bool success) {
  if (success) {
    DVLOG(1) << object_path_.value() << ": started discovery.";

    // Clear devices found in previous discovery attempts
    ClearDiscoveredDevices();
    callback.Run();
  } else {
    // TODO(keybuk): in future, don't run the callback if the error was just
    // that we were already discovering.
    error_callback.Run();
  }
}

// Completion of StopDiscovery().
void BluetoothAdapter::OnStopDiscovery(const base::Closure& callback,
                                       const ErrorCallback& error_callback,
                                       const dbus::ObjectPath& adapter_path,
                                       bool success) {
  if (success) {
    DVLOG(1) << object_path_.value() << ": stopped discovery.";
    callback.Run();
    // Leave found devices available for perusing.
  } else {
    // TODO(keybuk): in future, don't run the callback if the error was just
    // that we weren't discovering.
    error_callback.Run();
  }
}

// Updates the cached discovering flag and notifies observers on change.
void BluetoothAdapter::DiscoveringChanged(bool discovering) {
  if (discovering == discovering_)
    return;

  discovering_ = discovering;

  FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                    AdapterDiscoveringChanged(this, discovering_));
}

// Completion of ReadLocalOutOfBandPairingData().
void BluetoothAdapter::OnReadLocalData(
    const BluetoothOutOfBandPairingDataCallback& callback,
    const ErrorCallback& error_callback,
    const BluetoothOutOfBandPairingData& data,
    bool success) {
  if (success)
    callback.Run(data);
  else
    error_callback.Run();
}

// Adapter D-Bus signal: re-reads whichever of the powered / discovering /
// devices properties changed and fans out through the helpers above.
void BluetoothAdapter::AdapterPropertyChanged(
    const dbus::ObjectPath& adapter_path,
    const std::string& property_name) {
  if (adapter_path != object_path_)
    return;

  BluetoothAdapterClient::Properties* properties =
      DBusThreadManager::Get()->GetBluetoothAdapterClient()->
          GetProperties(object_path_);

  if (property_name == properties->powered.name()) {
    PoweredChanged(properties->powered.value());
  } else if (property_name == properties->discovering.name()) {
    DiscoveringChanged(properties->discovering.value());
  } else if (property_name == properties->devices.name()) {
    DevicesChanged(properties->devices.value());
  }
}

// Device D-Bus signal: any property change triggers a full device update.
void BluetoothAdapter::DevicePropertyChanged(
    const dbus::ObjectPath& device_path,
    const std::string& property_name) {
  UpdateDevice(device_path);
}

// Creates or refreshes the BluetoothDevice for |device_path|, keyed by its
// address property, and raises DeviceAdded or DeviceChanged as appropriate.
void BluetoothAdapter::UpdateDevice(const dbus::ObjectPath& device_path) {
  BluetoothDeviceClient::Properties* properties =
      DBusThreadManager::Get()->GetBluetoothDeviceClient()->
          GetProperties(device_path);

  // When we first see a device, we may not know the address yet and need to
  // wait for the DevicePropertyChanged signal before adding the device.
  const std::string address = properties->address.value();
  if (address.empty())
    return;

  // The device may be already known to us, either because this is an update
  // to properties, or the device going from discovered to connected and
  // pairing gaining an object path in the process. In any case, we want
  // to update the existing object, not create a new one.
  DevicesMap::iterator iter = devices_.find(address);
  BluetoothDevice* device;
  const bool update_device = (iter != devices_.end());
  if (update_device) {
    device = iter->second;
  } else {
    device = BluetoothDevice::Create(this);
    devices_[address] = device;
  }

  const bool was_paired = device->IsPaired();
  if (!was_paired) {
    DVLOG(1) << "Assigned object path " << device_path.value() << " to device "
             << address;
    device->SetObjectPath(device_path);
  }
  device->Update(properties, true);

  // Don't send a duplicate added event for supported devices that were
  // previously visible or for already paired devices, send a changed
  // event instead. We always send one event or the other since we always
  // inform observers about paired devices whether or not they're supported.
  if (update_device && (device->IsSupported() || was_paired)) {
    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      DeviceChanged(this, device));
  } else {
    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      DeviceAdded(this, device));
  }
}

// Empties devices_, notifying observers only for devices that had been
// surfaced to them (supported or paired), then deletes every device.
// The swap keeps devices_ consistent while observers run.
void BluetoothAdapter::ClearDevices() {
  DevicesMap replace;
  devices_.swap(replace);
  for (DevicesMap::iterator iter = replace.begin();
       iter != replace.end(); ++iter) {
    BluetoothDevice* device = iter->second;
    if (device->IsSupported() || device->IsPaired())
      FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                        DeviceRemoved(this, device));

    delete device;
  }
}

// Adapter D-Bus signal: a new device object appeared on our adapter.
void BluetoothAdapter::DeviceCreated(const dbus::ObjectPath& adapter_path,
                                     const dbus::ObjectPath& device_path) {
  if (adapter_path != object_path_)
    return;

  UpdateDevice(device_path);
}

// Adapter D-Bus signal: a device object was removed. Note the iterator is
// advanced before any erase so removal during iteration stays valid.
void BluetoothAdapter::DeviceRemoved(const dbus::ObjectPath& adapter_path,
                                     const dbus::ObjectPath& device_path) {
  if (adapter_path != object_path_)
    return;

  DevicesMap::iterator iter = devices_.begin();
  while (iter != devices_.end()) {
    BluetoothDevice* device = iter->second;
    DevicesMap::iterator temp = iter;
    ++iter;

    if (device->object_path_ != device_path)
      continue;

    // DeviceRemoved can also be called to indicate a device that is visible
    // during discovery has disconnected, but it is still visible to the
    // adapter, so don't remove in that case and only clear the object path.
    if (!device->IsVisible()) {
      FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                        DeviceRemoved(this, device));

      DVLOG(1) << "Removed device " << device->address();

      delete device;
      devices_.erase(temp);
    } else {
      DVLOG(1) << "Removed object path from device " << device->address();
      device->RemoveObjectPath();

      // If the device is not supported then we want to act as if it was
      // removed, even though it is still visible to the adapter.
      if (!device->IsSupported()) {
        FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                          DeviceRemoved(this, device));
      } else {
        FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                          DeviceChanged(this, device));
      }
    }
  }
}

// Adapter "devices" property changed: refresh every listed device.
void BluetoothAdapter::DevicesChanged(
    const std::vector<dbus::ObjectPath>& devices) {
  for (std::vector<dbus::ObjectPath>::const_iterator iter =
           devices.begin(); iter != devices.end(); ++iter)
    UpdateDevice(*iter);
}

// Deletes every non-paired (discovery-only) device, notifying observers
// only for the supported ones they had been told about.
void BluetoothAdapter::ClearDiscoveredDevices() {
  DevicesMap::iterator iter = devices_.begin();
  while (iter != devices_.end()) {
    BluetoothDevice* device = iter->second;
    DevicesMap::iterator temp = iter;
    ++iter;

    if (!device->IsPaired()) {
      if (device->IsSupported())
        FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                          DeviceRemoved(this, device));

      delete device;
      devices_.erase(temp);
    }
  }
}

// Adapter D-Bus signal: a device became visible during discovery. Creates
// or refreshes the corresponding BluetoothDevice and notifies observers.
void BluetoothAdapter::DeviceFound(
    const dbus::ObjectPath& adapter_path, const std::string& address,
    const BluetoothDeviceClient::Properties& properties) {
  if (adapter_path != object_path_)
    return;

  // DeviceFound can also be called to indicate that a device we've
  // paired with is now visible to the adapter during discovery, in which
  // case we want to update the existing object, not create a new one.
  BluetoothDevice* device;
  DevicesMap::iterator iter = devices_.find(address);
  const bool update_device = (iter != devices_.end());
  if (update_device) {
    device = iter->second;
  } else {
    device = BluetoothDevice::Create(this);
    devices_[address] = device;
  }

  DVLOG(1) << "Device " << address << " is visible to the adapter";
  device->SetVisible(true);
  device->Update(&properties, false);

  // Don't send a duplicated added event for duplicate signals for supported
  // devices that were previously visible (should never happen) or for already
  // paired devices, send a changed event instead. We do not inform observers
  // if we find or update an unconnected and unsupported device.
  if (update_device && (device->IsSupported() || device->IsPaired())) {
    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      DeviceChanged(this, device));
  } else if (device->IsSupported()) {
    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      DeviceAdded(this, device));
  }
}

// Adapter D-Bus signal: a device is no longer visible. Paired devices are
// kept (only their visible flag cleared); others are deleted.
void BluetoothAdapter::DeviceDisappeared(const dbus::ObjectPath& adapter_path,
                                         const std::string& address) {
  if (adapter_path != object_path_)
    return;

  DevicesMap::iterator iter = devices_.find(address);
  if (iter == devices_.end())
    return;

  BluetoothDevice* device = iter->second;

  // DeviceDisappeared can also be called to indicate that a device we've
  // paired with is no longer visible to the adapter, so don't remove
  // in that case and only clear the visible flag.
  if (!device->IsPaired()) {
    if (device->IsSupported())
      FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                        DeviceRemoved(this, device));

    DVLOG(1) << "Discovered device " << device->address()
             << " is no longer visible to the adapter";

    delete device;
    devices_.erase(iter);
  } else {
    DVLOG(1) << "Paired device " << device->address()
             << " is no longer visible to the adapter";

    device->SetVisible(false);

    FOR_EACH_OBSERVER(BluetoothAdapter::Observer, observers_,
                      DeviceChanged(this, device));
  }
}

// static
// Lazily creates the shared default adapter (tracked via WeakPtr so it is
// destroyed once all references drop) and returns a reference to it.
scoped_refptr<BluetoothAdapter> BluetoothAdapter::DefaultAdapter() {
  if (!default_adapter.Get().get()) {
    BluetoothAdapter* new_adapter = new BluetoothAdapter;
    default_adapter.Get() = new_adapter->weak_ptr_factory_.GetWeakPtr();
    default_adapter.Get()->TrackDefaultAdapter();
  }

  return scoped_refptr<BluetoothAdapter>(default_adapter.Get());
}

// static
// Creates an adapter bound to a specific address rather than the default.
// Caller takes ownership of the returned object.
BluetoothAdapter* BluetoothAdapter::Create(const std::string& address) {
  BluetoothAdapter* adapter = new BluetoothAdapter;
  adapter->FindAdapter(address);
  return adapter;
}

}  // namespace chromeos
keishi/chromium
chrome/browser/chromeos/bluetooth/bluetooth_adapter.cc
C++
bsd-3-clause
19,256
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;

namespace Inbox2.Platform.Channels.Entities
{
    /// <summary>
    /// Category of a channel profile, serialized via DataContract.
    /// NOTE(review): the EnumMember wire values ("1", "2") differ from the
    /// numeric values (0, 10) — confirm this asymmetry is intentional
    /// before changing either side, as both are part of the persisted
    /// contract.
    /// </summary>
    [Serializable]
    [DataContract]
    public enum ProfileType
    {
        // Serialized on the wire as "1".
        [EnumMember(Value = "1")]
        Default = 0,

        // Serialized on the wire as "2".
        [EnumMember(Value = "2")]
        Social = 10,
    }
}
Klaudit/inbox2_desktop
Code/Platform/Channels/Entities/ProfileType.cs
C#
bsd-3-clause
342
// Forwarding header for the scalar variant of nt2's `parter` gallery
// function: the actual declarations live in functions/parter.hpp, which
// this header simply includes behind its own include guard.
#ifndef NT2_GALLERY_INCLUDE_FUNCTIONS_SCALAR_PARTER_HPP_INCLUDED
#define NT2_GALLERY_INCLUDE_FUNCTIONS_SCALAR_PARTER_HPP_INCLUDED

#include <nt2/gallery/functions/parter.hpp>

#endif
hainm/pythran
third_party/nt2/gallery/include/functions/scalar/parter.hpp
C++
bsd-3-clause
183
// Copyright 2014 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package ppc64 import ( "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/ppc64" ) const ( LeftRdwr uint32 = gc.LeftRead | gc.LeftWrite RightRdwr uint32 = gc.RightRead | gc.RightWrite ) // This table gives the basic information about instruction // generated by the compiler and processed in the optimizer. // See opt.h for bit definitions. // // Instructions not generated need not be listed. // As an exception to that rule, we typically write down all the // size variants of an operation even if we just use a subset. // // The table is formatted for 8-space tabs. var progtable = [ppc64.ALAST]obj.ProgInfo{ obj.ATYPE: {Flags: gc.Pseudo | gc.Skip}, obj.ATEXT: {Flags: gc.Pseudo}, obj.AFUNCDATA: {Flags: gc.Pseudo}, obj.APCDATA: {Flags: gc.Pseudo}, obj.AUNDEF: {Flags: gc.Break}, obj.AUSEFIELD: {Flags: gc.OK}, obj.ACHECKNIL: {Flags: gc.LeftRead}, obj.AVARDEF: {Flags: gc.Pseudo | gc.RightWrite}, obj.AVARKILL: {Flags: gc.Pseudo | gc.RightWrite}, obj.AVARLIVE: {Flags: gc.Pseudo | gc.LeftRead}, // NOP is an internal no-op that also stands // for USED and SET annotations, not the Power opcode. 
obj.ANOP: {Flags: gc.LeftRead | gc.RightWrite}, // Integer ppc64.AADD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ASUB: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ANEG: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AAND: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AOR: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AXOR: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AMULLD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AMULLW: {Flags: gc.SizeL | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AMULHD: {Flags: gc.SizeL | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AMULHDU: {Flags: gc.SizeL | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ADIVD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ADIVDU: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ASLD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ASRD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ASRAD: {Flags: gc.SizeQ | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.ACMP: {Flags: gc.SizeQ | gc.LeftRead | gc.RightRead}, ppc64.ACMPU: {Flags: gc.SizeQ | gc.LeftRead | gc.RightRead}, ppc64.ATD: {Flags: gc.SizeQ | gc.RightRead}, // Floating point. 
ppc64.AFADD: {Flags: gc.SizeD | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFADDS: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFSUB: {Flags: gc.SizeD | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFSUBS: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFMUL: {Flags: gc.SizeD | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFMULS: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFDIV: {Flags: gc.SizeD | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFDIVS: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFCTIDZ: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFCFID: {Flags: gc.SizeF | gc.LeftRead | gc.RegRead | gc.RightWrite}, ppc64.AFCMPU: {Flags: gc.SizeD | gc.LeftRead | gc.RightRead}, ppc64.AFRSP: {Flags: gc.SizeD | gc.LeftRead | gc.RightWrite | gc.Conv}, // Moves ppc64.AMOVB: {Flags: gc.SizeB | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AMOVBU: {Flags: gc.SizeB | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv | gc.PostInc}, ppc64.AMOVBZ: {Flags: gc.SizeB | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AMOVH: {Flags: gc.SizeW | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AMOVHU: {Flags: gc.SizeW | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv | gc.PostInc}, ppc64.AMOVHZ: {Flags: gc.SizeW | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AMOVW: {Flags: gc.SizeL | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, // there is no AMOVWU. 
ppc64.AMOVWZU: {Flags: gc.SizeL | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv | gc.PostInc}, ppc64.AMOVWZ: {Flags: gc.SizeL | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AMOVD: {Flags: gc.SizeQ | gc.LeftRead | gc.RightWrite | gc.Move}, ppc64.AMOVDU: {Flags: gc.SizeQ | gc.LeftRead | gc.RightWrite | gc.Move | gc.PostInc}, ppc64.AFMOVS: {Flags: gc.SizeF | gc.LeftRead | gc.RightWrite | gc.Move | gc.Conv}, ppc64.AFMOVD: {Flags: gc.SizeD | gc.LeftRead | gc.RightWrite | gc.Move}, // Jumps ppc64.ABR: {Flags: gc.Jump | gc.Break}, ppc64.ABL: {Flags: gc.Call}, ppc64.ABEQ: {Flags: gc.Cjmp}, ppc64.ABNE: {Flags: gc.Cjmp}, ppc64.ABGE: {Flags: gc.Cjmp}, ppc64.ABLT: {Flags: gc.Cjmp}, ppc64.ABGT: {Flags: gc.Cjmp}, ppc64.ABLE: {Flags: gc.Cjmp}, obj.ARET: {Flags: gc.Break}, obj.ADUFFZERO: {Flags: gc.Call}, obj.ADUFFCOPY: {Flags: gc.Call}, } var initproginfo_initialized int func initproginfo() { var addvariant = []int{V_CC, V_V, V_CC | V_V} if initproginfo_initialized != 0 { return } initproginfo_initialized = 1 // Perform one-time expansion of instructions in progtable to // their CC, V, and VCC variants var as2 int var i int var variant int for as := int(0); as < len(progtable); as++ { if progtable[as].Flags == 0 { continue } variant = as2variant(as) for i = 0; i < len(addvariant); i++ { as2 = variant2as(as, variant|addvariant[i]) if as2 != 0 && progtable[as2].Flags == 0 { progtable[as2] = progtable[as] } } } } func proginfo(p *obj.Prog) { initproginfo() info := &p.Info *info = progtable[p.As] if info.Flags == 0 { gc.Fatalf("proginfo: unknown instruction %v", p) } if (info.Flags&gc.RegRead != 0) && p.Reg == 0 { info.Flags &^= gc.RegRead info.Flags |= gc.RightRead /*CanRegRead |*/ } if (p.From.Type == obj.TYPE_MEM || p.From.Type == obj.TYPE_ADDR) && p.From.Reg != 0 { info.Regindex |= RtoB(int(p.From.Reg)) if info.Flags&gc.PostInc != 0 { info.Regset |= RtoB(int(p.From.Reg)) } } if (p.To.Type == obj.TYPE_MEM || p.To.Type == obj.TYPE_ADDR) && p.To.Reg != 0 { info.Regindex 
|= RtoB(int(p.To.Reg)) if info.Flags&gc.PostInc != 0 { info.Regset |= RtoB(int(p.To.Reg)) } } if p.From.Type == obj.TYPE_ADDR && p.From.Sym != nil && (info.Flags&gc.LeftRead != 0) { info.Flags &^= gc.LeftRead info.Flags |= gc.LeftAddr } if p.As == obj.ADUFFZERO { info.Reguse |= 1<<0 | RtoB(ppc64.REG_R3) info.Regset |= RtoB(ppc64.REG_R3) } if p.As == obj.ADUFFCOPY { // TODO(austin) Revisit when duffcopy is implemented info.Reguse |= RtoB(ppc64.REG_R3) | RtoB(ppc64.REG_R4) | RtoB(ppc64.REG_R5) info.Regset |= RtoB(ppc64.REG_R3) | RtoB(ppc64.REG_R4) } } // Instruction variants table. Initially this contains entries only // for the "base" form of each instruction. On the first call to // as2variant or variant2as, we'll add the variants to the table. var varianttable = [ppc64.ALAST][4]int{ ppc64.AADD: {ppc64.AADD, ppc64.AADDCC, ppc64.AADDV, ppc64.AADDVCC}, ppc64.AADDC: {ppc64.AADDC, ppc64.AADDCCC, ppc64.AADDCV, ppc64.AADDCVCC}, ppc64.AADDE: {ppc64.AADDE, ppc64.AADDECC, ppc64.AADDEV, ppc64.AADDEVCC}, ppc64.AADDME: {ppc64.AADDME, ppc64.AADDMECC, ppc64.AADDMEV, ppc64.AADDMEVCC}, ppc64.AADDZE: {ppc64.AADDZE, ppc64.AADDZECC, ppc64.AADDZEV, ppc64.AADDZEVCC}, ppc64.AAND: {ppc64.AAND, ppc64.AANDCC, 0, 0}, ppc64.AANDN: {ppc64.AANDN, ppc64.AANDNCC, 0, 0}, ppc64.ACNTLZD: {ppc64.ACNTLZD, ppc64.ACNTLZDCC, 0, 0}, ppc64.ACNTLZW: {ppc64.ACNTLZW, ppc64.ACNTLZWCC, 0, 0}, ppc64.ADIVD: {ppc64.ADIVD, ppc64.ADIVDCC, ppc64.ADIVDV, ppc64.ADIVDVCC}, ppc64.ADIVDU: {ppc64.ADIVDU, ppc64.ADIVDUCC, ppc64.ADIVDUV, ppc64.ADIVDUVCC}, ppc64.ADIVW: {ppc64.ADIVW, ppc64.ADIVWCC, ppc64.ADIVWV, ppc64.ADIVWVCC}, ppc64.ADIVWU: {ppc64.ADIVWU, ppc64.ADIVWUCC, ppc64.ADIVWUV, ppc64.ADIVWUVCC}, ppc64.AEQV: {ppc64.AEQV, ppc64.AEQVCC, 0, 0}, ppc64.AEXTSB: {ppc64.AEXTSB, ppc64.AEXTSBCC, 0, 0}, ppc64.AEXTSH: {ppc64.AEXTSH, ppc64.AEXTSHCC, 0, 0}, ppc64.AEXTSW: {ppc64.AEXTSW, ppc64.AEXTSWCC, 0, 0}, ppc64.AFABS: {ppc64.AFABS, ppc64.AFABSCC, 0, 0}, ppc64.AFADD: {ppc64.AFADD, ppc64.AFADDCC, 0, 0}, ppc64.AFADDS: 
{ppc64.AFADDS, ppc64.AFADDSCC, 0, 0}, ppc64.AFCFID: {ppc64.AFCFID, ppc64.AFCFIDCC, 0, 0}, ppc64.AFCTID: {ppc64.AFCTID, ppc64.AFCTIDCC, 0, 0}, ppc64.AFCTIDZ: {ppc64.AFCTIDZ, ppc64.AFCTIDZCC, 0, 0}, ppc64.AFCTIW: {ppc64.AFCTIW, ppc64.AFCTIWCC, 0, 0}, ppc64.AFCTIWZ: {ppc64.AFCTIWZ, ppc64.AFCTIWZCC, 0, 0}, ppc64.AFDIV: {ppc64.AFDIV, ppc64.AFDIVCC, 0, 0}, ppc64.AFDIVS: {ppc64.AFDIVS, ppc64.AFDIVSCC, 0, 0}, ppc64.AFMADD: {ppc64.AFMADD, ppc64.AFMADDCC, 0, 0}, ppc64.AFMADDS: {ppc64.AFMADDS, ppc64.AFMADDSCC, 0, 0}, ppc64.AFMOVD: {ppc64.AFMOVD, ppc64.AFMOVDCC, 0, 0}, ppc64.AFMSUB: {ppc64.AFMSUB, ppc64.AFMSUBCC, 0, 0}, ppc64.AFMSUBS: {ppc64.AFMSUBS, ppc64.AFMSUBSCC, 0, 0}, ppc64.AFMUL: {ppc64.AFMUL, ppc64.AFMULCC, 0, 0}, ppc64.AFMULS: {ppc64.AFMULS, ppc64.AFMULSCC, 0, 0}, ppc64.AFNABS: {ppc64.AFNABS, ppc64.AFNABSCC, 0, 0}, ppc64.AFNEG: {ppc64.AFNEG, ppc64.AFNEGCC, 0, 0}, ppc64.AFNMADD: {ppc64.AFNMADD, ppc64.AFNMADDCC, 0, 0}, ppc64.AFNMADDS: {ppc64.AFNMADDS, ppc64.AFNMADDSCC, 0, 0}, ppc64.AFNMSUB: {ppc64.AFNMSUB, ppc64.AFNMSUBCC, 0, 0}, ppc64.AFNMSUBS: {ppc64.AFNMSUBS, ppc64.AFNMSUBSCC, 0, 0}, ppc64.AFRES: {ppc64.AFRES, ppc64.AFRESCC, 0, 0}, ppc64.AFRSP: {ppc64.AFRSP, ppc64.AFRSPCC, 0, 0}, ppc64.AFRSQRTE: {ppc64.AFRSQRTE, ppc64.AFRSQRTECC, 0, 0}, ppc64.AFSEL: {ppc64.AFSEL, ppc64.AFSELCC, 0, 0}, ppc64.AFSQRT: {ppc64.AFSQRT, ppc64.AFSQRTCC, 0, 0}, ppc64.AFSQRTS: {ppc64.AFSQRTS, ppc64.AFSQRTSCC, 0, 0}, ppc64.AFSUB: {ppc64.AFSUB, ppc64.AFSUBCC, 0, 0}, ppc64.AFSUBS: {ppc64.AFSUBS, ppc64.AFSUBSCC, 0, 0}, ppc64.AMTFSB0: {ppc64.AMTFSB0, ppc64.AMTFSB0CC, 0, 0}, ppc64.AMTFSB1: {ppc64.AMTFSB1, ppc64.AMTFSB1CC, 0, 0}, ppc64.AMULHD: {ppc64.AMULHD, ppc64.AMULHDCC, 0, 0}, ppc64.AMULHDU: {ppc64.AMULHDU, ppc64.AMULHDUCC, 0, 0}, ppc64.AMULHW: {ppc64.AMULHW, ppc64.AMULHWCC, 0, 0}, ppc64.AMULHWU: {ppc64.AMULHWU, ppc64.AMULHWUCC, 0, 0}, ppc64.AMULLD: {ppc64.AMULLD, ppc64.AMULLDCC, ppc64.AMULLDV, ppc64.AMULLDVCC}, ppc64.AMULLW: {ppc64.AMULLW, ppc64.AMULLWCC, ppc64.AMULLWV, ppc64.AMULLWVCC}, 
ppc64.ANAND: {ppc64.ANAND, ppc64.ANANDCC, 0, 0}, ppc64.ANEG: {ppc64.ANEG, ppc64.ANEGCC, ppc64.ANEGV, ppc64.ANEGVCC}, ppc64.ANOR: {ppc64.ANOR, ppc64.ANORCC, 0, 0}, ppc64.AOR: {ppc64.AOR, ppc64.AORCC, 0, 0}, ppc64.AORN: {ppc64.AORN, ppc64.AORNCC, 0, 0}, ppc64.AREM: {ppc64.AREM, ppc64.AREMCC, ppc64.AREMV, ppc64.AREMVCC}, ppc64.AREMD: {ppc64.AREMD, ppc64.AREMDCC, ppc64.AREMDV, ppc64.AREMDVCC}, ppc64.AREMDU: {ppc64.AREMDU, ppc64.AREMDUCC, ppc64.AREMDUV, ppc64.AREMDUVCC}, ppc64.AREMU: {ppc64.AREMU, ppc64.AREMUCC, ppc64.AREMUV, ppc64.AREMUVCC}, ppc64.ARLDC: {ppc64.ARLDC, ppc64.ARLDCCC, 0, 0}, ppc64.ARLDCL: {ppc64.ARLDCL, ppc64.ARLDCLCC, 0, 0}, ppc64.ARLDCR: {ppc64.ARLDCR, ppc64.ARLDCRCC, 0, 0}, ppc64.ARLDMI: {ppc64.ARLDMI, ppc64.ARLDMICC, 0, 0}, ppc64.ARLWMI: {ppc64.ARLWMI, ppc64.ARLWMICC, 0, 0}, ppc64.ARLWNM: {ppc64.ARLWNM, ppc64.ARLWNMCC, 0, 0}, ppc64.ASLD: {ppc64.ASLD, ppc64.ASLDCC, 0, 0}, ppc64.ASLW: {ppc64.ASLW, ppc64.ASLWCC, 0, 0}, ppc64.ASRAD: {ppc64.ASRAD, ppc64.ASRADCC, 0, 0}, ppc64.ASRAW: {ppc64.ASRAW, ppc64.ASRAWCC, 0, 0}, ppc64.ASRD: {ppc64.ASRD, ppc64.ASRDCC, 0, 0}, ppc64.ASRW: {ppc64.ASRW, ppc64.ASRWCC, 0, 0}, ppc64.ASUB: {ppc64.ASUB, ppc64.ASUBCC, ppc64.ASUBV, ppc64.ASUBVCC}, ppc64.ASUBC: {ppc64.ASUBC, ppc64.ASUBCCC, ppc64.ASUBCV, ppc64.ASUBCVCC}, ppc64.ASUBE: {ppc64.ASUBE, ppc64.ASUBECC, ppc64.ASUBEV, ppc64.ASUBEVCC}, ppc64.ASUBME: {ppc64.ASUBME, ppc64.ASUBMECC, ppc64.ASUBMEV, ppc64.ASUBMEVCC}, ppc64.ASUBZE: {ppc64.ASUBZE, ppc64.ASUBZECC, ppc64.ASUBZEV, ppc64.ASUBZEVCC}, ppc64.AXOR: {ppc64.AXOR, ppc64.AXORCC, 0, 0}, } var initvariants_initialized int func initvariants() { if initvariants_initialized != 0 { return } initvariants_initialized = 1 var j int for i := int(0); i < len(varianttable); i++ { if varianttable[i][0] == 0 { // Instruction has no variants varianttable[i][0] = i continue } // Copy base form to other variants if varianttable[i][0] == i { for j = 0; j < len(varianttable[i]); j++ { varianttable[varianttable[i][j]] = varianttable[i] } } } } 
// as2variant returns the variant (V_*) flags of instruction as. func as2variant(as int) int { initvariants() for i := int(0); i < len(varianttable[as]); i++ { if varianttable[as][i] == as { return i } } gc.Fatalf("as2variant: instruction %v is not a variant of itself", obj.Aconv(as)) return 0 } // variant2as returns the instruction as with the given variant (V_*) flags. // If no such variant exists, this returns 0. func variant2as(as int, flags int) int { initvariants() return varianttable[as][flags] }
mwhudson/go
src/cmd/compile/internal/ppc64/prog.go
GO
bsd-3-clause
13,297
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE122_Heap_Based_Buffer_Overflow__cpp_CWE805_int_loop_04.cpp Label Definition File: CWE122_Heap_Based_Buffer_Overflow__cpp_CWE805.label.xml Template File: sources-sink-04.tmpl.cpp */ /* * @description * CWE: 122 Heap Based Buffer Overflow * BadSource: Allocate using new[] and set data pointer to a small buffer * GoodSource: Allocate using new[] and set data pointer to a large buffer * Sink: loop * BadSink : Copy int array to data using a loop * Flow Variant: 04 Control flow: if(STATIC_CONST_TRUE) and if(STATIC_CONST_FALSE) * * */ #include "std_testcase.h" /* The two variables below are declared "const", so a tool should be able to identify that reads of these will always return their initialized values. */ static const int STATIC_CONST_TRUE = 1; /* true */ static const int STATIC_CONST_FALSE = 0; /* false */ namespace CWE122_Heap_Based_Buffer_Overflow__cpp_CWE805_int_loop_04 { #ifndef OMITBAD void bad() { int * data; data = NULL; if(STATIC_CONST_TRUE) { /* FLAW: Allocate using new[] and point data to a small buffer that is smaller than the large buffer used in the sinks */ data = new int[50]; } { int source[100] = {0}; /* fill with 0's */ { size_t i; /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */ for (i = 0; i < 100; i++) { data[i] = source[i]; } printIntLine(data[0]); delete [] data; } } } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B1() - use goodsource and badsink by changing the STATIC_CONST_TRUE to STATIC_CONST_FALSE */ static void goodG2B1() { int * data; data = NULL; if(STATIC_CONST_FALSE) { /* INCIDENTAL: CWE 561 Dead Code, the code below will never run */ printLine("Benign, fixed string"); } else { /* FIX: Allocate using new[] and point data to a large buffer that is at least as large as the large buffer used in the sink */ data = new int[100]; } { int source[100] = {0}; /* fill with 0's */ { size_t i; /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */ for (i = 0; i < 100; i++) { 
data[i] = source[i]; } printIntLine(data[0]); delete [] data; } } } /* goodG2B2() - use goodsource and badsink by reversing the blocks in the if statement */ static void goodG2B2() { int * data; data = NULL; if(STATIC_CONST_TRUE) { /* FIX: Allocate using new[] and point data to a large buffer that is at least as large as the large buffer used in the sink */ data = new int[100]; } { int source[100] = {0}; /* fill with 0's */ { size_t i; /* POTENTIAL FLAW: Possible buffer overflow if data < 100 */ for (i = 0; i < 100; i++) { data[i] = source[i]; } printIntLine(data[0]); delete [] data; } } } void good() { goodG2B1(); goodG2B2(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN using namespace CWE122_Heap_Based_Buffer_Overflow__cpp_CWE805_int_loop_04; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s03/CWE122_Heap_Based_Buffer_Overflow__cpp_CWE805_int_loop_04.cpp
C++
bsd-3-clause
4,189
using System;
using System.Collections;

namespace MyGeneration.CodeSmithConversion.Template
{
	public enum CstTokenType
	{
		Code = 0,
		ResponseWriteShortcutCode,
		RunAtServerCode,
		Literal,
		Comment,
		EscapedStartTag,
		EscapedEndTag
	}

	/// <summary>
	/// A single lexical token produced while converting a CodeSmith template:
	/// the raw text of the token plus a classification of what that text is.
	/// </summary>
	public class CstToken
	{
		/// <summary>
		/// Creates a token of the given type wrapping the given text.
		/// </summary>
		public CstToken(CstTokenType tokenType, string text)
		{
			TokenType = tokenType;
			Text = text;
		}

		/// <summary>
		/// The raw text captured for this token.
		/// </summary>
		public string Text { get; set; }

		/// <summary>
		/// Classification of this token (code, literal, comment, ...).
		/// Always assigned by the constructor.
		/// </summary>
		public CstTokenType TokenType { get; set; }
	}
}
cafephin/mygeneration
src/plugins/MyGeneration.UI.Plugins.CodeSmith2MyGen/Template/CstToken.cs
C#
bsd-3-clause
726
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @emails oncall+jsinfra
 */

'use strict';

jest.unmock('everyObject');

const everyObject = require('everyObject');

describe('everyObject', () => {
  // Fixture object iterated by everyObject, and the jest mock used as its
  // predicate; both are re-created before every test.
  let subject;
  let predicate;

  beforeEach(() => {
    subject = {foo: 1, bar: 2, baz: 3};
    predicate = jest.fn();
  });

  it('handles null', () => {
    everyObject(null, predicate);
    expect(predicate).not.toBeCalled();
  });

  it('returns true if all properties pass the test', () => {
    predicate.mockImplementation(() => true);
    expect(everyObject(subject, predicate)).toBeTruthy();
    // The predicate receives (value, key, object) for each own property.
    expect(predicate.mock.calls).toEqual([
      [1, 'foo', subject],
      [2, 'bar', subject],
      [3, 'baz', subject]
    ]);
  });

  it('returns false if any of the properties fail the test', () => {
    predicate.mockImplementation(() => false);
    expect(everyObject(subject, predicate)).toBeFalsy();
    expect(predicate).toBeCalled();
  });

  it('returns immediately upon finding a property that fails the test', () => {
    predicate.mockImplementation(() => false);
    expect(everyObject(subject, predicate)).toBeFalsy();
    expect(predicate.mock.calls.length).toEqual(1);
  });
});
chicoxyzzy/fbjs
packages/fbjs/src/functional/__tests__/everyObject-test.js
JavaScript
bsd-3-clause
1,608
/* * Copyright (c) 2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ #include "UmbrellaProtocol.h" #include <folly/Bits.h> #include "mcrouter/lib/McReply.h" #include "mcrouter/lib/McRequest.h" #include "mcrouter/lib/mc/umbrella.h" #ifndef LIBMC_FBTRACE_DISABLE #include "mcrouter/lib/mc/mc_fbtrace_info.h" #endif static_assert( mc_nops == 27, "If you add a new mc_op, make sure to update lib/mc/umbrella_conv.h"); static_assert( UM_NOPS == 28, "If you add a new mc_op, make sure to update lib/mc/umbrella_conv.h"); static_assert( mc_nres == 31, "If you add a new mc_res, make sure to update lib/mc/umbrella_conv.h"); namespace facebook { namespace memcache { UmbrellaParseStatus umbrellaParseHeader(const uint8_t* buf, size_t nbuf, UmbrellaMessageInfo& infoOut) { if (nbuf < sizeof(entry_list_msg_t)) { return UmbrellaParseStatus::NOT_ENOUGH_DATA; } entry_list_msg_t* header = (entry_list_msg_t*) buf; if (header->msg_header.magic_byte != ENTRY_LIST_MAGIC_BYTE) { return UmbrellaParseStatus::MESSAGE_PARSE_ERROR; } infoOut.version = static_cast<UmbrellaVersion>(header->msg_header.version); if (infoOut.version == UmbrellaVersion::BASIC) { /* Basic version layout: }0NNSSSS, <um_elist_entry_t>*nentries, body Where N is nentries and S is message size, both big endian */ size_t messageSize = folly::Endian::big<uint32_t>(header->total_size); uint16_t nentries = folly::Endian::big<uint16_t>(header->nentries); infoOut.headerSize = sizeof(entry_list_msg_t) + sizeof(um_elist_entry_t) * nentries; if (infoOut.headerSize > messageSize) { return UmbrellaParseStatus::MESSAGE_PARSE_ERROR; } infoOut.bodySize = messageSize - infoOut.headerSize; } else if (infoOut.version == UmbrellaVersion::TYPED_REQUEST) { /* Typed request layout: }1TTSSSSFFFFRRRR, body Where T is type ID, S 
is message size, F is flags and R is reqid (all little-endian) */ size_t messageSize = folly::Endian::little<uint32_t>(header->total_size); infoOut.typeId = folly::Endian::little<uint16_t>(header->nentries); infoOut.headerSize = sizeof(entry_list_msg_t) + sizeof(uint32_t) + sizeof(uint32_t); if (infoOut.headerSize > messageSize) { return UmbrellaParseStatus::MESSAGE_PARSE_ERROR; } infoOut.bodySize = messageSize - infoOut.headerSize; } else { return UmbrellaParseStatus::MESSAGE_PARSE_ERROR; } return UmbrellaParseStatus::OK; } uint64_t umbrellaDetermineReqId(const uint8_t* header, size_t nheader) { auto msg = reinterpret_cast<const entry_list_msg_t*>(header); size_t nentries = folly::Endian::big((uint16_t)msg->nentries); if (reinterpret_cast<const uint8_t*>(&msg->entries[nentries]) != header + nheader) { throw std::runtime_error("Invalid number of entries"); } for (size_t i = 0; i < nentries; ++i) { auto& entry = msg->entries[i]; size_t tag = folly::Endian::big((uint16_t)entry.tag); if (tag == msg_reqid) { uint64_t val = folly::Endian::big((uint64_t)entry.data.val); if (val == 0) { throw std::runtime_error("invalid reqid"); } return val; } } throw std::runtime_error("missing reqid"); } McRequest umbrellaParseRequest(const folly::IOBuf& source, const uint8_t* header, size_t nheader, const uint8_t* body, size_t nbody, mc_op_t& opOut, uint64_t& reqidOut) { McRequest req; opOut = mc_op_unknown; reqidOut = 0; auto msg = reinterpret_cast<const entry_list_msg_t*>(header); size_t nentries = folly::Endian::big((uint16_t)msg->nentries); if (reinterpret_cast<const uint8_t*>(&msg->entries[nentries]) != header + nheader) { throw std::runtime_error("Invalid number of entries"); } for (size_t i = 0; i < nentries; ++i) { auto& entry = msg->entries[i]; size_t tag = folly::Endian::big((uint16_t)entry.tag); size_t val = folly::Endian::big((uint64_t)entry.data.val); switch (tag) { case msg_op: if (val >= UM_NOPS) { throw std::runtime_error("op out of range"); } opOut = 
static_cast<mc_op_t>(umbrella_op_to_mc[val]); break; case msg_reqid: if (val == 0) { throw std::runtime_error("invalid reqid"); } reqidOut = val; break; case msg_flags: req.setFlags(val); break; case msg_exptime: req.setExptime(val); break; case msg_delta: req.setDelta(val); break; case msg_cas: req.setCas(val); break; case msg_lease_id: req.setLeaseToken(val); break; case msg_key: if (!req.setKeyFrom( source, body + folly::Endian::big((uint32_t)entry.data.str.offset), folly::Endian::big((uint32_t)entry.data.str.len) - 1)) { throw std::runtime_error("Key: invalid offset/length"); } break; case msg_value: if (!req.setValueFrom( source, body + folly::Endian::big((uint32_t)entry.data.str.offset), folly::Endian::big((uint32_t)entry.data.str.len) - 1)) { throw std::runtime_error("Value: invalid offset/length"); } break; #ifndef LIBMC_FBTRACE_DISABLE case msg_fbtrace: { auto off = folly::Endian::big((uint32_t)entry.data.str.offset); auto len = folly::Endian::big((uint32_t)entry.data.str.len) - 1; if (len > FBTRACE_METADATA_SZ) { throw std::runtime_error("Fbtrace metadata too large"); } if (off + len > nbody || off + len < off) { throw std::runtime_error("Fbtrace metadata field invalid"); } auto fbtraceInfo = new_mc_fbtrace_info(0); memcpy(fbtraceInfo->metadata, body + off, len); req.setFbtraceInfo(fbtraceInfo); break; } #endif default: /* Ignore unknown tags silently */ break; } } if (opOut == mc_op_unknown) { throw std::runtime_error("Request missing operation"); } if (!reqidOut) { throw std::runtime_error("Request missing reqid"); } return req; } UmbrellaSerializedMessage::UmbrellaSerializedMessage() { /* These will not change from message to message */ msg_.msg_header.magic_byte = ENTRY_LIST_MAGIC_BYTE; msg_.msg_header.version = UMBRELLA_VERSION_BASIC; iovs_[0].iov_base = &msg_; iovs_[0].iov_len = sizeof(msg_); iovs_[1].iov_base = entries_; } void UmbrellaSerializedMessage::clear() { nEntries_ = nStrings_ = offset_ = 0; error_ = false; } bool 
UmbrellaSerializedMessage::prepare(const McReply& reply, mc_op_t op, uint64_t reqid, struct iovec*& iovOut, size_t& niovOut) { niovOut = 0; appendInt(I32, msg_op, umbrella_op_from_mc[op]); appendInt(U64, msg_reqid, reqid); appendInt(I32, msg_result, umbrella_res_from_mc[reply.result()]); if (reply.appSpecificErrorCode()) { appendInt(I32, msg_err_code, reply.appSpecificErrorCode()); } if (reply.flags()) { appendInt(U64, msg_flags, reply.flags()); } if (reply.exptime()) { appendInt(U64, msg_exptime, reply.exptime()); } if (reply.delta()) { appendInt(U64, msg_delta, reply.delta()); } if (reply.leaseToken()) { appendInt(U64, msg_lease_id, reply.leaseToken()); } if (reply.cas()) { appendInt(U64, msg_cas, reply.cas()); } if (reply.number()) { appendInt(U64, msg_number, reply.number()); } /* TODO: if we intend to pass chained IOBufs as values, we can optimize this to write multiple iovs directly */ if (reply.hasValue()) { auto valueRange = reply.valueRangeSlow(); appendString(msg_value, reinterpret_cast<const uint8_t*>(valueRange.begin()), valueRange.size()); } /* NOTE: this check must come after all append*() calls */ if (error_) { return false; } niovOut = finalizeMessage(); iovOut = iovs_; return true; } void UmbrellaSerializedMessage::appendInt( entry_type_t type, int32_t tag, uint64_t val) { if (nEntries_ >= kInlineEntries) { error_ = true; return; } um_elist_entry_t& entry = entries_[nEntries_++]; entry.type = folly::Endian::big((uint16_t)type); entry.tag = folly::Endian::big((uint16_t)tag); entry.data.val = folly::Endian::big((uint64_t)val); } void UmbrellaSerializedMessage::appendString( int32_t tag, const uint8_t* data, size_t len, entry_type_t type) { if (nStrings_ >= kInlineStrings) { error_ = true; return; } strings_[nStrings_++] = folly::StringPiece((const char*)data, len); um_elist_entry_t& entry = entries_[nEntries_++]; entry.type = folly::Endian::big((uint16_t)type); entry.tag = folly::Endian::big((uint16_t)tag); entry.data.str.offset = 
folly::Endian::big((uint32_t)offset_); entry.data.str.len = folly::Endian::big((uint32_t)(len + 1)); offset_ += len + 1; } size_t UmbrellaSerializedMessage::finalizeMessage() { static char nul = '\0'; size_t size = sizeof(entry_list_msg_t) + sizeof(um_elist_entry_t) * nEntries_ + offset_; msg_.total_size = folly::Endian::big((uint32_t)size); msg_.nentries = folly::Endian::big((uint16_t)nEntries_); iovs_[1].iov_len = sizeof(um_elist_entry_t) * nEntries_; size_t niovOut = 2; for (size_t i = 0; i < nStrings_; i++) { iovs_[niovOut].iov_base = (char *)strings_[i].begin(); iovs_[niovOut].iov_len = strings_[i].size(); niovOut++; iovs_[niovOut].iov_base = &nul; iovs_[niovOut].iov_len = 1; niovOut++; } return niovOut; } }}
evertrue/mcrouter
mcrouter/lib/network/UmbrellaProtocol.cpp
C++
bsd-3-clause
10,000
<?php /** * Zend Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://framework.zend.com/license/new-bsd * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@zend.com so we can send you a copy immediately. * * @category Zend * @package Zend_Mail * @subpackage Protocol * @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ /** * @namespace */ namespace Zend\Mail; use Zend\Validator\Hostname as HostnameValidator, Zend\Validator, Zend\Mail\Protocol; /** * Zend_Mail_Protocol_Abstract * * Provides low-level methods for concrete adapters to communicate with a remote mail server and track requests and responses. * * @uses \Zend\Mail\Protocol\Exception * @uses \Zend\Validator\ValidatorChain * @uses \Zend\Validator\Hostname\Hostname * @category Zend * @package Zend_Mail * @subpackage Protocol * @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. 
(http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License * @todo Implement proxy settings */ abstract class AbstractProtocol { /** * Mail default EOL string */ const EOL = "\r\n"; /** * Default timeout in seconds for initiating session */ const TIMEOUT_CONNECTION = 30; /** * Maximum of the transaction log * @var integer */ protected $_maximumLog = 64; /** * Hostname or IP address of remote server * @var string */ protected $_host; /** * Port number of connection * @var integer */ protected $_port; /** * Instance of Zend\Validator\ValidatorChain to check hostnames * @var \Zend\Validator\ValidatorChain */ protected $_validHost; /** * Socket connection resource * @var resource */ protected $_socket; /** * Last request sent to server * @var string */ protected $_request; /** * Array of server responses to last request * @var array */ protected $_response; /** * String template for parsing server responses using sscanf (default: 3 digit code and response string) * @var resource * @deprecated Since 1.10.3 */ protected $_template = '%d%s'; /** * Log of mail requests and server responses for a session * @var array */ private $_log = array(); /** * Constructor. 
* * @param string $host OPTIONAL Hostname of remote connection (default: 127.0.0.1) * @param integer $port OPTIONAL Port number (default: null) * @throws \Zend\Mail\Protocol\Exception * @return void */ public function __construct($host = '127.0.0.1', $port = null) { $this->_validHost = new Validator\ValidatorChain(); $this->_validHost->addValidator(new HostnameValidator(HostnameValidator::ALLOW_ALL)); if (!$this->_validHost->isValid($host)) { throw new Protocol\Exception\RuntimeException(implode(', ', $this->_validHost->getMessages())); } $this->_host = $host; $this->_port = $port; } /** * Class destructor to cleanup open resources * * @return void */ public function __destruct() { $this->_disconnect(); } /** * Set the maximum log size * * @param integer $maximumLog Maximum log size * @return void */ public function setMaximumLog($maximumLog) { $this->_maximumLog = (int) $maximumLog; } /** * Get the maximum log size * * @return int the maximum log size */ public function getMaximumLog() { return $this->_maximumLog; } /** * Create a connection to the remote host * * Concrete adapters for this class will implement their own unique connect scripts, using the _connect() method to create the socket resource. 
*/ abstract public function connect(); /** * Retrieve the last client request * * @return string */ public function getRequest() { return $this->_request; } /** * Retrieve the last server response * * @return array */ public function getResponse() { return $this->_response; } /** * Retrieve the transaction log * * @return string */ public function getLog() { return implode('', $this->_log); } /** * Reset the transaction log * * @return void */ public function resetLog() { $this->_log = array(); } /** * Add the transaction log * * @param string new transaction * @return void */ protected function _addLog($value) { if ($this->_maximumLog >= 0 && count($this->_log) >= $this->_maximumLog) { array_shift($this->_log); } $this->_log[] = $value; } /** * Connect to the server using the supplied transport and target * * An example $remote string may be 'tcp://mail.example.com:25' or 'ssh://hostname.com:2222' * * @param string $remote Remote * @throws \Zend\Mail\Protocol\Exception * @return boolean */ protected function _connect($remote) { $errorNum = 0; $errorStr = ''; // open connection $this->_socket = @stream_socket_client($remote, $errorNum, $errorStr, self::TIMEOUT_CONNECTION); if ($this->_socket === false) { if ($errorNum == 0) { $errorStr = 'Could not open socket'; } throw new Protocol\Exception\RuntimeException($errorStr); } if (($result = stream_set_timeout($this->_socket, self::TIMEOUT_CONNECTION)) === false) { throw new Protocol\Exception\RuntimeException('Could not set stream timeout'); } return $result; } /** * Disconnect from remote host and free resource * * @return void */ protected function _disconnect() { if (is_resource($this->_socket)) { fclose($this->_socket); } } /** * Send the given request followed by a LINEEND to the server. 
* * @param string $request * @throws \Zend\Mail\Protocol\Exception * @return integer|boolean Number of bytes written to remote host */ protected function _send($request) { if (!is_resource($this->_socket)) { throw new Protocol\Exception\RuntimeException('No connection has been established to ' . $this->_host); } $this->_request = $request; $result = fwrite($this->_socket, $request . self::EOL); // Save request to internal log $this->_addLog($request . self::EOL); if ($result === false) { throw new Protocol\Exception\RuntimeException('Could not send request to ' . $this->_host); } return $result; } /** * Get a line from the stream. * * @var integer $timeout Per-request timeout value if applicable * @throws \Zend\Mail\Protocol\Exception * @return string */ protected function _receive($timeout = null) { if (!is_resource($this->_socket)) { throw new Protocol\Exception\RuntimeException('No connection has been established to ' . $this->_host); } // Adapters may wish to supply per-commend timeouts according to appropriate RFC if ($timeout !== null) { stream_set_timeout($this->_socket, $timeout); } // Retrieve response $reponse = fgets($this->_socket, 1024); // Save request to internal log $this->_addLog($reponse); // Check meta data to ensure connection is still valid $info = stream_get_meta_data($this->_socket); if (!empty($info['timed_out'])) { throw new Protocol\Exception\RuntimeException($this->_host . ' has timed out'); } if ($reponse === false) { throw new Protocol\Exception\RuntimeException('Could not read from ' . $this->_host); } return $reponse; } /** * Parse server response for successful codes * * Read the response from the stream and check for expected return code. * Throws a Zend_Mail_Protocol_Exception if an unexpected code is returned. 
* * @param string|array $code One or more codes that indicate a successful response * @throws \Zend\Mail\Protocol\Exception * @return string Last line of response string */ protected function _expect($code, $timeout = null) { $this->_response = array(); $cmd = ''; $more = ''; $msg = ''; $errMsg = ''; if (!is_array($code)) { $code = array($code); } do { $this->_response[] = $result = $this->_receive($timeout); list($cmd, $more, $msg) = preg_split('/([\s-]+)/', $result, 2, PREG_SPLIT_DELIM_CAPTURE); if ($errMsg !== '') { $errMsg .= ' ' . $msg; } elseif ($cmd === null || !in_array($cmd, $code)) { $errMsg = $msg; } } while (strpos($more, '-') === 0); // The '-' message prefix indicates an information string instead of a response string. if ($errMsg !== '') { throw new Protocol\Exception\RuntimeException($errMsg); } return $msg; } }
magicobject/zf2
library/Zend/Mail/AbstractProtocol.php
PHP
bsd-3-clause
9,877
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:

Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.mapreduce.mongo.in;

import com.groupon.mapreduce.mongo.WritableBSONObject;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.IOException;
import java.util.Iterator;

/**
 * This reads Mongo Records from an Extent and returns Hadoop Records as WritableBSONObjects. The key
 * returned to the Mapper is the _id field from the Mongo Record as Text.
 */
public class MongoRecordReader extends RecordReader<Text, WritableBSONObject> {

    // Record most recently advanced to by nextKeyValue(); null before the first call.
    private Record current = null;
    // Iterator over the records of the single Extent backing this split; set in initialize().
    private Iterator<Record> iterator = null;
    // Filesystem the extent lives on; passed to Record accessors to read id/content lazily.
    private FileSystem fs;

    /**
     * Resolves the filesystem for the split's extent and positions an iterator
     * at the extent's first record. Hadoop calls this once before any
     * nextKeyValue() call.
     */
    @Override
    public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
            throws IOException, InterruptedException {
        MongoInputSplit mongoInputSplit = (MongoInputSplit) inputSplit;
        // NOTE(review): inputSplit is cast twice here; both casts refer to the same
        // MongoInputSplit instance, so behavior is unaffected.
        fs = ((MongoInputSplit) inputSplit).getExtent().getPath().getFileSystem(taskAttemptContext.getConfiguration());
        iterator = mongoInputSplit.getExtent().iterator(fs);
    }

    /**
     * Advances to the next record in the extent.
     *
     * @return false when the extent is exhausted, true otherwise
     */
    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        if (!iterator.hasNext())
            return false;
        current = iterator.next();
        return true;
    }

    /**
     * Returns the current record's id wrapped in a fresh Text instance
     * (a new object per call, so callers may hold onto it safely).
     */
    @Override
    public Text getCurrentKey() throws IOException, InterruptedException {
        return new Text(current.getId(fs));
    }

    /**
     * Returns the current record's BSON content wrapped in a fresh
     * WritableBSONObject.
     */
    @Override
    public WritableBSONObject getCurrentValue() throws IOException, InterruptedException {
        return new WritableBSONObject(current.getContent(fs));
    }

    /**
     * Coarse progress: per-record progress is not tracked, so this reports
     * 0.0 until the iterator is exhausted and 1.0 afterwards.
     */
    @Override
    public float getProgress() throws IOException, InterruptedException {
        if (!iterator.hasNext())
            return 1.0f;
        return 0.0f;
    }

    /**
     * No-op: this reader opens no resources of its own.
     * NOTE(review): the FileSystem obtained in initialize() is deliberately not
     * closed here — Hadoop typically caches/shares FileSystem instances; confirm.
     */
    @Override
    public void close() throws IOException {
    }
}
groupon/mongo-deep-mapreduce
src/main/java/com/groupon/mapreduce/mongo/in/MongoRecordReader.java
Java
bsd-3-clause
3,408
import numpy as np
from Coupling import Coupling


class Coupling2DCavities2D(Coupling):
    """Coupling describing transmission from a 2D cavity to a cavity."""

    @property
    def impedance_from(self):
        """Impedance of ``subsystem_from``.

        Chooses the right impedance of subsystem_from, applying the
        boundary-conditions correction as well.
        """
        return self.subsystem_from.impedance

    @property
    def impedance_to(self):
        """Impedance of ``subsystem_to``.

        Chooses the right impedance of subsystem_to, applying the
        boundary-conditions correction as well.
        """
        return self.subsystem_to.impedance

    @property
    def tau(self):
        """Transmission coefficient, one value per frequency band.

        NOTE(review): returns all zeros -- presumably a placeholder;
        confirm against the model this coupling was derived from.
        """
        return np.zeros(self.frequency.amount)

    @property
    def clf(self):
        """Coupling loss factor for transmission from a 2D cavity to a cavity.

        .. math:: \\eta_{12} = \\frac{ \\tau_{12}}{4 \\pi}

        See BAC, equation 3.14
        """
        return self.tau / (4.0 * np.pi)
FRidh/Sea
Sea/model/couplings/Coupling2DCavities2D.py
Python
bsd-3-clause
1,049
''' The `Filter` hierarchy contains Transformer classes that take a `Stim` of one type as input and return a `Stim` of the same type as output (but with some changes to its data). ''' from .audio import (AudioTrimmingFilter, AudioResamplingFilter) from .base import TemporalTrimmingFilter from .image import (ImageCroppingFilter, ImageResizingFilter, PillowImageFilter) from .text import (WordStemmingFilter, TokenizingFilter, TokenRemovalFilter, PunctuationRemovalFilter, LowerCasingFilter) from .video import (FrameSamplingFilter, VideoTrimmingFilter) __all__ = [ 'AudioTrimmingFilter', 'AudioResamplingFilter', 'TemporalTrimmingFilter', 'ImageCroppingFilter', 'ImageResizingFilter', 'PillowImageFilter', 'WordStemmingFilter', 'TokenizingFilter', 'TokenRemovalFilter', 'PunctuationRemovalFilter', 'LowerCasingFilter', 'FrameSamplingFilter', 'VideoTrimmingFilter' ]
tyarkoni/pliers
pliers/filters/__init__.py
Python
bsd-3-clause
1,079
#!/usr/bin/env python
"""Packaging configuration for the reddit gold plugin."""

from setuptools import setup, find_packages

# Collect all distribution metadata in one place, then hand it to setuptools.
metadata = dict(
    name='reddit_gold',
    description='reddit gold',
    version='0.1',
    author='Chad Birch',
    author_email='chad@reddit.com',
    packages=find_packages(),
    install_requires=[
        'r2',
    ],
    # Expose the Gold plugin through the r2 plugin entry-point group.
    entry_points={
        'r2.plugin': ['gold = reddit_gold:Gold'],
    },
    include_package_data=True,
    zip_safe=False,
)

setup(**metadata)
madbook/reddit-plugin-gold
setup.py
Python
bsd-3-clause
418
<?php /* * ircPlanet Services for ircu * Copyright (c) 2005 Brian Cline. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of ircPlanet nor the names of its contributors may be * used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/

// Resolve the live channel object; bail out if nobody is on it.
if (!($chan = $this->getChannel($chan_name))) {
    $bot->noticef($user, "Nobody is on channel %s.", $chan_name);
    return false;
}

// The bot must itself be present in the channel to set modes and kick.
if (!$chan->isOn($bot->getNumeric())) {
    $bot->noticef($user, 'I am not on %s.', $chan->getName());
    return false;
}

// Everything after the first two command arguments is the kick/ban reason.
$reason = assemble($pargs, 2);

// Ban and kick every non-bot occupant except the issuing user.
$users = $this->getChannelUsersByMask($chan_name);
foreach ($users as $numeric => $chan_user) {
    if (!$chan_user->isBot() && $chan_user != $user) {
        $mask = $chan_user->getHostMask();

        // Persist the ban against the channel registration.
        $ban = new DB_Ban($chan_reg->getId(), $user->getAccountId(), $mask);
        $ban->setReason($reason);
        $chan_reg->addBan($ban);

        // Deop and ban in one mode change, then kick with the reason,
        // and mirror the ban onto the in-memory channel state.
        $bot->mode($chan->getName(), "-o+b $numeric $mask");
        $bot->kick($chan->getName(), $numeric, $reason);
        $chan->addBan($mask);
    }
}

$chan_reg->save();
briancline/googlecode-ircplanet
Channel/commands/kickbanall.php
PHP
bsd-3-clause
2,380
# -*- coding: utf-8 -*- from django.contrib import admin from ionyweb.plugin_app.plugin_video.models import Plugin_Video admin.site.register(Plugin_Video)
makinacorpus/ionyweb
ionyweb/plugin_app/plugin_video/admin.py
Python
bsd-3-clause
157
/* * Copyright (c) 2009-2015, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.hhs.fha.nhinc.docquery.nhin.proxy; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.any; import gov.hhs.fha.nhinc.aspect.NwhinInvocationEvent; import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType; import gov.hhs.fha.nhinc.common.nhinccommon.HomeCommunityType; import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType; import gov.hhs.fha.nhinc.connectmgr.ConnectionManager; import gov.hhs.fha.nhinc.connectmgr.ConnectionManagerCache; import gov.hhs.fha.nhinc.docquery.aspect.AdhocQueryRequestDescriptionBuilder; import gov.hhs.fha.nhinc.docquery.aspect.AdhocQueryResponseDescriptionBuilder; import gov.hhs.fha.nhinc.messaging.client.CONNECTClient; import gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor; import gov.hhs.fha.nhinc.nhinclib.NhincConstants.UDDI_SPEC_VERSION; import ihe.iti.xds_b._2007.RespondingGatewayQueryPortType; import java.lang.reflect.Method; import javax.xml.ws.Service; import oasis.names.tc.ebxml_regrep.xsd.query._3.AdhocQueryRequest; import org.jmock.Mockery; import org.jmock.integration.junit4.JMock; import org.jmock.integration.junit4.JUnit4Mockery; import org.jmock.lib.legacy.ClassImposteriser; import org.junit.Test; import org.junit.runner.RunWith; /** * * @author Neil Webb */ @RunWith(JMock.class) public class NhinDocQueryWebServiceProxyTest { Mockery context = new JUnit4Mockery() { { setImposteriser(ClassImposteriser.INSTANCE); } }; final Service mockService = context.mock(Service.class); final RespondingGatewayQueryPortType mockPort = context.mock(RespondingGatewayQueryPortType.class); @SuppressWarnings("unchecked") private CONNECTClient<RespondingGatewayQueryPortType> client = mock(CONNECTClient.class); private ConnectionManagerCache cache = mock(ConnectionManagerCache.class); private AdhocQueryRequest 
request; private AssertionType assertion; @Test public void hasBeginOutboundProcessingEvent() throws Exception { Class<NhinDocQueryProxyWebServiceSecuredImpl> clazz = NhinDocQueryProxyWebServiceSecuredImpl.class; Method method = clazz.getMethod("respondingGatewayCrossGatewayQuery", AdhocQueryRequest.class, AssertionType.class, NhinTargetSystemType.class); NwhinInvocationEvent annotation = method.getAnnotation(NwhinInvocationEvent.class); assertNotNull(annotation); assertEquals(AdhocQueryRequestDescriptionBuilder.class, annotation.beforeBuilder()); assertEquals(AdhocQueryResponseDescriptionBuilder.class, annotation.afterReturningBuilder()); assertEquals("Document Query", annotation.serviceType()); assertEquals("", annotation.version()); } @Test public void testNoMtom() throws Exception { NhinDocQueryProxyWebServiceSecuredImpl impl = getImpl(); NhinTargetSystemType target = getTarget("1.1"); impl.respondingGatewayCrossGatewayQuery(request, assertion, target); verify(client, never()).enableMtom(); } @Test public void testUsingGuidance() throws Exception { NhinDocQueryProxyWebServiceSecuredImpl impl = getImpl(); NhinTargetSystemType target = getTarget("1.1"); impl.respondingGatewayCrossGatewayQuery(request, assertion, target); verify(cache).getEndpointURLByServiceNameSpecVersion(any(String.class), any(String.class), any(UDDI_SPEC_VERSION.class)); } /** * @param hcidValue * @return */ private NhinTargetSystemType getTarget(String hcidValue) { NhinTargetSystemType target = new NhinTargetSystemType(); HomeCommunityType hcid = new HomeCommunityType(); hcid.setHomeCommunityId(hcidValue); target.setHomeCommunity(hcid); target.setUseSpecVersion("2.0"); return target; } /** * @return */ private NhinDocQueryProxyWebServiceSecuredImpl getImpl() { return new NhinDocQueryProxyWebServiceSecuredImpl() { /* * (non-Javadoc) * * @see * gov.hhs.fha.nhinc.docquery.nhin.proxy.NhinDocQueryProxyWebServiceSecuredImpl#getCONNECTClientSecured( * 
gov.hhs.fha.nhinc.messaging.service.port.ServicePortDescriptor, * gov.hhs.fha.nhinc.common.nhinccommon.AssertionType, java.lang.String, * gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType) */ @Override public CONNECTClient<RespondingGatewayQueryPortType> getCONNECTClientSecured( ServicePortDescriptor<RespondingGatewayQueryPortType> portDescriptor, AssertionType assertion, String url, NhinTargetSystemType target) { return client; } /* (non-Javadoc) * @see gov.hhs.fha.nhinc.docquery.nhin.proxy.NhinDocQueryProxyWebServiceSecuredImpl#getCMInstance() */ @Override protected ConnectionManager getCMInstance() { return cache; } }; } }
beiyuxinke/CONNECT
Product/Production/Services/DocumentQueryCore/src/test/java/gov/hhs/fha/nhinc/docquery/nhin/proxy/NhinDocQueryWebServiceProxyTest.java
Java
bsd-3-clause
6,930
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace Zend\Form\Element; use Traversable; use Zend\Form\Element; use Zend\Form\ElementInterface; use Zend\Form\Exception; use Zend\Form\Fieldset; use Zend\Form\FieldsetInterface; use Zend\Form\FormInterface; use Zend\Stdlib\ArrayUtils; class Collection extends Fieldset { /** * Default template placeholder */ const DEFAULT_TEMPLATE_PLACEHOLDER = '__index__'; /** * Element used in the collection * * @var ElementInterface */ protected $targetElement; /** * Initial count of target element * * @var int */ protected $count = 1; /** * Are new elements allowed to be added dynamically ? * * @var bool */ protected $allowAdd = true; /** * Are existing elements allowed to be removed dynamically ? * * @var bool */ protected $allowRemove = true; /** * Is the template generated ? * * @var bool */ protected $shouldCreateTemplate = false; /** * Placeholder used in template content for making your life easier with JavaScript * * @var string */ protected $templatePlaceholder = self::DEFAULT_TEMPLATE_PLACEHOLDER; /** * Whether or not to create new objects during modify * * @var bool */ protected $createNewObjects = false; /** * Element used as a template * * @var ElementInterface|FieldsetInterface */ protected $templateElement; /** * The index of the last child element or fieldset * * @var int */ protected $lastChildIndex = -1; /** * Should child elements must be created on self::prepareElement()? 
* * @var bool */ protected $shouldCreateChildrenOnPrepareElement = true; /** * Accepted options for Collection: * - target_element: an array or element used in the collection * - count: number of times the element is added initially * - allow_add: if set to true, elements can be added to the form dynamically (using JavaScript) * - allow_remove: if set to true, elements can be removed to the form * - should_create_template: if set to true, a template is generated (inside a <span>) * - template_placeholder: placeholder used in the data template * * @param array|Traversable $options * @return Collection */ public function setOptions($options) { parent::setOptions($options); if (isset($options['target_element'])) { $this->setTargetElement($options['target_element']); } if (isset($options['count'])) { $this->setCount($options['count']); } if (isset($options['allow_add'])) { $this->setAllowAdd($options['allow_add']); } if (isset($options['allow_remove'])) { $this->setAllowRemove($options['allow_remove']); } if (isset($options['should_create_template'])) { $this->setShouldCreateTemplate($options['should_create_template']); } if (isset($options['template_placeholder'])) { $this->setTemplatePlaceholder($options['template_placeholder']); } if (isset($options['create_new_objects'])) { $this->setCreateNewObjects($options['create_new_objects']); } return $this; } /** * Checks if the object can be set in this fieldset * * @param object $object * @return bool */ public function allowObjectBinding($object) { return true; } /** * Set the object used by the hydrator * In this case the "object" is a collection of objects * * @param array|Traversable $object * @return Fieldset|FieldsetInterface * @throws Exception\InvalidArgumentException */ public function setObject($object) { if (!is_array($object) && !$object instanceof Traversable) { throw new Exception\InvalidArgumentException(sprintf( '%s expects an array or Traversable object argument; received "%s"', __METHOD__, 
(is_object($object) ? get_class($object) : gettype($object)) )); } $this->object = $object; $this->count = count($object) > $this->count ? count($object) : $this->count; return $this; } /** * Populate values * * @param array|Traversable $data * @throws \Zend\Form\Exception\InvalidArgumentException * @throws \Zend\Form\Exception\DomainException * @return void */ public function populateValues($data) { if (!is_array($data) && !$data instanceof Traversable) { throw new Exception\InvalidArgumentException(sprintf( '%s expects an array or Traversable set of data; received "%s"', __METHOD__, (is_object($data) ? get_class($data) : gettype($data)) )); } // Can't do anything with empty data if (empty($data)) { return; } if (!$this->allowRemove && count($data) < $this->count) { throw new Exception\DomainException(sprintf( 'There are fewer elements than specified in the collection (%s). Either set the allow_remove option ' . 'to true, or re-submit the form.', get_class($this) )); } // Check to see if elements have been replaced or removed foreach ($this->byName as $name => $elementOrFieldset) { if (isset($data[$name])) { continue; } if (!$this->allowRemove) { throw new Exception\DomainException(sprintf( 'Elements have been removed from the collection (%s) but the allow_remove option is not true.', get_class($this) )); } $this->remove($name); } foreach ($data as $key => $value) { if ($this->has($key)) { $elementOrFieldset = $this->get($key); } else { $elementOrFieldset = $this->addNewTargetElementInstance($key); if ($key > $this->lastChildIndex) { $this->lastChildIndex = $key; } } if ($elementOrFieldset instanceof FieldsetInterface) { $elementOrFieldset->populateValues($value); } else { $elementOrFieldset->setAttribute('value', $value); } } if (!$this->createNewObjects()) { $this->replaceTemplateObjects(); } } /** * Checks if this fieldset can bind data * * @return bool */ public function allowValueBinding() { return true; } /** * Bind values to the object * * @param array 
$values * @return array|mixed|void */ public function bindValues(array $values = array()) { $collection = array(); foreach ($values as $name => $value) { $element = $this->get($name); if ($element instanceof FieldsetInterface) { $collection[] = $element->bindValues($value); } else { $collection[] = $value; } } return $collection; } /** * Set the initial count of target element * * @param $count * @return Collection */ public function setCount($count) { $this->count = $count > 0 ? $count : 0; return $this; } /** * Get the initial count of target element * * @return int */ public function getCount() { return $this->count; } /** * Set the target element * * @param ElementInterface|array|Traversable $elementOrFieldset * @return Collection * @throws \Zend\Form\Exception\InvalidArgumentException */ public function setTargetElement($elementOrFieldset) { if (is_array($elementOrFieldset) || ($elementOrFieldset instanceof Traversable && !$elementOrFieldset instanceof ElementInterface) ) { $factory = $this->getFormFactory(); $elementOrFieldset = $factory->create($elementOrFieldset); } if (!$elementOrFieldset instanceof ElementInterface) { throw new Exception\InvalidArgumentException(sprintf( '%s requires that $elementOrFieldset be an object implementing %s; received "%s"', __METHOD__, __NAMESPACE__ . '\ElementInterface', (is_object($elementOrFieldset) ? 
get_class($elementOrFieldset) : gettype($elementOrFieldset)) )); } $this->targetElement = $elementOrFieldset; return $this; } /** * Get target element * * @return ElementInterface|null */ public function getTargetElement() { return $this->targetElement; } /** * Get allow add * * @param bool $allowAdd * @return Collection */ public function setAllowAdd($allowAdd) { $this->allowAdd = (bool) $allowAdd; return $this; } /** * Get allow add * * @return bool */ public function allowAdd() { return $this->allowAdd; } /** * @param bool $allowRemove * @return Collection */ public function setAllowRemove($allowRemove) { $this->allowRemove = (bool) $allowRemove; return $this; } /** * @return bool */ public function allowRemove() { return $this->allowRemove; } /** * If set to true, a template prototype is automatically added to the form to ease the creation of dynamic elements through JavaScript * * @param bool $shouldCreateTemplate * @return Collection */ public function setShouldCreateTemplate($shouldCreateTemplate) { $this->shouldCreateTemplate = (bool) $shouldCreateTemplate; return $this; } /** * Get if the collection should create a template * * @return bool */ public function shouldCreateTemplate() { return $this->shouldCreateTemplate; } /** * Set the placeholder used in the template generated to help create new elements in JavaScript * * @param string $templatePlaceholder * @return Collection */ public function setTemplatePlaceholder($templatePlaceholder) { if (is_string($templatePlaceholder)) { $this->templatePlaceholder = $templatePlaceholder; } return $this; } /** * Get the template placeholder * * @return string */ public function getTemplatePlaceholder() { return $this->templatePlaceholder; } /** * @param bool $createNewObjects * @return Collection */ public function setCreateNewObjects($createNewObjects) { $this->createNewObjects = (bool) $createNewObjects; return $this; } /** * @return bool */ public function createNewObjects() { return $this->createNewObjects; } 
/** * Get a template element used for rendering purposes only * * @return null|ElementInterface|FieldsetInterface */ public function getTemplateElement() { if ($this->templateElement === null) { $this->templateElement = $this->createTemplateElement(); } return $this->templateElement; } /** * Prepare the collection by adding a dummy template element if the user want one * * @param FormInterface $form * @return mixed|void */ public function prepareElement(FormInterface $form) { if (true === $this->shouldCreateChildrenOnPrepareElement) { if ($this->targetElement !== null && $this->count > 0) { while ($this->count > $this->lastChildIndex + 1) { $this->addNewTargetElementInstance(++$this->lastChildIndex); } } } // Create a template that will also be prepared if ($this->shouldCreateTemplate) { $templateElement = $this->getTemplateElement(); $this->add($templateElement); } parent::prepareElement($form); // The template element has been prepared, but we don't want it to be rendered nor validated, so remove it from the list if ($this->shouldCreateTemplate) { $this->remove($this->templatePlaceholder); } } /** * @return array * @throws \Zend\Form\Exception\InvalidArgumentException * @throws \Zend\Stdlib\Exception\InvalidArgumentException * @throws \Zend\Form\Exception\DomainException * @throws \Zend\Form\Exception\InvalidElementException */ public function extract() { if ($this->object instanceof Traversable) { $this->object = ArrayUtils::iteratorToArray($this->object, false); } if (!is_array($this->object)) { return array(); } $values = array(); foreach ($this->object as $key => $value) { // If a hydrator is provided, our work here is done if ($this->hydrator) { $values[$key] = $this->hydrator->extract($value); continue; } // If the target element is a fieldset that can accept the provided value // we should clone it, inject the value and extract the data if ( $this->targetElement instanceof FieldsetInterface ) { if ( ! 
$this->targetElement->allowObjectBinding($value) ) { continue; } $targetElement = clone $this->targetElement; $targetElement->setObject($value); $values[$key] = $targetElement->extract(); if (!$this->createNewObjects() && $this->has($key)) { $this->get($key)->setObject($value); } continue; } // If the target element is a non-fieldset element, just use the value if ( $this->targetElement instanceof ElementInterface ) { $values[$key] = $value; if (!$this->createNewObjects() && $this->has($key)) { $this->get($key)->setValue($value); } continue; } } return $values; } /** * Create a new instance of the target element * * @return ElementInterface */ protected function createNewTargetElementInstance() { return clone $this->targetElement; } /** * Add a new instance of the target element * * @param string $name * @return ElementInterface * @throws Exception\DomainException */ protected function addNewTargetElementInstance($name) { $this->shouldCreateChildrenOnPrepareElement = false; $elementOrFieldset = $this->createNewTargetElementInstance(); $elementOrFieldset->setName($name); $this->add($elementOrFieldset); if (!$this->allowAdd && $this->count() > $this->count) { throw new Exception\DomainException(sprintf( 'There are more elements than specified in the collection (%s). Either set the allow_add option ' . 'to true, or re-submit the form.', get_class($this) )); } return $elementOrFieldset; } /** * Create a dummy template element * * @return null|ElementInterface|FieldsetInterface */ protected function createTemplateElement() { if (!$this->shouldCreateTemplate) { return null; } if ($this->templateElement) { return $this->templateElement; } $elementOrFieldset = $this->createNewTargetElementInstance(); $elementOrFieldset->setName($this->templatePlaceholder); return $elementOrFieldset; } /** * Replaces the default template object of a sub element with the corresponding * real entity so that all properties are preserved. 
* * @return void */ protected function replaceTemplateObjects() { $fieldsets = $this->getFieldsets(); if (!count($fieldsets) || !$this->object) { return; } foreach ($fieldsets as $fieldset) { $i = $fieldset->getName(); if (isset($this->object[$i])) { $fieldset->setObject($this->object[$i]); } } } }
stromengine/10001
vendor/zendframework/zendframework/library/Zend/Form/Element/Collection.php
PHP
bsd-3-clause
17,810
/* * Copyright (c) 2011-2013, Longxiang He <helongxiang@smeshlink.com>, * SmeshLink Technology Co. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY. * * This file is part of the CoAP.NET, a CoAP framework in C#. * Please see README for more information. */ using System; namespace CoAP { /// <summary> /// Represents an event when a response arrives for a request. /// </summary> public class ResponseEventArgs : EventArgs { private Response _response; /// <summary> /// /// </summary> public ResponseEventArgs(Response response) { _response = response; } /// <summary> /// Gets the incoming response. /// </summary> public Response Response { get { return _response; } } } }
martindevans/CoAP.NET
CoAP.NET/ResponseEventArgs.cs
C#
bsd-3-clause
893
/* * Copyright 2009, Google Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // This file contains the definition of DrawElement. 
#include "core/cross/precompile.h"
#include "core/cross/draw_element.h"
#include "core/cross/renderer.h"
#include "core/cross/error.h"

namespace o3d {

O3D_DEFN_CLASS(DrawElement, ParamObject);

// Name of the Param that holds this draw element's material.
const char* DrawElement::kMaterialParamName =
    O3D_STRING_CONSTANT("material");

// Factory entry point: DrawElements are renderer-specific, so creation is
// delegated to the current Renderer service. Reports an error and returns a
// null Ref when no renderer is registered.
ObjectBase::Ref DrawElement::Create(ServiceLocator* service_locator) {
  Renderer* renderer = service_locator->GetService<Renderer>();
  if (NULL == renderer) {
    O3D_ERROR(service_locator) << "No Render Device Available";
    return ObjectBase::Ref();
  }
  return ObjectBase::Ref(renderer->CreateDrawElement());
}

DrawElement::DrawElement(ServiceLocator* service_locator)
    : ParamObject(service_locator),
      owner_(NULL) {
  // Create and track the material Param up front.
  RegisterParamRef(kMaterialParamName, &material_param_ref_);
}

DrawElement::~DrawElement() {
}

// Re-parents this draw element: detaches it from its current owning Element
// (if any) and attaches it to |new_owner|. |new_owner| may be NULL to detach.
void DrawElement::SetOwner(Element* new_owner) {
  // Hold a ref to ourselves so we make sure we don't get deleted while
  // as we remove ourself from our current owner.
  DrawElement::Ref temp(this);

  if (owner_ != NULL) {
    bool removed = owner_->RemoveDrawElement(this);
    DLOG_ASSERT(removed);
  }

  owner_ = new_owner;

  if (new_owner) {
    new_owner->AddDrawElement(this);
  }
}

}  // namespace o3d
amyvmiwei/chromium
o3d/core/cross/draw_element.cc
C++
bsd-3-clause
2,847
#ifndef NT2_INCLUDE_FUNCTIONS_SLIDE_HPP_INCLUDED #define NT2_INCLUDE_FUNCTIONS_SLIDE_HPP_INCLUDED #include <nt2/memory/include/functions/slide.hpp> #include <nt2/memory/include/functions/scalar/slide.hpp> #include <nt2/memory/include/functions/simd/slide.hpp> #endif
hainm/pythran
third_party/nt2/include/functions/slide.hpp
C++
bsd-3-clause
269
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/browser/download/mhtml_generation_manager.h"

#include <map>
#include <queue>
#include <utility>

#include "base/bind.h"
#include "base/files/file.h"
#include "base/guid.h"
#include "base/macros.h"
#include "base/scoped_observer.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "content/browser/bad_message.h"
#include "content/browser/frame_host/frame_tree_node.h"
#include "content/browser/frame_host/render_frame_host_impl.h"
#include "content/common/frame_messages.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_process_host_observer.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/mhtml_generation_params.h"
#include "net/base/mime_util.h"

namespace content {

// The class and all of its members live on the UI thread.  Only static methods
// are executed on other threads.
//
// A Job tracks one MHTML generation request: it walks the frame tree one
// frame at a time (see SendToNextRenderFrame / OnSerializeAsMHTMLResponse),
// writing all parts into a single |browser_file_|.
class MHTMLGenerationManager::Job : public RenderProcessHostObserver {
 public:
  Job(int job_id,
      WebContents* web_contents,
      const MHTMLGenerationParams& params,
      const GenerateMHTMLCallback& callback);
  ~Job() override;

  int id() const { return job_id_; }
  void set_browser_file(base::File file) { browser_file_ = std::move(file); }

  const GenerateMHTMLCallback& callback() const { return callback_; }

  // Indicates whether we expect a message from the |sender| at this time.
  // We expect only one message per frame - therefore calling this method
  // will always clear |frame_tree_node_id_of_busy_frame_|.
  bool IsMessageFromFrameExpected(RenderFrameHostImpl* sender);

  // Handler for FrameHostMsg_SerializeAsMHTMLResponse (a notification from the
  // renderer that the MHTML generation for previous frame has finished).
  // Returns |true| upon success; |false| otherwise.
  bool OnSerializeAsMHTMLResponse(
      const std::set<std::string>& digests_of_uris_of_serialized_resources);

  // Sends IPC to the renderer, asking for MHTML generation of the next frame.
  //
  // Returns true if the message was sent successfully; false otherwise.
  bool SendToNextRenderFrame();

  // Indicates if more calls to SendToNextRenderFrame are needed.
  bool IsDone() const {
    bool waiting_for_response_from_renderer =
        frame_tree_node_id_of_busy_frame_ !=
        FrameTreeNode::kFrameTreeNodeInvalidId;
    bool no_more_requests_to_send = pending_frame_tree_node_ids_.empty();
    return !waiting_for_response_from_renderer && no_more_requests_to_send;
  }

  // Close the file on the file thread and respond back on the UI thread with
  // file size.
  void CloseFile(base::Callback<void(int64_t file_size)> callback);

  // RenderProcessHostObserver:
  void RenderProcessExited(RenderProcessHost* host,
                           base::TerminationStatus status,
                           int exit_code) override;
  void RenderProcessHostDestroyed(RenderProcessHost* host) override;

  void MarkAsFinished();

 private:
  static int64_t CloseFileOnFileThread(base::File file);
  void AddFrame(RenderFrameHost* render_frame_host);

  // Creates a new map with values (content ids) the same as in
  // |frame_tree_node_to_content_id_| map, but with the keys translated from
  // frame_tree_node_id into a |site_instance|-specific routing_id.
  std::map<int, std::string> CreateFrameRoutingIdToContentId(
      SiteInstance* site_instance);

  // Id used to map renderer responses to jobs.
  // See also MHTMLGenerationManager::id_to_job_ map.
  int job_id_;

  // User-configurable parameters. Includes the file location, binary encoding
  // choices, and whether to skip storing resources marked
  // Cache-Control: no-store.
  MHTMLGenerationParams params_;

  // The IDs of frames that still need to be processed.
  std::queue<int> pending_frame_tree_node_ids_;

  // Identifies a frame to which we've sent FrameMsg_SerializeAsMHTML but for
  // which we didn't yet process FrameHostMsg_SerializeAsMHTMLResponse via
  // OnSerializeAsMHTMLResponse.
  int frame_tree_node_id_of_busy_frame_;

  // The handle to the file the MHTML is saved to for the browser process.
  base::File browser_file_;

  // Map from frames to content ids (see WebFrameSerializer::generateMHTMLParts
  // for more details about what "content ids" are and how they are used).
  std::map<int, std::string> frame_tree_node_to_content_id_;

  // MIME multipart boundary to use in the MHTML doc.
  std::string mhtml_boundary_marker_;

  // Digests of URIs of already generated MHTML parts.
  std::set<std::string> digests_of_already_serialized_uris_;
  // Salt sent to renderers together with |digests_of_already_serialized_uris_|
  // (see SendToNextRenderFrame()).
  std::string salt_;

  // The callback to call once generation is complete.
  const GenerateMHTMLCallback callback_;

  // Whether the job is finished (set to true only for the short duration of
  // time between MHTMLGenerationManager::JobFinished is called and the job is
  // destroyed by MHTMLGenerationManager::OnFileClosed).
  bool is_finished_;

  // RAII helper for registering this Job as a RenderProcessHost observer.
  ScopedObserver<RenderProcessHost, MHTMLGenerationManager::Job>
      observed_renderer_process_host_;

  DISALLOW_COPY_AND_ASSIGN(Job);
};

MHTMLGenerationManager::Job::Job(int job_id,
                                 WebContents* web_contents,
                                 const MHTMLGenerationParams& params,
                                 const GenerateMHTMLCallback& callback)
    : job_id_(job_id),
      params_(params),
      frame_tree_node_id_of_busy_frame_(FrameTreeNode::kFrameTreeNodeInvalidId),
      mhtml_boundary_marker_(net::GenerateMimeMultipartBoundary()),
      salt_(base::GenerateGUID()),
      callback_(callback),
      is_finished_(false),
      observed_renderer_process_host_(this) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  web_contents->ForEachFrame(base::Bind(
      &MHTMLGenerationManager::Job::AddFrame,
      base::Unretained(this)));  // Safe because ForEachFrame is synchronous.

  // Main frame needs to be processed first.
  DCHECK(!pending_frame_tree_node_ids_.empty());
  DCHECK(FrameTreeNode::GloballyFindByID(pending_frame_tree_node_ids_.front())
             ->parent() == nullptr);
}

MHTMLGenerationManager::Job::~Job() {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
}

std::map<int, std::string>
MHTMLGenerationManager::Job::CreateFrameRoutingIdToContentId(
    SiteInstance* site_instance) {
  std::map<int, std::string> result;
  for (const auto& it : frame_tree_node_to_content_id_) {
    int ftn_id = it.first;
    const std::string& content_id = it.second;

    FrameTreeNode* ftn = FrameTreeNode::GloballyFindByID(ftn_id);
    if (!ftn)
      continue;

    // Frames not present in |site_instance| have no routing id there; they
    // are simply left out of the result map.
    int routing_id =
        ftn->render_manager()->GetRoutingIdForSiteInstance(site_instance);
    if (routing_id == MSG_ROUTING_NONE)
      continue;

    result[routing_id] = content_id;
  }
  return result;
}

bool MHTMLGenerationManager::Job::SendToNextRenderFrame() {
  DCHECK(browser_file_.IsValid());
  DCHECK(!pending_frame_tree_node_ids_.empty());

  FrameMsg_SerializeAsMHTML_Params ipc_params;
  ipc_params.job_id = job_id_;
  ipc_params.mhtml_boundary_marker = mhtml_boundary_marker_;
  ipc_params.mhtml_binary_encoding = params_.use_binary_encoding;
  ipc_params.mhtml_cache_control_policy = params_.cache_control_policy;

  int frame_tree_node_id = pending_frame_tree_node_ids_.front();
  pending_frame_tree_node_ids_.pop();
  ipc_params.is_last_frame = pending_frame_tree_node_ids_.empty();

  FrameTreeNode* ftn = FrameTreeNode::GloballyFindByID(frame_tree_node_id);
  if (!ftn)  // The contents went away.
    return false;
  RenderFrameHost* rfh = ftn->current_frame_host();

  // Get notified if the target of the IPC message dies between responding.
  observed_renderer_process_host_.RemoveAll();
  observed_renderer_process_host_.Add(rfh->GetProcess());

  // Tell the renderer to skip (= deduplicate) already covered MHTML parts.
  ipc_params.salt = salt_;
  ipc_params.digests_of_uris_to_skip = digests_of_already_serialized_uris_;

  ipc_params.destination_file = IPC::GetPlatformFileForTransit(
      browser_file_.GetPlatformFile(), false);  // |close_source_handle|.
  ipc_params.frame_routing_id_to_content_id =
      CreateFrameRoutingIdToContentId(rfh->GetSiteInstance());

  // Send the IPC asking the renderer to serialize the frame.
  DCHECK_EQ(FrameTreeNode::kFrameTreeNodeInvalidId,
            frame_tree_node_id_of_busy_frame_);
  frame_tree_node_id_of_busy_frame_ = frame_tree_node_id;
  rfh->Send(new FrameMsg_SerializeAsMHTML(rfh->GetRoutingID(), ipc_params));
  return true;
}

void MHTMLGenerationManager::Job::RenderProcessExited(
    RenderProcessHost* host,
    base::TerminationStatus status,
    int exit_code) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  MHTMLGenerationManager::GetInstance()->RenderProcessExited(this);
}

void MHTMLGenerationManager::Job::MarkAsFinished() {
  DCHECK(!is_finished_);
  is_finished_ = true;

  // Stopping RenderProcessExited notifications is needed to avoid calling
  // JobFinished twice.  See also https://crbug.com/612098.
  observed_renderer_process_host_.RemoveAll();
}

void MHTMLGenerationManager::Job::AddFrame(RenderFrameHost* render_frame_host) {
  auto* rfhi = static_cast<RenderFrameHostImpl*>(render_frame_host);
  int frame_tree_node_id = rfhi->frame_tree_node()->frame_tree_node_id();
  pending_frame_tree_node_ids_.push(frame_tree_node_id);

  std::string guid = base::GenerateGUID();
  std::string content_id = base::StringPrintf("<frame-%d-%s@mhtml.blink>",
                                              frame_tree_node_id, guid.c_str());
  frame_tree_node_to_content_id_[frame_tree_node_id] = content_id;
}

void MHTMLGenerationManager::Job::RenderProcessHostDestroyed(
    RenderProcessHost* host) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  observed_renderer_process_host_.Remove(host);
}

void MHTMLGenerationManager::Job::CloseFile(
    base::Callback<void(int64_t)> callback) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (!browser_file_.IsValid()) {
    // No file was ever handed to this job - report failure via size -1.
    callback.Run(-1);
    return;
  }

  BrowserThread::PostTaskAndReplyWithResult(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&MHTMLGenerationManager::Job::CloseFileOnFileThread,
                 base::Passed(std::move(browser_file_))),
      callback);
}

bool MHTMLGenerationManager::Job::IsMessageFromFrameExpected(
    RenderFrameHostImpl* sender) {
  int sender_id = sender->frame_tree_node()->frame_tree_node_id();
  if (sender_id != frame_tree_node_id_of_busy_frame_)
    return false;

  // We only expect one message per frame - let's make sure subsequent messages
  // from the same |sender| will be rejected.
  frame_tree_node_id_of_busy_frame_ = FrameTreeNode::kFrameTreeNodeInvalidId;

  return true;
}

bool MHTMLGenerationManager::Job::OnSerializeAsMHTMLResponse(
    const std::set<std::string>& digests_of_uris_of_serialized_resources) {
  // Renderer should be deduping resources with the same uris.
  DCHECK_EQ(0u, base::STLSetIntersection<std::set<std::string>>(
                    digests_of_already_serialized_uris_,
                    digests_of_uris_of_serialized_resources).size());
  digests_of_already_serialized_uris_.insert(
      digests_of_uris_of_serialized_resources.begin(),
      digests_of_uris_of_serialized_resources.end());

  if (pending_frame_tree_node_ids_.empty())
    return true;  // Report success - all frames have been processed.

  return SendToNextRenderFrame();
}

// static
int64_t MHTMLGenerationManager::Job::CloseFileOnFileThread(base::File file) {
  DCHECK_CURRENTLY_ON(BrowserThread::FILE);
  DCHECK(file.IsValid());
  int64_t file_size = file.GetLength();
  file.Close();
  return file_size;
}

MHTMLGenerationManager* MHTMLGenerationManager::GetInstance() {
  return base::Singleton<MHTMLGenerationManager>::get();
}

MHTMLGenerationManager::MHTMLGenerationManager() : next_job_id_(0) {}

MHTMLGenerationManager::~MHTMLGenerationManager() {
  STLDeleteValues(&id_to_job_);
}

void MHTMLGenerationManager::SaveMHTML(WebContents* web_contents,
                                       const MHTMLGenerationParams& params,
                                       const GenerateMHTMLCallback& callback) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  int job_id = NewJob(web_contents, params, callback);

  // File creation happens on the FILE thread; the job continues in
  // OnFileAvailable once the file handle comes back to the UI thread.
  BrowserThread::PostTaskAndReplyWithResult(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&MHTMLGenerationManager::CreateFile, params.file_path),
      base::Bind(&MHTMLGenerationManager::OnFileAvailable,
                 base::Unretained(this),  // Safe b/c |this| is a singleton.
                 job_id));
}

void MHTMLGenerationManager::OnSerializeAsMHTMLResponse(
    RenderFrameHostImpl* sender,
    int job_id,
    bool mhtml_generation_in_renderer_succeeded,
    const std::set<std::string>& digests_of_uris_of_serialized_resources) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  Job* job = FindJob(job_id);
  if (!job || !job->IsMessageFromFrameExpected(sender)) {
    NOTREACHED();
    ReceivedBadMessage(sender->GetProcess(),
                       bad_message::DWNLD_INVALID_SERIALIZE_AS_MHTML_RESPONSE);
    return;
  }

  if (!mhtml_generation_in_renderer_succeeded) {
    JobFinished(job, JobStatus::FAILURE);
    return;
  }

  if (!job->OnSerializeAsMHTMLResponse(
          digests_of_uris_of_serialized_resources)) {
    JobFinished(job, JobStatus::FAILURE);
    return;
  }

  if (job->IsDone())
    JobFinished(job, JobStatus::SUCCESS);
}

// static
base::File MHTMLGenerationManager::CreateFile(const base::FilePath& file_path) {
  DCHECK_CURRENTLY_ON(BrowserThread::FILE);

  // SECURITY NOTE: A file descriptor to the file created below will be passed
  // to multiple renderer processes which (in out-of-process iframes mode) can
  // act on behalf of separate web principals.  Therefore it is important to
  // only allow writing to the file and forbid reading from the file (as this
  // would allow reading content generated by other renderers / other web
  // principals).
  uint32_t file_flags = base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE;

  base::File browser_file(file_path, file_flags);
  if (!browser_file.IsValid()) {
    LOG(ERROR) << "Failed to create file to save MHTML at: "
               << file_path.value();
  }
  return browser_file;
}

void MHTMLGenerationManager::OnFileAvailable(int job_id,
                                             base::File browser_file) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  Job* job = FindJob(job_id);
  DCHECK(job);

  if (!browser_file.IsValid()) {
    LOG(ERROR) << "Failed to create file";
    JobFinished(job, JobStatus::FAILURE);
    return;
  }

  job->set_browser_file(std::move(browser_file));

  if (!job->SendToNextRenderFrame()) {
    JobFinished(job, JobStatus::FAILURE);
  }
}

void MHTMLGenerationManager::JobFinished(Job* job, JobStatus job_status) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  DCHECK(job);

  job->MarkAsFinished();
  job->CloseFile(
      base::Bind(&MHTMLGenerationManager::OnFileClosed,
                 base::Unretained(this),  // Safe b/c |this| is a singleton.
                 job->id(), job_status));
}

// Final step of a job: reports the result to the caller and deletes |job|.
// |job| must not be used after this runs.
void MHTMLGenerationManager::OnFileClosed(int job_id,
                                          JobStatus job_status,
                                          int64_t file_size) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  Job* job = FindJob(job_id);
  job->callback().Run(job_status == JobStatus::SUCCESS ? file_size : -1);

  id_to_job_.erase(job_id);
  delete job;
}

int MHTMLGenerationManager::NewJob(WebContents* web_contents,
                                   const MHTMLGenerationParams& params,
                                   const GenerateMHTMLCallback& callback) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  int job_id = next_job_id_++;
  id_to_job_[job_id] = new Job(job_id, web_contents, params, callback);
  return job_id;
}

MHTMLGenerationManager::Job* MHTMLGenerationManager::FindJob(int job_id) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  IDToJobMap::iterator iter = id_to_job_.find(job_id);
  if (iter == id_to_job_.end()) {
    NOTREACHED();
    return nullptr;
  }
  return iter->second;
}

void MHTMLGenerationManager::RenderProcessExited(Job* job) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  DCHECK(job);
  JobFinished(job, JobStatus::FAILURE);
}

}  // namespace content
danakj/chromium
content/browser/download/mhtml_generation_manager.cc
C++
bsd-3-clause
16,585
package com.skcraft.plume.event.block; import com.google.common.base.Functions; import com.google.common.base.Predicate; import com.skcraft.plume.event.BulkEvent; import com.skcraft.plume.event.Cause; import com.skcraft.plume.event.DelegateEvent; import com.skcraft.plume.event.Result; import com.skcraft.plume.util.Location3i; import net.minecraft.world.World; import java.util.List; import static com.google.common.base.Preconditions.checkNotNull; abstract class BlockEvent extends DelegateEvent implements BulkEvent { private final World world; protected BlockEvent(Cause cause, World world) { super(cause); checkNotNull(world, "world"); this.world = world; } /** * Get the world. * * @return The world */ public World getWorld() { return world; } /** * Get a list of affected locations. * * @return A list of affected locations */ public abstract List<Location3i> getLocations(); /** * Filter the list of affected blocks with the given predicate. If the * predicate returns {@code false}, then the block is removed. * * @param predicate the predicate * @param cancelEventOnFalse true to cancel the event and clear the block * list once the predicate returns {@code false} * @return Whether one or more blocks were filtered out */ public boolean filterLocations(Predicate<Location3i> predicate, boolean cancelEventOnFalse) { return filter(getLocations(), Functions.<Location3i>identity(), predicate, cancelEventOnFalse); } @Override public Result getResult() { if (getLocations().isEmpty()) { return Result.DENY; } return super.getResult(); } @Override public Result getExplicitResult() { return super.getResult(); } }
wizjany/Plume
src/main/java/com/skcraft/plume/event/block/BlockEvent.java
Java
bsd-3-clause
1,892
# Sample model used by the Sunspot Rails specs; indexes a blog by its
# subdomain and name.
class Blog < ActiveRecord::Base
  has_many :posts
  has_many :comments, through: :posts

  attr_accessible :name, :subdomain

  searchable include: { posts: :author } do
    string :subdomain
    text :name
  end

  # Make sure that includes are added to with multiple searchable calls
  searchable(include: :comments) {}
end
hafeild/alice
vendor/bundle/ruby/2.3.0/gems/sunspot_rails-2.3.0/spec/rails_app/app/models/blog.rb
Ruby
bsd-3-clause
338
from __future__ import unicode_literals __all__ = ( 'Key', 'Keys', ) class Key(object): def __init__(self, name): #: Descriptive way of writing keys in configuration files. e.g. <C-A> #: for ``Control-A``. self.name = name def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.name) class Keys(object): Escape = Key('<Escape>') ControlA = Key('<C-A>') ControlB = Key('<C-B>') ControlC = Key('<C-C>') ControlD = Key('<C-D>') ControlE = Key('<C-E>') ControlF = Key('<C-F>') ControlG = Key('<C-G>') ControlH = Key('<C-H>') ControlI = Key('<C-I>') # Tab ControlJ = Key('<C-J>') # Enter ControlK = Key('<C-K>') ControlL = Key('<C-L>') ControlM = Key('<C-M>') # Enter ControlN = Key('<C-N>') ControlO = Key('<C-O>') ControlP = Key('<C-P>') ControlQ = Key('<C-Q>') ControlR = Key('<C-R>') ControlS = Key('<C-S>') ControlT = Key('<C-T>') ControlU = Key('<C-U>') ControlV = Key('<C-V>') ControlW = Key('<C-W>') ControlX = Key('<C-X>') ControlY = Key('<C-Y>') ControlZ = Key('<C-Z>') ControlSpace = Key('<C-Space>') ControlBackslash = Key('<C-Backslash>') ControlSquareClose = Key('<C-SquareClose>') ControlCircumflex = Key('<C-Circumflex>') ControlUnderscore = Key('<C-Underscore>') ControlLeft = Key('<C-Left>') ControlRight = Key('<C-Right>') ControlUp = Key('<C-Up>') ControlDown = Key('<C-Down>') Up = Key('<Up>') Down = Key('<Down>') Right = Key('<Right>') Left = Key('<Left>') Home = Key('<Home>') End = Key('<End>') Delete = Key('<Delete>') ShiftDelete = Key('<ShiftDelete>') PageUp = Key('<PageUp>') PageDown = Key('<PageDown>') BackTab = Key('<BackTab>') # shift + tab Tab = ControlI Backspace = ControlH F1 = Key('<F1>') F2 = Key('<F2>') F3 = Key('<F3>') F4 = Key('<F4>') F5 = Key('<F5>') F6 = Key('<F6>') F7 = Key('<F7>') F8 = Key('<F8>') F9 = Key('<F9>') F10 = Key('<F10>') F11 = Key('<F11>') F12 = Key('<F12>') F13 = Key('<F13>') F14 = Key('<F14>') F15 = Key('<F15>') F16 = Key('<F16>') F17 = Key('<F17>') F18 = Key('<F18>') F19 = Key('<F19>') F20 = Key('<F20>') # Matches any 
key. Any = Key('<Any>') # Special CPRResponse = Key('<Cursor-Position-Response>')
jaseg/python-prompt-toolkit
prompt_toolkit/keys.py
Python
bsd-3-clause
2,546
/*
 * Copyright (c) 2004-2022, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.hisp.dhis.expression;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.hisp.dhis.analytics.DataType;
import org.hisp.dhis.common.DimensionalItemId;
import org.hisp.dhis.common.DimensionalItemObject;
import org.hisp.dhis.constant.Constant;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.indicator.IndicatorValue;
import org.hisp.dhis.organisationunit.OrganisationUnitGroup;
import org.hisp.dhis.period.Period;

/**
 * Expressions are mathematical formulas and can contain references to various
 * elements.
 *
 * @author Margrethe Store
 * @author Lars Helge Overland
 * @author Jim Grace
 */
public interface ExpressionService
{
    String ID = ExpressionService.class.getName();

    // Human-readable description used for the "[days]" symbol below.
    String DAYS_DESCRIPTION = "[Number of days]";

    String SYMBOL_DAYS = "[days]";

    String SYMBOL_WILDCARD = "*";

    // Matches an 11-character UID: a letter followed by 10 word characters.
    String UID_EXPRESSION = "[a-zA-Z]\\w{10}";

    // Matches an integer (zero, or an optional minus sign followed by digits
    // with no leading zero).
    String INT_EXPRESSION = "^(0|-?[1-9]\\d*)$";

    // -------------------------------------------------------------------------
    // Expression CRUD operations
    // -------------------------------------------------------------------------

    /**
     * Adds a new Expression to the database.
     *
     * @param expression The Expression to add.
     * @return The generated identifier for this Expression.
     */
    long addExpression( Expression expression );

    /**
     * Updates an Expression.
     *
     * @param expression The Expression to update.
     */
    void updateExpression( Expression expression );

    /**
     * Deletes an Expression from the database.
     *
     * @param expression the expression.
     */
    void deleteExpression( Expression expression );

    /**
     * Get the Expression with the given identifier.
     *
     * @param id The identifier.
     * @return an Expression with the given identifier.
     */
    Expression getExpression( long id );

    /**
     * Gets all Expressions.
     *
     * @return A list with all Expressions.
     */
    List<Expression> getAllExpressions();

    // -------------------------------------------------------------------------
    // Indicator expression logic
    // -------------------------------------------------------------------------

    /**
     * Returns all dimensional item objects which are present in numerator and
     * denominator of the given indicators, as a map from id to object.
     *
     * @param indicators the collection of indicators.
     * @return a map from dimensional item id to object.
     */
    Map<DimensionalItemId, DimensionalItemObject> getIndicatorDimensionalItemMap(
        Collection<Indicator> indicators );

    /**
     * Returns all OrganisationUnitGroups in the numerator and denominator
     * expressions in the given Indicators. Returns an empty set if the given
     * indicators are null or empty.
     *
     * @param indicators the set of indicators.
     * @return a Set of OrganisationUnitGroups.
     */
    List<OrganisationUnitGroup> getOrgUnitGroupCountGroups( Collection<Indicator> indicators );

    /**
     * Generates the calculated value for the given parameters based on the
     * values in the given maps.
     *
     * @param indicator the indicator for which to calculate the value.
     * @param periods a List of periods for which to calculate the value.
     * @param itemMap map of dimensional item id to object in expression.
     * @param valueMap the map of data values.
     * @param orgUnitCountMap the map of organisation unit group member counts.
     * @return the calculated value as a double.
     */
    IndicatorValue getIndicatorValueObject( Indicator indicator, List<Period> periods,
        Map<DimensionalItemId, DimensionalItemObject> itemMap, Map<DimensionalItemObject, Object> valueMap,
        Map<String, Integer> orgUnitCountMap );

    /**
     * Substitutes any constants and org unit group member counts in the
     * numerator and denominator on all indicators in the given collection.
     *
     * @param indicators the set of indicators.
     */
    void substituteIndicatorExpressions( Collection<Indicator> indicators );

    // -------------------------------------------------------------------------
    // Get information about the expression
    // -------------------------------------------------------------------------

    /**
     * Tests whether the expression is valid.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return the ExpressionValidationOutcome of the validation.
     */
    ExpressionValidationOutcome expressionIsValid( String expression, ParseType parseType );

    /**
     * Creates an expression description containing the names of the
     * DimensionalItemObjects from a numeric valued expression.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return An description containing DimensionalItemObjects names.
     */
    String getExpressionDescription( String expression, ParseType parseType );

    /**
     * Creates an expression description containing the names of the
     * DimensionalItemObjects from an expression string, for an expression that
     * will return the specified data type.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @param dataType the data type for the expression to return.
     * @return An description containing DimensionalItemObjects names.
     */
    String getExpressionDescription( String expression, ParseType parseType, DataType dataType );

    /**
     * Gets information we need from an expression string.
     *
     * @param params the expression parameters.
     * @return the expression information.
     */
    ExpressionInfo getExpressionInfo( ExpressionParams params );

    /**
     * From expression info, create a "base" expression parameters object with
     * certain metadata fields supplied that are needed for later evaluation.
     * <p>
     * Before evaluation, the caller will need to add to this "base" object
     * fields such as expression, parseType, dataType, valueMap, etc.
     *
     * @param info the expression information.
     * @return the expression parameters with metadata pre-filled.
     */
    ExpressionParams getBaseExpressionParams( ExpressionInfo info );

    /**
     * Returns UIDs of Data Elements and associated Option Combos (if any) found
     * in the Data Element Operands an expression.
     * <p/>
     * If the Data Element Operand consists of just a Data Element, or if the
     * Option Combo is a wildcard "*", returns just dataElementUID.
     * <p/>
     * If an Option Combo is present, returns dataElementUID.optionComboUID.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return a Set of data element identifiers.
     */
    Set<String> getExpressionElementAndOptionComboIds( String expression, ParseType parseType );

    /**
     * Returns all data elements found in the given expression string, including
     * those found in data element operands. Returns an empty set if the given
     * expression is null.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return a Set of data elements included in the expression string.
     */
    Set<DataElement> getExpressionDataElements( String expression, ParseType parseType );

    /**
     * Returns all CategoryOptionCombo uids in the given expression string that
     * are used as a data element operand categoryOptionCombo or
     * attributeOptionCombo. Returns an empty set if the expression is null.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return a Set of CategoryOptionCombo uids in the expression string.
     */
    Set<String> getExpressionOptionComboIds( String expression, ParseType parseType );

    /**
     * Returns all dimensional item object ids in the given expression.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return a Set of dimensional item object ids.
     */
    Set<DimensionalItemId> getExpressionDimensionalItemIds( String expression, ParseType parseType );

    /**
     * Returns set of all OrganisationUnitGroup UIDs in the given expression.
     *
     * @param expression the expression string.
     * @param parseType the type of expression to parse.
     * @return Map of UIDs to OrganisationUnitGroups in the expression string.
     */
    Set<String> getExpressionOrgUnitGroupIds( String expression, ParseType parseType );

    // -------------------------------------------------------------------------
    // Compute the value of the expression
    // -------------------------------------------------------------------------

    /**
     * Generates the calculated value for an expression.
     *
     * @param params the expression parameters.
     * @return the calculated value.
     */
    Object getExpressionValue( ExpressionParams params );

    // -------------------------------------------------------------------------
    // Gets a (possibly cached) constant map
    // -------------------------------------------------------------------------

    /**
     * Gets the (possibly cached) constant map.
     *
     * @return the constant map.
     */
    Map<String, Constant> getConstantMap();
}
hispindia/dhis2-Core
dhis-2/dhis-api/src/main/java/org/hisp/dhis/expression/ExpressionService.java
Java
bsd-3-clause
11,199
#!/usr/bin/env python
"""
================
sMRI: FreeSurfer
================

This script, smri_freesurfer.py, demonstrates the ability to call reconall on
a set of subjects and then make an average subject.

    python smri_freesurfer.py

Import necessary modules from nipype.
"""

import os

import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.interfaces.freesurfer.preprocess import ReconAll
from nipype.interfaces.freesurfer.utils import MakeAverageSubject

subject_list = ['s1', 's3']

data_dir = os.path.abspath('data')
subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')

wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')

"""
Grab data
"""

datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
                                                  outfields=['struct']),
                        name='datasource',
                        iterfield=['subject_id'])
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list

"""
Run recon-all
"""

recon_all = pe.MapNode(interface=ReconAll(), name='recon_all',
                       iterfield=['subject_id', 'T1_files'])
recon_all.inputs.subject_id = subject_list
# Use os.makedirs (not os.mkdir) so the parent directory
# 'amri_freesurfer_tutorial' is created too when it does not exist yet --
# os.mkdir would raise OSError in that case, since the workflow base
# directory is only created later by wf.run().
if not os.path.exists(subjects_dir):
    os.makedirs(subjects_dir)
recon_all.inputs.subjects_dir = subjects_dir

wf.connect(datasource, 'struct', recon_all, 'T1_files')

"""
Make average subject
"""

average = pe.Node(interface=MakeAverageSubject(), name="average")
average.inputs.subjects_dir = subjects_dir

wf.connect(recon_all, 'subject_id', average, 'subjects_ids')

wf.run("MultiProc", plugin_args={'n_procs': 4})
FredLoney/nipype
examples/smri_freesurfer.py
Python
bsd-3-clause
1,804
/*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #include "QmitkUSNavigationProcessWidget.h" #include "ui_QmitkUSNavigationProcessWidget.h" #include "../NavigationStepWidgets/QmitkUSAbstractNavigationStep.h" #include "../SettingsWidgets/QmitkUSNavigationAbstractSettingsWidget.h" #include "mitkDataNode.h" #include "mitkNavigationDataToNavigationDataFilter.h" #include <QTimer> #include <QSignalMapper> #include <QShortcut> QmitkUSNavigationProcessWidget::QmitkUSNavigationProcessWidget(QWidget* parent) : QWidget(parent), m_SettingsWidget(0), m_BaseNode(mitk::DataNode::New()), m_CurrentTabIndex(0), m_CurrentMaxStep(0), m_ImageAlreadySetToNode(false), m_ReadySignalMapper(new QSignalMapper(this)), m_NoLongerReadySignalMapper(new QSignalMapper(this)), m_StdMultiWidget(0), m_UsePlanningStepWidget(false), ui(new Ui::QmitkUSNavigationProcessWidget) { m_Parent = parent; ui->setupUi(this); // remove the default page ui->stepsToolBox->removeItem(0); //set shortcuts QShortcut *nextShortcut = new QShortcut(QKeySequence("F10"), parent); QShortcut *prevShortcut = new QShortcut(QKeySequence("F11"), parent); connect(nextShortcut, SIGNAL(activated()), this, SLOT(OnNextButtonClicked())); connect(prevShortcut, SIGNAL(activated()), this, SLOT(OnPreviousButtonClicked())); //connect other slots connect( ui->restartStepButton, SIGNAL(clicked()), this, SLOT(OnRestartStepButtonClicked()) ); connect( ui->previousButton, SIGNAL(clicked()), this, SLOT(OnPreviousButtonClicked()) ); connect( ui->nextButton, SIGNAL(clicked()), this, SLOT(OnNextButtonClicked()) ); 
connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) ); connect (ui->settingsButton, SIGNAL(clicked()), this, SLOT(OnSettingsButtonClicked()) ); connect( m_ReadySignalMapper, SIGNAL(mapped(int)), this, SLOT(OnStepReady(int)) ); connect( m_NoLongerReadySignalMapper, SIGNAL(mapped(int)), this, SLOT(OnStepNoLongerReady(int)) ); ui->settingsFrameWidget->setHidden(true); } QmitkUSNavigationProcessWidget::~QmitkUSNavigationProcessWidget() { ui->stepsToolBox->blockSignals(true); for ( NavigationStepVector::iterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it ) { if ( (*it)->GetNavigationStepState() > QmitkUSAbstractNavigationStep::State_Stopped ) { (*it)->StopStep(); } delete *it; } m_NavigationSteps.clear(); if ( m_SettingsNode.IsNotNull() && m_DataStorage.IsNotNull() ) { m_DataStorage->Remove(m_SettingsNode); } delete ui; } void QmitkUSNavigationProcessWidget::EnableInteraction(bool enable) { if (enable) { ui->restartStepButton->setEnabled(true); ui->previousButton->setEnabled(true); ui->nextButton->setEnabled(true); ui->stepsToolBox->setEnabled(true); } else { ui->restartStepButton->setEnabled(false); ui->previousButton->setEnabled(false); ui->nextButton->setEnabled(false); ui->stepsToolBox->setEnabled(false); } } void QmitkUSNavigationProcessWidget::SetDataStorage(itk::SmartPointer<mitk::DataStorage> dataStorage) { m_DataStorage = dataStorage; if ( dataStorage.IsNull() ) { mitkThrow() << "Data Storage must not be null for QmitkUSNavigationProcessWidget."; } // test if base node is already in the data storage and add it if not m_BaseNode = dataStorage->GetNamedNode(QmitkUSAbstractNavigationStep::DATANAME_BASENODE); if ( m_BaseNode.IsNull() ) { m_BaseNode = mitk::DataNode::New(); m_BaseNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_BASENODE); dataStorage->Add(m_BaseNode); } // base node and image stream node may be the same node if ( strcmp(QmitkUSAbstractNavigationStep::DATANAME_BASENODE, 
QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM) != 0) { m_ImageStreamNode = dataStorage->GetNamedNode(QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM); if (m_ImageStreamNode.IsNull()) { // Create Node for US Stream m_ImageStreamNode = mitk::DataNode::New(); m_ImageStreamNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_IMAGESTREAM); dataStorage->Add(m_ImageStreamNode); } } else { m_ImageStreamNode = m_BaseNode; } m_SettingsNode = dataStorage->GetNamedDerivedNode(QmitkUSAbstractNavigationStep::DATANAME_SETTINGS, m_BaseNode); if ( m_SettingsNode.IsNull() ) { m_SettingsNode = mitk::DataNode::New(); m_SettingsNode->SetName(QmitkUSAbstractNavigationStep::DATANAME_SETTINGS); dataStorage->Add(m_SettingsNode, m_BaseNode); } if (m_SettingsWidget) { m_SettingsWidget->SetSettingsNode(m_SettingsNode); } } void QmitkUSNavigationProcessWidget::SetSettingsWidget(QmitkUSNavigationAbstractSettingsWidget* settingsWidget) { // disconnect slots to settings widget if there was a widget before if ( m_SettingsWidget ) { disconnect( ui->settingsSaveButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnSave()) ); disconnect( ui->settingsCancelButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnCancel()) ); disconnect (m_SettingsWidget, SIGNAL(Saved()), this, SLOT(OnSettingsWidgetReturned()) ); disconnect (m_SettingsWidget, SIGNAL(Canceled()), this, SLOT(OnSettingsWidgetReturned()) ); disconnect (m_SettingsWidget, SIGNAL(SettingsChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsChanged(itk::SmartPointer<mitk::DataNode>)) ); ui->settingsWidget->removeWidget(m_SettingsWidget); } m_SettingsWidget = settingsWidget; if ( m_SettingsWidget ) { m_SettingsWidget->LoadSettings(); connect( ui->settingsSaveButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnSave()) ); connect( ui->settingsCancelButton, SIGNAL(clicked()), m_SettingsWidget, SLOT(OnCancel()) ); connect (m_SettingsWidget, SIGNAL(Saved()), this, SLOT(OnSettingsWidgetReturned()) ); connect (m_SettingsWidget, 
SIGNAL(Canceled()), this, SLOT(OnSettingsWidgetReturned()) ); connect (m_SettingsWidget, SIGNAL(SettingsChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsChanged(itk::SmartPointer<mitk::DataNode>)) ); if ( m_SettingsNode.IsNotNull() ) { m_SettingsWidget->SetSettingsNode(m_SettingsNode, true); } ui->settingsWidget->addWidget(m_SettingsWidget); } ui->settingsButton->setEnabled(m_SettingsWidget != 0); } void QmitkUSNavigationProcessWidget::SetNavigationSteps(NavigationStepVector navigationSteps) { disconnect( this, SLOT(OnTabChanged(int)) ); for ( int n = ui->stepsToolBox->count()-1; n >= 0; --n ) { ui->stepsToolBox->removeItem(n); } connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) ); m_NavigationSteps.clear(); m_NavigationSteps = navigationSteps; this->InitializeNavigationStepWidgets(); // notify all navigation step widgets about the current settings for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it) { (*it)->OnSettingsChanged(m_SettingsNode); } } void QmitkUSNavigationProcessWidget::ResetNavigationProcess() { MITK_INFO("QmitkUSNavigationProcessWidget") << "Resetting navigation process."; ui->stepsToolBox->blockSignals(true); for ( int n = 0; n <= m_CurrentMaxStep; ++n ) { m_NavigationSteps.at(n)->StopStep(); if ( n > 0 ) { ui->stepsToolBox->setItemEnabled(n, false); } } ui->stepsToolBox->blockSignals(false); m_CurrentMaxStep = 0; ui->stepsToolBox->setCurrentIndex(0); if ( m_NavigationSteps.size() > 0 ) { m_NavigationSteps.at(0)->ActivateStep(); } this->UpdatePrevNextButtons(); } void QmitkUSNavigationProcessWidget::UpdateNavigationProgress() { if ( m_CombinedModality.IsNotNull() && !m_CombinedModality->GetIsFreezed() ) { m_CombinedModality->Modified(); m_CombinedModality->Update(); if ( m_LastNavigationDataFilter.IsNotNull() ) { m_LastNavigationDataFilter->Update(); } mitk::Image::Pointer image = m_CombinedModality->GetOutput(); // make sure that always the current 
image is set to the data node if ( image.IsNotNull() && m_ImageStreamNode->GetData() != image.GetPointer() && image->IsInitialized() ) { m_ImageStreamNode->SetData(image); m_ImageAlreadySetToNode = true; } } if ( m_CurrentTabIndex > 0 && static_cast<unsigned int>(m_CurrentTabIndex) < m_NavigationSteps.size() ) { m_NavigationSteps.at(m_CurrentTabIndex)->Update(); } } void QmitkUSNavigationProcessWidget::OnNextButtonClicked() { if (m_CombinedModality.IsNotNull() && m_CombinedModality->GetIsFreezed()) {return;} //no moving through steps when the modality is NULL or frozen int currentIndex = ui->stepsToolBox->currentIndex(); if (currentIndex >= m_CurrentMaxStep) { MITK_WARN << "Next button clicked though no next tab widget is available."; return; } ui->stepsToolBox->setCurrentIndex(++currentIndex); this->UpdatePrevNextButtons(); } void QmitkUSNavigationProcessWidget::OnPreviousButtonClicked() { if (m_CombinedModality.IsNotNull() && m_CombinedModality->GetIsFreezed()) {return;} //no moving through steps when the modality is NULL or frozen int currentIndex = ui->stepsToolBox->currentIndex(); if (currentIndex <= 0) { MITK_WARN << "Previous button clicked though no previous tab widget is available."; return; } ui->stepsToolBox->setCurrentIndex(--currentIndex); this->UpdatePrevNextButtons(); } void QmitkUSNavigationProcessWidget::OnRestartStepButtonClicked() { MITK_INFO("QmitkUSNavigationProcessWidget") << "Restarting step " << m_CurrentTabIndex << " (" << m_NavigationSteps.at(m_CurrentTabIndex)->GetTitle().toStdString() << ")."; m_NavigationSteps.at(ui->stepsToolBox->currentIndex())->RestartStep(); m_NavigationSteps.at(ui->stepsToolBox->currentIndex())->ActivateStep(); } void QmitkUSNavigationProcessWidget::OnTabChanged(int index) { if ( index < 0 || index >= static_cast<int>(m_NavigationSteps.size()) ) { return; } else if ( m_CurrentTabIndex == index ) { // just activate the step if it is the same step againg m_NavigationSteps.at(index)->ActivateStep(); return; } 
MITK_INFO("QmitkUSNavigationProcessWidget") << "Activating navigation step " << index << " (" << m_NavigationSteps.at(index)->GetTitle().toStdString() <<")."; if (index > m_CurrentTabIndex) { this->UpdateFilterPipeline(); // finish all previous steps to make sure that all data is valid for (int n = m_CurrentTabIndex; n < index; ++n) { m_NavigationSteps.at(n)->FinishStep(); } } // deactivate the previously active step if ( m_CurrentTabIndex > 0 && m_NavigationSteps.size() > static_cast<unsigned int>(m_CurrentTabIndex) ) { m_NavigationSteps.at(m_CurrentTabIndex)->DeactivateStep(); } // start step of the current tab if it wasn't started before if ( m_NavigationSteps.at(index)->GetNavigationStepState() == QmitkUSAbstractNavigationStep::State_Stopped ) { m_NavigationSteps.at(index)->StartStep(); } m_NavigationSteps.at(index)->ActivateStep(); if (static_cast<unsigned int>(index) < m_NavigationSteps.size()) ui->restartStepButton->setEnabled(m_NavigationSteps.at(index)->GetIsRestartable()); this->UpdatePrevNextButtons(); m_CurrentTabIndex = index; emit SignalActiveNavigationStepChanged(index); } void QmitkUSNavigationProcessWidget::OnSettingsButtonClicked() { this->SetSettingsWidgetVisible(true); } void QmitkUSNavigationProcessWidget::OnSettingsWidgetReturned() { this->SetSettingsWidgetVisible(false); } void QmitkUSNavigationProcessWidget::OnSettingsNodeChanged(itk::SmartPointer<mitk::DataNode> dataNode) { if ( m_SettingsWidget ) m_SettingsWidget->SetSettingsNode(dataNode); } void QmitkUSNavigationProcessWidget::OnStepReady(int index) { if (m_CurrentMaxStep <= index) { m_CurrentMaxStep = index + 1; this->UpdatePrevNextButtons(); for (int n = 0; n <= m_CurrentMaxStep; ++n) { ui->stepsToolBox->setItemEnabled(n, true); } } emit SignalNavigationStepFinished(index, true); } void QmitkUSNavigationProcessWidget::OnStepNoLongerReady(int index) { if (m_CurrentMaxStep > index) { m_CurrentMaxStep = index; this->UpdatePrevNextButtons(); this->UpdateFilterPipeline(); for (int n = 
m_CurrentMaxStep+1; n < ui->stepsToolBox->count(); ++n) { ui->stepsToolBox->setItemEnabled(n, false); m_NavigationSteps.at(n)->StopStep(); } } emit SignalNavigationStepFinished(index, false); } void QmitkUSNavigationProcessWidget::OnCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality> combinedModality) { m_CombinedModality = combinedModality; m_ImageAlreadySetToNode = false; if ( combinedModality.IsNotNull() ) { if ( combinedModality->GetNavigationDataSource().IsNull() ) { MITK_WARN << "There is no navigation data source set for the given combined modality."; return; } this->UpdateFilterPipeline(); } for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it) { (*it)->SetCombinedModality(combinedModality); } emit SignalCombinedModalityChanged(combinedModality); } void QmitkUSNavigationProcessWidget::OnSettingsChanged(const mitk::DataNode::Pointer dataNode) { static bool methodEntered = false; if ( methodEntered ) { MITK_WARN("QmitkUSNavigationProcessWidget") << "Ignoring recursive call to 'OnSettingsChanged()'. 
" << "Make sure to no emit 'SignalSettingsNodeChanged' in an 'OnSettingsChanged()' method."; return; } methodEntered = true; std::string application; if ( dataNode->GetStringProperty("settings.application", application) ) { QString applicationQString = QString::fromStdString(application); if ( applicationQString != ui->titleLabel->text() ) { ui->titleLabel->setText(applicationQString); } } // notify all navigation step widgets about the changed settings for (NavigationStepIterator it = m_NavigationSteps.begin(); it != m_NavigationSteps.end(); ++it) { (*it)->OnSettingsChanged(dataNode); } emit SignalSettingsChanged(dataNode); methodEntered = false; } void QmitkUSNavigationProcessWidget::InitializeNavigationStepWidgets() { // do not listen for steps tool box signal during insertion of items into tool box disconnect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) ); m_CurrentMaxStep = 0; mitk::DataStorage::Pointer dataStorage = m_DataStorage; for (unsigned int n = 0; n < m_NavigationSteps.size(); ++n) { QmitkUSAbstractNavigationStep* curNavigationStep = m_NavigationSteps.at(n); curNavigationStep->SetDataStorage(dataStorage); connect( curNavigationStep, SIGNAL(SignalReadyForNextStep()), m_ReadySignalMapper, SLOT(map())); connect( curNavigationStep, SIGNAL(SignalNoLongerReadyForNextStep()), m_NoLongerReadySignalMapper, SLOT(map()) ); connect( curNavigationStep, SIGNAL(SignalCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality>)), this, SLOT(OnCombinedModalityChanged(itk::SmartPointer<mitk::USCombinedModality>)) ); connect( curNavigationStep, SIGNAL(SignalIntermediateResult(const itk::SmartPointer<mitk::DataNode>)), this, SIGNAL(SignalIntermediateResult(const itk::SmartPointer<mitk::DataNode>)) ); connect( curNavigationStep, SIGNAL(SignalSettingsNodeChanged(itk::SmartPointer<mitk::DataNode>)), this, SLOT(OnSettingsNodeChanged(itk::SmartPointer<mitk::DataNode>)) ); m_ReadySignalMapper->setMapping(curNavigationStep, n); 
m_NoLongerReadySignalMapper->setMapping(curNavigationStep, n); ui->stepsToolBox->insertItem(n, curNavigationStep, QString("Step ") + QString::number(n+1) + ": " + curNavigationStep->GetTitle()); if ( n > 0 ) { ui->stepsToolBox->setItemEnabled(n, false); } } ui->restartStepButton->setEnabled(m_NavigationSteps.at(0)->GetIsRestartable()); ui->stepsToolBox->setCurrentIndex(0); // activate the first navigation step widgets if ( ! m_NavigationSteps.empty() ) { m_NavigationSteps.at(0)->ActivateStep(); } // after filling the steps tool box the signal is interesting again connect( ui->stepsToolBox, SIGNAL(currentChanged(int)), this, SLOT(OnTabChanged(int)) ); this->UpdateFilterPipeline(); } void QmitkUSNavigationProcessWidget::UpdatePrevNextButtons() { int currentIndex = ui->stepsToolBox->currentIndex(); ui->previousButton->setEnabled(currentIndex > 0); ui->nextButton->setEnabled(currentIndex < m_CurrentMaxStep); } void QmitkUSNavigationProcessWidget::UpdateFilterPipeline() { if ( m_CombinedModality.IsNull() ) { return; } std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer> filterList; mitk::NavigationDataSource::Pointer navigationDataSource = m_CombinedModality->GetNavigationDataSource(); for (unsigned int n = 0; n <= m_CurrentMaxStep && n < m_NavigationSteps.size(); ++n) { QmitkUSAbstractNavigationStep::FilterVector filter = m_NavigationSteps.at(n)->GetFilter(); if ( ! filter.empty() ) { filterList.insert(filterList.end(), filter.begin(), filter.end()); } } if ( ! 
filterList.empty() ) { for (unsigned int n = 0; n < navigationDataSource->GetNumberOfOutputs(); ++n) { filterList.at(0)->SetInput(n, navigationDataSource->GetOutput(n)); } for (std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer>::iterator it = filterList.begin()+1; it != filterList.end(); ++it) { std::vector<mitk::NavigationDataToNavigationDataFilter::Pointer>::iterator prevIt = it-1; for (unsigned int n = 0; n < (*prevIt)->GetNumberOfOutputs(); ++n) { (*it)->SetInput(n, (*prevIt)->GetOutput(n)); } } m_LastNavigationDataFilter = filterList.at(filterList.size()-1); } else { m_LastNavigationDataFilter = navigationDataSource.GetPointer(); } } void QmitkUSNavigationProcessWidget::SetSettingsWidgetVisible(bool visible) { ui->settingsFrameWidget->setVisible(visible); ui->stepsToolBox->setHidden(visible); ui->settingsButton->setHidden(visible); ui->restartStepButton->setHidden(visible); ui->previousButton->setHidden(visible); ui->nextButton->setHidden(visible); } void QmitkUSNavigationProcessWidget::FinishCurrentNavigationStep() { int currentIndex = ui->stepsToolBox->currentIndex(); QmitkUSAbstractNavigationStep* curNavigationStep = m_NavigationSteps.at(currentIndex); curNavigationStep->FinishStep(); }
iwegner/MITK
Plugins/org.mitk.gui.qt.igt.app.echotrack/src/internal/Widgets/QmitkUSNavigationProcessWidget.cpp
C++
bsd-3-clause
18,872
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/image_editor/screenshot_flow.h" #include <memory> #include "base/logging.h" #include "build/build_config.h" #include "content/public/browser/render_widget_host.h" #include "content/public/browser/web_contents.h" #include "content/public/browser/web_contents_observer.h" #include "third_party/skia/include/core/SkColor.h" #include "ui/compositor/paint_recorder.h" #include "ui/gfx/canvas.h" #include "ui/gfx/geometry/point.h" #include "ui/gfx/geometry/rect.h" #include "ui/gfx/geometry/rect_f.h" #include "ui/gfx/image/image.h" #include "ui/gfx/native_widget_types.h" #include "ui/gfx/render_text.h" #include "ui/snapshot/snapshot.h" #include "ui/views/background.h" #if defined(OS_MAC) #include "chrome/browser/image_editor/event_capture_mac.h" #include "components/lens/lens_features.h" #include "content/public/browser/render_view_host.h" #include "ui/views/widget/widget.h" #endif #if defined(USE_AURA) #include "ui/aura/window.h" #include "ui/wm/core/window_util.h" #endif namespace image_editor { // Colors for semitransparent overlay. static constexpr SkColor kColorSemitransparentOverlayMask = SkColorSetARGB(0x7F, 0x00, 0x00, 0x00); static constexpr SkColor kColorSemitransparentOverlayVisible = SkColorSetARGB(0x00, 0x00, 0x00, 0x00); static constexpr SkColor kColorSelectionRect = SkColorSetRGB(0xEE, 0xEE, 0xEE); // Minimum selection rect edge size to treat as a valid capture region. 
static constexpr int kMinimumValidSelectionEdgePixels = 30; ScreenshotFlow::ScreenshotFlow(content::WebContents* web_contents) : content::WebContentsObserver(web_contents), web_contents_(web_contents->GetWeakPtr()) { weak_this_ = weak_factory_.GetWeakPtr(); } ScreenshotFlow::~ScreenshotFlow() { RemoveUIOverlay(); } void ScreenshotFlow::CreateAndAddUIOverlay() { if (screen_capture_layer_) return; web_contents_observer_ = std::make_unique<UnderlyingWebContentsObserver>( web_contents_.get(), this); screen_capture_layer_ = std::make_unique<ui::Layer>(ui::LayerType::LAYER_TEXTURED); screen_capture_layer_->SetName("ScreenshotRegionSelectionLayer"); screen_capture_layer_->SetFillsBoundsOpaquely(false); screen_capture_layer_->set_delegate(this); #if defined(OS_MAC) gfx::Rect bounds = web_contents_->GetViewBounds(); const gfx::NativeView web_contents_view = web_contents_->GetContentNativeView(); views::Widget* widget = views::Widget::GetWidgetForNativeView(web_contents_view); ui::Layer* content_layer = widget->GetLayer(); const gfx::Rect offset_bounds = widget->GetWindowBoundsInScreen(); bounds.Offset(-offset_bounds.x(), -offset_bounds.y()); event_capture_mac_ = std::make_unique<EventCaptureMac>( this, web_contents_->GetTopLevelNativeWindow()); #else const gfx::NativeWindow& native_window = web_contents_->GetNativeView(); ui::Layer* content_layer = native_window->layer(); const gfx::Rect bounds = native_window->bounds(); // Capture mouse down and drag events on our window. native_window->AddPreTargetHandler(this); #endif content_layer->Add(screen_capture_layer_.get()); content_layer->StackAtTop(screen_capture_layer_.get()); screen_capture_layer_->SetBounds(bounds); screen_capture_layer_->SetVisible(true); SetCursor(ui::mojom::CursorType::kCross); // After setup is done, we should set the capture mode to active. 
capture_mode_ = CaptureMode::SELECTION_RECTANGLE; } void ScreenshotFlow::RemoveUIOverlay() { if (capture_mode_ == CaptureMode::NOT_CAPTURING) return; capture_mode_ = CaptureMode::NOT_CAPTURING; if (!web_contents_ || !screen_capture_layer_) return; #if defined(OS_MAC) views::Widget* widget = views::Widget::GetWidgetForNativeView( web_contents_->GetContentNativeView()); if (!widget) return; ui::Layer* content_layer = widget->GetLayer(); event_capture_mac_.reset(); #else const gfx::NativeWindow& native_window = web_contents_->GetNativeView(); native_window->RemovePreTargetHandler(this); ui::Layer* content_layer = native_window->layer(); #endif content_layer->Remove(screen_capture_layer_.get()); screen_capture_layer_->set_delegate(nullptr); screen_capture_layer_.reset(); // Restore the cursor to pointer; there's no corresponding GetCursor() // to store the pre-capture-mode cursor, and the pointer will have moved // in the meantime. SetCursor(ui::mojom::CursorType::kPointer); } void ScreenshotFlow::Start(ScreenshotCaptureCallback flow_callback) { flow_callback_ = std::move(flow_callback); CreateAndAddUIOverlay(); RequestRepaint(gfx::Rect()); } void ScreenshotFlow::StartFullscreenCapture( ScreenshotCaptureCallback flow_callback) { // Start and finish the capture process by screenshotting the full window. // There is no region selection step in this mode. 
flow_callback_ = std::move(flow_callback); CaptureAndRunScreenshotCompleteCallback(ScreenshotCaptureResultCode::SUCCESS, gfx::Rect(web_contents_->GetSize())); } void ScreenshotFlow::CaptureAndRunScreenshotCompleteCallback( ScreenshotCaptureResultCode result_code, gfx::Rect region) { if (region.IsEmpty()) { RunScreenshotCompleteCallback(result_code, gfx::Rect(), gfx::Image()); return; } gfx::Rect bounds = web_contents_->GetViewBounds(); #if defined(OS_MAC) const gfx::NativeView& native_view = web_contents_->GetContentNativeView(); gfx::Image img; bool rval = ui::GrabViewSnapshot(native_view, region, &img); // If |img| is empty, clients should treat it as a canceled action, but // we have a DCHECK for development as we expected this call to succeed. DCHECK(rval); RunScreenshotCompleteCallback(result_code, bounds, img); #else ui::GrabWindowSnapshotAsyncCallback screenshot_callback = base::BindOnce(&ScreenshotFlow::RunScreenshotCompleteCallback, weak_this_, result_code, bounds); const gfx::NativeWindow& native_window = web_contents_->GetNativeView(); ui::GrabWindowSnapshotAsync(native_window, region, std::move(screenshot_callback)); #endif } void ScreenshotFlow::CancelCapture() { RemoveUIOverlay(); } void ScreenshotFlow::OnKeyEvent(ui::KeyEvent* event) { if (event->type() == ui::ET_KEY_PRESSED && event->key_code() == ui::VKEY_ESCAPE) { event->StopPropagation(); CompleteCapture(ScreenshotCaptureResultCode::USER_ESCAPE_EXIT, gfx::Rect()); } } void ScreenshotFlow::OnMouseEvent(ui::MouseEvent* event) { if (!event->IsLocatedEvent()) return; const ui::LocatedEvent* located_event = ui::LocatedEvent::FromIfValid(event); if (!located_event) return; gfx::Point location = located_event->location(); #if defined(OS_MAC) // Offset |location| be relative to the WebContents widget, vs the parent // window, recomputed rather than cached in case e.g. user disables // bookmarks bar from another window. 
gfx::Rect web_contents_bounds = web_contents_->GetViewBounds(); const gfx::NativeView web_contents_view = web_contents_->GetContentNativeView(); views::Widget* widget = views::Widget::GetWidgetForNativeView(web_contents_view); const gfx::Rect widget_bounds = widget->GetWindowBoundsInScreen(); location.set_x(location.x() + (widget_bounds.x() - web_contents_bounds.x())); location.set_y(location.y() + (widget_bounds.y() - web_contents_bounds.y())); // Don't capture clicks on browser ui outside the webcontents. if (location.x() < 0 || location.y() < 0 || location.x() > web_contents_bounds.width() || location.y() > web_contents_bounds.height()) { return; } #endif switch (event->type()) { case ui::ET_MOUSE_MOVED: SetCursor(ui::mojom::CursorType::kCross); event->SetHandled(); break; case ui::ET_MOUSE_PRESSED: if (event->IsLeftMouseButton()) { drag_start_ = location; drag_end_ = location; event->SetHandled(); } break; case ui::ET_MOUSE_DRAGGED: if (event->IsLeftMouseButton()) { drag_end_ = location; RequestRepaint(gfx::Rect()); event->SetHandled(); } break; case ui::ET_MOUSE_RELEASED: if (capture_mode_ == CaptureMode::SELECTION_RECTANGLE || capture_mode_ == CaptureMode::SELECTION_ELEMENT) { event->SetHandled(); gfx::Rect selection = gfx::BoundingRect(drag_start_, drag_end_); drag_start_.SetPoint(0, 0); drag_end_.SetPoint(0, 0); if (selection.width() >= kMinimumValidSelectionEdgePixels && selection.height() >= kMinimumValidSelectionEdgePixels) { CompleteCapture(ScreenshotCaptureResultCode::SUCCESS, selection); } else { RequestRepaint(gfx::Rect()); } } break; default: break; } } void ScreenshotFlow::CompleteCapture(ScreenshotCaptureResultCode result_code, const gfx::Rect& region) { RemoveUIOverlay(); CaptureAndRunScreenshotCompleteCallback(result_code, region); } void ScreenshotFlow::RunScreenshotCompleteCallback( ScreenshotCaptureResultCode result_code, gfx::Rect bounds, gfx::Image image) { ScreenshotCaptureResult result; result.result_code = result_code; result.image = 
image; result.screen_bounds = bounds; std::move(flow_callback_).Run(result); } void ScreenshotFlow::OnPaintLayer(const ui::PaintContext& context) { if (!screen_capture_layer_) return; const gfx::Rect& screen_bounds(screen_capture_layer_->bounds()); ui::PaintRecorder recorder(context, screen_bounds.size()); gfx::Canvas* canvas = recorder.canvas(); auto selection_rect = gfx::BoundingRect(drag_start_, drag_end_); PaintSelectionLayer(canvas, selection_rect, gfx::Rect()); paint_invalidation_ = gfx::Rect(); } void ScreenshotFlow::RequestRepaint(gfx::Rect region) { if (!screen_capture_layer_) return; if (region.IsEmpty()) { const gfx::Size& layer_size = screen_capture_layer_->size(); region = gfx::Rect(0, 0, layer_size.width(), layer_size.height()); } paint_invalidation_.Union(region); screen_capture_layer_->SchedulePaint(region); } void ScreenshotFlow::PaintSelectionLayer(gfx::Canvas* canvas, const gfx::Rect& selection, const gfx::Rect& invalidation_region) { // Adjust for hidpi and lodpi support. canvas->UndoDeviceScaleFactor(); // Clear the canvas with our mask color. canvas->DrawColor(kColorSemitransparentOverlayMask); // Allow the user's selection to show through, and add a border around it. 
if (!selection.IsEmpty()) { float scale_factor = screen_capture_layer_->device_scale_factor(); gfx::Rect selection_scaled = gfx::ScaleToEnclosingRect(selection, scale_factor); canvas->FillRect(selection_scaled, kColorSemitransparentOverlayVisible, SkBlendMode::kClear); canvas->DrawRect(gfx::RectF(selection_scaled), kColorSelectionRect); } } void ScreenshotFlow::SetCursor(ui::mojom::CursorType cursor_type) { if (!web_contents_) { return; } #if defined(OS_MAC) if (cursor_type == ui::mojom::CursorType::kCross && lens::features::kRegionSearchMacCursorFix.Get()) { EventCaptureMac::SetCrossCursor(); return; } #endif content::RenderWidgetHost* host = web_contents_->GetMainFrame()->GetRenderWidgetHost(); if (host) { ui::Cursor cursor(cursor_type); host->SetCursor(cursor); } } bool ScreenshotFlow::IsCaptureModeActive() { return capture_mode_ != CaptureMode::NOT_CAPTURING; } void ScreenshotFlow::WebContentsDestroyed() { if (IsCaptureModeActive()) { CancelCapture(); } } void ScreenshotFlow::OnVisibilityChanged(content::Visibility visibility) { if (IsCaptureModeActive()) { CancelCapture(); } } // UnderlyingWebContentsObserver monitors the WebContents and exits screen // capture mode if a navigation occurs. class ScreenshotFlow::UnderlyingWebContentsObserver : public content::WebContentsObserver { public: UnderlyingWebContentsObserver(content::WebContents* web_contents, ScreenshotFlow* screenshot_flow) : content::WebContentsObserver(web_contents), screenshot_flow_(screenshot_flow) {} ~UnderlyingWebContentsObserver() override = default; UnderlyingWebContentsObserver(const UnderlyingWebContentsObserver&) = delete; UnderlyingWebContentsObserver& operator=( const UnderlyingWebContentsObserver&) = delete; // content::WebContentsObserver void PrimaryPageChanged(content::Page& page) override { // We only care to complete/cancel a capture if the capture mode is // currently active. 
if (screenshot_flow_->IsCaptureModeActive()) screenshot_flow_->CompleteCapture( ScreenshotCaptureResultCode::USER_NAVIGATED_EXIT, gfx::Rect()); } private: ScreenshotFlow* screenshot_flow_; }; } // namespace image_editor
scheib/chromium
chrome/browser/image_editor/screenshot_flow.cc
C++
bsd-3-clause
13,138
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// ChildHistogramMessageFilter runs in a child process and, on request from
// the browser, serializes ("pickles") the histogram deltas accumulated since
// the last upload and sends them back over IPC.

#include "content/common/child_histogram_message_filter.h"

#include <ctype.h>

#include "base/bind.h"
#include "base/message_loop.h"
#include "base/metrics/statistics_recorder.h"
#include "base/pickle.h"
#include "content/common/child_process.h"
#include "content/common/child_process_messages.h"
#include "content/common/child_thread.h"

namespace content {

// The snapshot manager is constructed with |this|; presumably it calls back
// into RecordDelta() below for each histogram with unsent samples — confirm
// against HistogramSnapshotManager.
ChildHistogramMessageFilter::ChildHistogramMessageFilter()
    : channel_(NULL),
      ALLOW_THIS_IN_INITIALIZER_LIST(histogram_snapshot_manager_(this)) {
}

ChildHistogramMessageFilter::~ChildHistogramMessageFilter() {
}

// Remembers the IPC channel so UploadAllHistograms() can send on it later.
void ChildHistogramMessageFilter::OnFilterAdded(IPC::Channel* channel) {
  channel_ = channel;
}

// NOTE(review): channel_ is intentionally not cleared here; a histogram
// upload racing filter removal would use a stale pointer — confirm lifetime
// guarantees with the IPC layer.
void ChildHistogramMessageFilter::OnFilterRemoved() {
}

// Dispatches incoming IPC messages; only ChildProcessMsg_GetChildHistogramData
// is handled here. Returns true if the message was consumed.
bool ChildHistogramMessageFilter::OnMessageReceived(
    const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(ChildHistogramMessageFilter, message)
    IPC_MESSAGE_HANDLER(ChildProcessMsg_GetChildHistogramData,
                        OnGetChildHistogramData)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}

// Asynchronous entry point: hops to the IO message loop before collecting and
// uploading, so the (potentially slow) snapshot work happens off the caller's
// thread.
void ChildHistogramMessageFilter::SendHistograms(int sequence_number) {
  ChildProcess::current()->io_message_loop_proxy()->PostTask(
      FROM_HERE, base::Bind(&ChildHistogramMessageFilter::UploadAllHistograms,
                            this, sequence_number));
}

// IPC handler: uploads synchronously on the thread the message arrived on.
void ChildHistogramMessageFilter::OnGetChildHistogramData(int sequence_number) {
  UploadAllHistograms(sequence_number);
}

// Snapshots all histogram deltas, sends them pickled to the host process
// tagged with |sequence_number|, and clears the staging vector for next time.
void ChildHistogramMessageFilter::UploadAllHistograms(int sequence_number) {
  // Staging vector must be empty: RecordDelta() appends into it during
  // PrepareDeltas() and it is cleared after every send.
  DCHECK_EQ(0u, pickled_histograms_.size());

  base::StatisticsRecorder::CollectHistogramStats("ChildProcess");

  // Push snapshots into our pickled_histograms_ vector.
  // Note: Before serializing, we set the kIPCSerializationSourceFlag for all
  // the histograms, so that the receiving process can distinguish them from the
  // local histograms.
  histogram_snapshot_manager_.PrepareDeltas(
      base::Histogram::kIPCSerializationSourceFlag, false);

  channel_->Send(new ChildProcessHostMsg_ChildHistogramData(
      sequence_number, pickled_histograms_));

  pickled_histograms_.clear();
  // Debug-only counter of how many uploads this process has performed.
  static int count = 0;
  count++;
  DHISTOGRAM_COUNTS("Histogram.ChildProcessHistogramSentCount", count);
}

// Callback invoked per histogram during PrepareDeltas(): serializes the
// histogram's metadata followed by its sample delta into one Pickle and
// stages the raw bytes (as a std::string) for the next IPC send.
void ChildHistogramMessageFilter::RecordDelta(
    const base::HistogramBase& histogram,
    const base::HistogramSamples& snapshot) {
  // Caller should only report histograms that actually have new samples.
  DCHECK_NE(0, snapshot.TotalCount());

  Pickle pickle;
  histogram.SerializeInfo(&pickle);
  snapshot.Serialize(&pickle);

  pickled_histograms_.push_back(
      std::string(static_cast<const char*>(pickle.data()), pickle.size()));
}

// Records (via UMA) that a histogram inconsistency was seen in this child
// process.
void ChildHistogramMessageFilter::InconsistencyDetected(
    base::Histogram::Inconsistencies problem) {
  UMA_HISTOGRAM_ENUMERATION("Histogram.InconsistenciesChildProcess",
                            problem, base::Histogram::NEVER_EXCEEDED_VALUE);
}

// Records the first occurrence of a given inconsistency type.
void ChildHistogramMessageFilter::UniqueInconsistencyDetected(
    base::Histogram::Inconsistencies problem) {
  UMA_HISTOGRAM_ENUMERATION("Histogram.InconsistenciesChildProcessUnique",
                            problem, base::Histogram::NEVER_EXCEEDED_VALUE);
}

// Records the magnitude of a logged-count snapshot inconsistency.
void ChildHistogramMessageFilter::InconsistencyDetectedInLoggedCount(
    int amount) {
  UMA_HISTOGRAM_COUNTS("Histogram.InconsistentSnapshotChildProcess",
                       std::abs(amount));
}

}  // namespace content
zcbenz/cefode-chromium
content/common/child_histogram_message_filter.cc
C++
bsd-3-clause
3,578
/*
 * Copyright (C) 2013 Soumith Chintala
 *
 */

#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include "torchandroid.h"
#include <assert.h>

extern "C" {

/**
 * JNI entry point for com.torch.Torch.jni_call().
 *
 * Loads the Lua script named by |luaFile_| from the APK's assets, executes it
 * inside a freshly initialized Torch/Lua state, and returns a human-readable
 * status string (either a success note or the Lua error message).
 *
 * @param env               JNI environment for the calling thread.
 * @param thiz              Java receiver (unused).
 * @param assetManager      android.content.res.AssetManager backing the APK
 *                          assets the script is read from.
 * @param nativeLibraryDir_ Directory holding the app's native libraries,
 *                          forwarded to inittorch().
 * @param luaFile_          Asset-relative path of the Lua script to run.
 * @return                  New Java string describing the outcome.
 */
JNIEXPORT jstring JNICALL
Java_com_torch_Torch_jni_1call(JNIEnv* env, jobject thiz, jobject assetManager,
                               jstring nativeLibraryDir_, jstring luaFile_) {
  // Get the native asset manager behind the Java AssetManager.
  AAssetManager* manager = AAssetManager_fromJava(env, assetManager);
  assert(NULL != manager);

  const char* nativeLibraryDir = env->GetStringUTFChars(nativeLibraryDir_, 0);
  const char* file = env->GetStringUTFChars(luaFile_, 0);

  char buffer[4096];  // buffer for textview output
  buffer[0] = 0;

  D("Torch.call(%s), nativeLibraryDir=%s", file, nativeLibraryDir);

  lua_State* L = inittorch(manager, nativeLibraryDir);  // create a lua_State
  assert(NULL != L);

  // Load and run the script.
  // BUG FIX: |ret| was previously left uninitialized when the asset lookup
  // failed (size == -1), so the later error check read an indeterminate value
  // and lua_tostring() could be called on an empty stack. A missing asset is
  // now reported explicitly.
  long size = android_asset_get_size(file);
  if (size == -1) {
    D("Could not find asset: %s\n", file);
    strlcat(buffer, "Error: could not load Lua script from assets: ",
            sizeof(buffer));
    strlcat(buffer, file, sizeof(buffer));
  } else {
    char* filebytes = android_asset_get_bytes(file);
    // Lua convention: 0 means success, any non-zero value is an error
    // (the original compared against 1 only).
    // NOTE(review): ownership of |filebytes| is not visible from here — if
    // android_asset_get_bytes() heap-allocates, it leaks; confirm and free.
    int ret = luaL_dobuffer(L, filebytes, size, "main");
    if (ret != 0) {
      // Script failed: log and surface the Lua error message to the caller.
      D("Error doing resource: %s:%s\n", file, lua_tostring(L, -1));
      strlcat(buffer, lua_tostring(L, -1), sizeof(buffer));
    } else {
      strlcat(buffer,
              "Torch script ran successfully. Check Logcat for more details.",
              sizeof(buffer));
    }
  }

  // Destroy the Lua state and release the JNI-pinned strings
  // (previously leaked on every call).
  lua_close(L);
  env->ReleaseStringUTFChars(nativeLibraryDir_, nativeLibraryDir);
  env->ReleaseStringUTFChars(luaFile_, file);

  return env->NewStringUTF(buffer);
}
}
Jeff-Huang/th-android
src/torchcall.cpp
C++
bsd-3-clause
1,727
<?php

use yii\helpers\Html;

/* Renders a strip of photo thumbnails.
 *
 * Expects:
 *   $_model - iterable of photo objects, each exposing a string `name`
 *             property (the stored thumbnail file name).
 *
 * FIX: every interpolation of $photo->name is now HTML-escaped via
 * Html::encode(). The raw value was previously emitted into an attribute, a
 * JavaScript string (onclick) and text content, allowing markup/JS injection
 * through a crafted file name.
 */
?>

<?php foreach ($_model as $photo): ?>
    <?php $encodedName = Html::encode($photo->name); ?>
    <div class="thumb" style="float:left;padding: 2px"
         data-name="<?= $encodedName ?>"
         onclick="ShowFullImage('<?= $encodedName ?>')" >
        <div>
            <?php /* Html::img() encodes the src attribute itself. */ ?>
            <?= Html::img('/upload/multy-thumbs/' . $photo->name, ['height' => '70px']); ?>
        </div>
        <div style="" >
            <?= $encodedName; ?>
        </div>
    </div>
<?php endforeach; ?>
<style>
    /* Full-screen semi-opaque overlay with a centered spinner, shown while
       the full-size image loads. */
    .jpreloader.back_background{
        background-color: #111214;
        bottom: 0;
        height: 100%;
        left: 0;
        opacity: 0.9;
        position: fixed;
        right: 0;
        top: 0;
        width: 100%;
        display: block;
        background-image: url("/img/preloader.gif");
        background-repeat: no-repeat;
        background-position:center center;
    }
</style>
kotmonstr/kotmonstr
frontend/modules/image/views/default/get-photo.php
PHP
bsd-3-clause
1,011
package io.flutter.embedding.engine.mutatorsstack; import static junit.framework.TestCase.*; import static org.mockito.Mockito.*; import android.graphics.Matrix; import android.view.MotionEvent; import io.flutter.embedding.android.AndroidTouchProcessor; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.robolectric.RobolectricTestRunner; import org.robolectric.RuntimeEnvironment; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(RobolectricTestRunner.class) public class FlutterMutatorViewTest { @Test public void canDragViews() { final AndroidTouchProcessor touchProcessor = mock(AndroidTouchProcessor.class); final FlutterMutatorView view = new FlutterMutatorView(RuntimeEnvironment.systemContext, 1.0f, touchProcessor); final FlutterMutatorsStack mutatorStack = mock(FlutterMutatorsStack.class); assertTrue(view.onInterceptTouchEvent(mock(MotionEvent.class))); { view.readyToDisplay(mutatorStack, /*left=*/ 1, /*top=*/ 2, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(1, 2); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 3, /*top=*/ 4, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(1, 2); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 5, /*top=*/ 6, /*width=*/ 0, 
/*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(3, 4); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 7, /*top=*/ 8, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(7, 8); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } } }
chinmaygarde/flutter_engine
shell/platform/android/test/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorViewTest.java
Java
bsd-3-clause
3,135
package org.hisp.dhis.dxf2.adx; /* * Copyright (c) 2015, UiO * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.apache.xerces.util.XMLChar; import org.hisp.dhis.dataelement.DataElementCategory; import org.hisp.dhis.dataelement.DataElementCategoryCombo; import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo; import org.hisp.dhis.dataset.DataSet; import org.hisp.dhis.dataset.DataSetElement; /** * @author bobj */ public class AdxDataSetMetadata { // Lookup category options per cat option combo private final Map<Integer, Map<String, String>> categoryOptionMap; AdxDataSetMetadata( DataSet dataSet ) throws AdxException { categoryOptionMap = new HashMap<>(); Set<DataElementCategoryCombo> catCombos = new HashSet<>(); catCombos.add( dataSet.getCategoryCombo() ); for ( DataSetElement element : dataSet.getDataSetElements() ) { catCombos.add( element.getResolvedCategoryCombo() ); } for ( DataElementCategoryCombo categoryCombo : catCombos ) { for ( DataElementCategoryOptionCombo catOptCombo : categoryCombo.getOptionCombos() ) { addExplodedCategoryAttributes( catOptCombo ); } } } private void addExplodedCategoryAttributes( DataElementCategoryOptionCombo coc ) throws AdxException { Map<String, String> categoryAttributes = new HashMap<>(); if ( !coc.isDefault() ) { for ( DataElementCategory category : coc.getCategoryCombo().getCategories() ) { String categoryCode = category.getCode(); if ( categoryCode == null || !XMLChar.isValidName( categoryCode ) ) { throw new AdxException( "Category code for " + category.getName() + " is missing or invalid: " + categoryCode ); } String catOptCode = category.getCategoryOption( coc ).getCode(); if ( catOptCode == null || catOptCode.isEmpty() ) { throw new AdxException( "CategoryOption code for " + category.getCategoryOption( coc ).getName() + " is missing" ); } categoryAttributes.put( categoryCode, catOptCode ); } } categoryOptionMap.put( coc.getId(), categoryAttributes ); } public Map<String, String> getExplodedCategoryAttributes( int 
cocId ) { return this.categoryOptionMap.get( cocId ); } }
uonafya/jphes-core
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/adx/AdxDataSetMetadata.java
Java
bsd-3-clause
4,017
<?php /** * Created by PhpStorm. * User: qhuy * Date: 18/12/2014 * Time: 21:59 */ namespace backend\models; use common\helpers\CommonUtils; use common\helpers\CVietnameseTools; use common\helpers\MediaToolBoxHelper; use common\helpers\MyCurl; use common\models\Content; use common\models\News; use garyjl\simplehtmldom\SimpleHtmlDom; use Imagine\Image\Box; use Imagine\Image\ManipulatorInterface; use Yii; use yii\base\Model; use yii\helpers\FileHelper; use yii\helpers\Url; use yii\validators\ImageValidator; use yii\validators\UrlValidator; use yii\web\UploadedFile; class Image extends Model { const TYPE_POSTER = 1; const TYPE_THUMBNAIL = 2; const TYPE_SLIDESHOW = 3; const TYPE_AUTO = 4; const ORIENTATION_PORTRAIT = 1; const ORIENTATION_LANDSCAPE = 2; const RATIO_W = 16; const RATIO_H = 9; const RATIO_MULIPLIER_UPPER_LIMIT = 120;//Ty le toi da limit (1920x1080) public static $imageConfig = null; private $_id = ''; public $content_id; public $name; public $url; public $type = self::TYPE_POSTER; public $orientation; public static $image_types = [ self::TYPE_POSTER => 'poster', self::TYPE_SLIDESHOW => 'slide show', self::TYPE_THUMBNAIL => 'thumbnail', self::TYPE_AUTO => 'auto' ]; public static $image_orient = [ self::ORIENTATION_LANDSCAPE => 'landscape', self::ORIENTATION_PORTRAIT => 'portrait' ]; /** * @var $image UploadedFile */ public $image; /** * Create snapshot param * @param Content $video */ private static function initParamSnapshot($video, $position) { $snapshots = []; $folderSave = self::createFolderImage($video->id); if (!$folderSave) { return false; } if($position <= 0 || $position > $video->duration){ $position = rand(1, $video->duration); } $saveFileName = time() . '_' . CVietnameseTools::makeValidFileName($video->display_name . '_' . $position . '_snapshot.jpg'); $snapshot = new Snapshot(); $snapshot->time = $position; $snapshot->file_path = Yii::getAlias($folderSave) . 
$saveFileName; $snapshot->size = '100%'; $snapshots[] = $snapshot; return $snapshots; } private static function toAlias($path) { $basePath = Yii::getAlias('@webroot'); Yii::info(preg_quote($basePath, '/')); return preg_replace('/'.preg_quote($basePath, '/').'/', '@webroot', $path); } /** * @inheritdoc */ public function rules() { return [ [['image'], 'file', 'extensions' => ['jpg', 'png', 'jpeg', 'gif']], ['id', 'safe'], [['type', 'orientation'], 'integer'], [['url', 'name'], 'string', 'max' => 500], ]; } public function fields() { // if ($this->scenario == static::SCENARIO_DEFAULT) { // return parent::fields(); // } //List scenario $res = [ // field name is "email", the corresponding attribute name is "email_address" 'name', 'url', 'type', 'orientation', ]; return $res; } public function getId() { if (empty($this->_id)) { $this->_id = md5($this->url); } return $this->_id; } /** * Return full path url */ public function getImageUrl() { $validator = new UrlValidator(); if ($validator->validate($this->url)) { return $this->url; } else { if (preg_match('/(@[a-z]*)/', $this->url)) { return Yii::getAlias($this->url); } else { $configImage = self::getImageConfig(); $baseUrl = isset($configImage['base_url']) ? $configImage['base_url'] : Yii::getAlias('@web'); return $baseUrl . $this->url; } } } public function getFilePath() { $validator = new UrlValidator(); if ($validator->validate($this->url)) { return null; } else { if (preg_match('/(@[a-z]*)/', $this->url)) { return Yii::getAlias(str_replace('@web', '@webroot', $this->url)); } else { $configImage = self::getImageConfig(); $baseUrl = isset($configImage['base_url']) ? $configImage['base_url'] : Yii::getAlias('@webroot'); return $baseUrl . $this->url; } } } public function getWidthHeight() { if(is_file($this->getFilePath())){ $imageSize = getimagesize($this->getFilePath()); if ($imageSize) { return $imageSize[0] . 'x'. 
$imageSize[1]; } else { return 'N/A'; } }else{ return 'File not found'; } } public function getNameImageFullSave() { return time() . '_' . CVietnameseTools::makeValidFileName($this->name); } public static function getImageConfig() { if (Image::$imageConfig == null) { Image::$imageConfig = [ 'folder' => '@webroot' . DIRECTORY_SEPARATOR . Yii::getAlias('@content_images') . DIRECTORY_SEPARATOR, 'base_url' => Yii::getAlias('@web') . '/' . Yii::getAlias('@content_images') . DIRECTORY_SEPARATOR ]; } return Image::$imageConfig; } /** * Create folder to store image * Each store have image separate with video id * @return full path folder with alias (@webroot/...) */ public static function createFolderImage($video_id) { $configImage = self::getImageConfig(); $basePath = Yii::getAlias($configImage['folder']); if (!is_dir($basePath)) { FileHelper::createDirectory($basePath, 0777); } if (!is_dir($basePath) || $video_id == null) { Yii::error("Folder base path not exist: " . $basePath); return false; } $fullPath = $basePath . $video_id; if (!is_dir($fullPath)) { if (!FileHelper::createDirectory($fullPath, 0777)) { Yii::error("Can not create folder save image: " . $fullPath); return false; } } if (!substr($configImage['folder'], -1) == '/') { $configImage['folder'] .= '/'; } return $configImage['folder'] . $video_id . '/'; } /** * Thuc hien save image khi dc upload len * @param $content News * @return bool * @throws \yii\web\UnauthorizedHttpException */ public function saveImage($content) { $video_id = $content->id; $folderSave = self::createFolderImage($video_id); if (!$folderSave) { $this->addError('image', 'Folder save image not found'); return false; } if ($this->image == null) { $this->addError('image', "Image file not found!"); return false; } $saveFileName = $this->getNameImageFullSave(); $imagePath = Yii::getAlias($folderSave) . $saveFileName; Yii::info("Save file to " . 
$imagePath, 'VideoImage'); if (!$this->image->saveAs($imagePath)) { $this->addError('image', 'Can not save '); return false; } $imageSize = getimagesize($imagePath); if (count($imageSize) > 0) { if ($imageSize[0] > $imageSize[1]) { //neu width > height $this->orientation = self::ORIENTATION_LANDSCAPE; } else { $this->orientation = self::ORIENTATION_PORTRAIT; } } $this->url = $this->resolveUrl($folderSave) . $saveFileName; if ($this->type == self::TYPE_AUTO) { $this->autoGenerateImages($content, $folderSave, $saveFileName); } return true; } /** * replaces '/(@[a-z]*)root/' => '$1' * @param string $path * @return string */ public function resolveUrl($path) { return preg_replace('/(@[a-z]*)root/', '$1', $path); } /** * @param $content News */ public function save($content) { return $content->addImage($this->getArray()); } public function getArray() { return [ 'name' => $this->name, 'url' => $this->url, 'type' => $this->type, 'orientation' => $this->orientation ]; } public function delete() { //Delete image $file_path = $this->getFilePath(); if (is_dir($file_path)) { return true; } if ($file_path && file_exists($file_path)) { return unlink($this->getFilePath()); } return true; } /** * @param $video Content * @return bool */ public function loadImageYt($video) { $ch = new MyCurl(); $folderSave = self::createFolderImage($video->id); if (!$folderSave) { $this->addError('image', 'Folder save image not found'); return false; } $yt_info = CommonUtils::getVideoYoutubeInfo($video->youtube_id); if ($yt_info == null || $yt_info->snippet == null || $yt_info->snippet->thumbnails == null) { return false; } $img_yt_url = ''; $thumbnails = $yt_info->snippet->thumbnails; $img_yt_url = $this->getHighestImage($thumbnails); if (empty($img_yt_url)) { return false; } $image_extention = end(explode('.', $img_yt_url)); $this->name = $video->display_name . '_yt.' . 
$image_extention; $this->type = self::TYPE_AUTO; $saveFileName = $this->getNameImageFullSave(); $imagePath = Yii::getAlias($folderSave) . $saveFileName; Yii::info("Save file to " . $imagePath, 'VideoImage'); $response = $ch->download($img_yt_url, Yii::getAlias($folderSave), $saveFileName); if (!$response) { $this->addError('image', 'Can not save '); return false; } $imageSize = getimagesize($imagePath); if (count($imageSize) > 0) { if ($imageSize[0] > $imageSize[1]) { //neu width > height $this->orientation = self::ORIENTATION_LANDSCAPE; } else { $this->orientation = self::ORIENTATION_PORTRAIT; } } $this->url = $this->resolveUrl($folderSave) . $saveFileName; $this->autoGenerateImages($video, $folderSave, $saveFileName); if ($yt_info->contentDetails != null) { $video->duration = CommonUtils::convertYtDurationToSeconds($yt_info->contentDetails->duration); $video->update(false); } return true; } /** * @param Content $video * @return bool */ public static function loadImageSnapshot($video, $video_file, $position = 0) { $snapshots = self::initParamSnapshot($video,$position); $response = MediaToolBoxHelper::getVideoSnapshot($video_file, $snapshots); if (!$response) { Yii::error('Can not get snapshot'); return false; } foreach ($response as $snapshot) { $image = new Image(); $folderSave = self::toAlias(pathinfo($snapshot->file_path, PATHINFO_DIRNAME).DIRECTORY_SEPARATOR); $saveFileName = pathinfo($snapshot->file_path, PATHINFO_BASENAME); $image->name = $saveFileName; $image->autoGenerateImages($video, $folderSave, $saveFileName); } return true; } /** * Load first image from content to create some image * @param $content Content */ public function loadImageFromContent($content) { $ch = new MyCurl(); $folderSave = self::createFolderImage($content->id); if (!$folderSave) { $this->addError('image', 'Folder save image not found'); return false; } $img_content_url = ''; $html = SimpleHtmlDom::str_get_html($content->content); // Get first image foreach($html->find('img') as 
$element){ $img_content_url = $element->src; if(!empty($img_content_url)) break; } if(empty($img_content_url)){ return false; } $url_validator = new UrlValidator(); if(!$url_validator->validate($img_content_url)){ $img_content_url = Yii::$app->getUrlManager()->getHostInfo().$img_content_url; } Yii::info($img_content_url); $image_extention = end(explode('.', $img_content_url)); $this->name = $content->display_name . '_content.' . $image_extention; $this->type = self::TYPE_AUTO; $saveFileName = $this->getNameImageFullSave(); $imagePath = Yii::getAlias($folderSave) . $saveFileName; Yii::info("Save file to " . $imagePath, 'Image'); $response = $ch->download($img_content_url, Yii::getAlias($folderSave), $saveFileName); if (!$response || !CommonUtils::validateImage($imagePath)) { $this->addError('image', 'Can not save '); return false; } $imageSize = getimagesize($imagePath); if (count($imageSize) > 0) { if ($imageSize[0] > $imageSize[1]) { //neu width > height $this->orientation = self::ORIENTATION_LANDSCAPE; } else { $this->orientation = self::ORIENTATION_PORTRAIT; } } $this->url = $this->resolveUrl($folderSave) . $saveFileName; $this->autoGenerateImages($content, $folderSave, $saveFileName); return true; } /** * Tu dong generate ra cac file dinh dang khac nhau * @param $video * @param $folderSave //Dang tuong doi example '@webroot/content_images/45/ * @param $saveFileName * @return bool */ public function autoGenerateImages($video, $folderSave, $saveFileName) { $imagePath = Yii::getAlias($folderSave) . 
$saveFileName; $imageSize = getimagesize($imagePath); $img_width = 1; $img_height = 1; if (count($imageSize) > 0) { $img_width = $imageSize[0]; $img_height = $imageSize[1]; } else { return false; } //Resize to slide $slide = $this->resizeImage($folderSave, $imagePath, $img_width, $img_height, self::TYPE_SLIDESHOW); if (!$slide->save($video)) { return false; } //Resize to poster $poster = $this->resizeImage($folderSave, $imagePath, $img_width, $img_height); if (!$poster->save($video)) { return false; }; //Resize to thumbnail $poster = $this->resizeImage($folderSave, $imagePath, $img_width, $img_height, self::TYPE_THUMBNAIL); if (!$poster->save($video)) { return false; } } /** * @param $imageTool \yii\imagine\Image * @param $width * @param $heigh */ private function resizeImage($folder, $filename, $width, $height, $type = self::TYPE_POSTER) { $model = new Image(); $box_src = new Box($width, $height); $box = null; switch ($type) { case self::TYPE_THUMBNAIL: $box = new Box(320, 180); break; default: if (($width / $height) === (self::RATIO_W / self::RATIO_H)) { $box = $box_src; } else { list($new_width, $new_height) = $this->getNewSize($width, $height, self::RATIO_W / self::RATIO_H); $box = new Box($new_width, $new_height); } } $imageTool = \yii\imagine\Image::getImagine()->open($filename); $image = $imageTool->thumbnail($box, ManipulatorInterface::THUMBNAIL_OUTBOUND); $model->name = self::$image_types[$type] . '_' . $this->name; $model->type = $type; $saveFileName = $model->getNameImageFullSave(); $imagePath = Yii::getAlias($folder) . $saveFileName; Yii::info("Save file to " . $imagePath, 'VideoImage'); if (!$image->save($imagePath)) { return null; } $imageSize = getimagesize($imagePath); if (count($imageSize) > 0) { if ($imageSize[0] > $imageSize[1]) { //neu width > height $model->orientation = self::ORIENTATION_LANDSCAPE; } else { $model->orientation = self::ORIENTATION_PORTRAIT; } } $model->url = $this->resolveUrl($folder) . 
$saveFileName; return $model; } private function getHighestImage($thumbnails) { if ($thumbnails->maxres != null) { return $thumbnails->maxres->url; } if ($thumbnails->standard != null) { return $thumbnails->standard->url; } if ($thumbnails->high != null) { return $thumbnails->high->url; } if ($thumbnails->medium != null) { return $thumbnails->medium->url; } if ($thumbnails->default != null) { return $thumbnails->default->url; } } private function getNewSize($width, $height, $ratio) { // Find closest ratio multiple to image size if ($width > $height) { // landscape $ratioMultiple = round($height / self::RATIO_H, 0, PHP_ROUND_HALF_DOWN); } else { // portrait $ratioMultiple = round($width / self::RATIO_W, 0, PHP_ROUND_HALF_DOWN); } $newWidth = $ratioMultiple * self::RATIO_W; $newHeight = $ratioMultiple * self::RATIO_H; if ($newWidth > self::RATIO_W * self::RATIO_MULIPLIER_UPPER_LIMIT || $newHeight > self::RATIO_H * self::RATIO_MULIPLIER_UPPER_LIMIT) { // File is larger than upper limit $ratioMultiple = self::RATIO_MULIPLIER_UPPER_LIMIT; } $this->tweakMultiplier($ratioMultiple, $width, $height); $newWidth = $ratioMultiple * self::RATIO_W; $newHeight = $ratioMultiple * self::RATIO_H; return [ $newWidth, $newHeight ]; } /** * Xac dinh ratio sao cho new_width, new_height kho qua kich thuoc anh that * @param $ratioMultiple * @param $fitInsideWidth * @param $fitInsideHeight */ protected function tweakMultiplier(&$ratioMultiple, $fitInsideWidth, $fitInsideHeight) { $newWidth = $ratioMultiple * self::RATIO_W; $newHeight = $ratioMultiple * self::RATIO_H; if ($newWidth > $fitInsideWidth || $newHeight > $fitInsideHeight) { $ratioMultiple--; $this->tweakMultiplier($ratioMultiple, $fitInsideWidth, $fitInsideHeight); } else { return; } } }
tuanpv1/news
backend/models/Image.php
PHP
bsd-3-clause
19,090
Ext.define('Ozone.data.Dashboard', { extend: 'Ext.data.Model', idProperty: 'guid', fields:[ 'alteredByAdmin', 'guid', {name:'id', mapping: 'guid'}, { name: 'isdefault', type: 'boolean', defaultValue: false }, { name: 'dashboardPosition', type: 'int' }, 'EDashboardLayoutList', 'name', { name: 'state', defaultValue: [] }, 'removed', 'groups', 'isGroupDashboard', 'description', 'createdDate', 'prettyCreatedDate', 'editedDate', 'prettyEditedDate', { name: 'stack', defaultValue: null }, { name: 'locked', type: 'boolean', defaultValue: false }, { name: 'layoutConfig', defaultValue: null }, { name: 'createdBy', model: 'User'}, { name: 'user', model: 'User'} ], constructor: function(data, id, raw) { if(data.layoutConfig && typeof data.layoutConfig === 'string' && data.layoutConfig !== Object.prototype.toString()) { data.layoutConfig = Ext.JSON.decode(data.layoutConfig); } //todo see if we still need this if(data.layoutConfig === Object.prototype.toString()) { data.layoutConfig = ""; } if(!data.guid) { data.guid = guid.util.guid(); } this.callParent(arguments); } }); Ext.define('Ozone.data.stores.AdminDashboardStore', { extend:'Ozone.data.OWFStore', model: 'Ozone.data.Dashboard', alias: 'store.admindashboardstore', remoteSort: true, totalProperty:'results', sorters: [ { property : 'dashboardPosition', direction: 'ASC' } ], constructor: function(config) { Ext.applyIf(config, { api: { read: "/dashboard", create: "/dashboard", update: "/dashboard", destroy: "/dashboard" }, reader: { root: 'data' }, writer: { root: 'data' } }); this.callParent(arguments); }, reorder: function() { if (this.getCount() > 0) { for (var i = 0; i < this.getCount(); i++) { var dashboard = this.getAt(i); dashboard.set('dashboardPosition', i + 1); } } } }); Ext.define('Ozone.components.admin.grid.DashboardGroupsGrid', { extend: 'Ext.grid.Panel', alias: ['widget.dashboardgroupsgrid'], quickSearchFields: ['name'], plugins: new Ozone.components.focusable.FocusableGridPanel(), cls: 'grid-dashboard', 
defaultPageSize: 50, multiSelect: true, forceFit: true, baseParams: null, initComponent: function() { //create new store if (this.store == null) { this.store = Ext.StoreMgr.lookup({ type: 'admindashboardstore', pageSize: this.defaultPageSize }); } if (this.baseParams) { this.setBaseParams(this.baseParams); } Ext.apply(this, { columnLines:true, columns: [ { itemId: 'guid', header: 'GUID', dataIndex: 'guid', flex: 1, width: 210, minWidth: 210, sortable: true, hidden: true, renderer: function(value, metaData, record, rowIndex, columnIndex, store, view) { return '<div class="grid-text">' + value +'</div>'; } },{ itemId: 'name', header: 'Dashboard Title', dataIndex: 'name', flex: 3, minWidth: 200, sortable: true, renderer: function(value, metaData, record, rowIndex, columnIndex, store, view) { var title = value; var dashboardLayoutList = record.get('EDashboardLayoutList'); //List of valid ENUM Dashboard Layout Strings var dashboardLayout = record.get('layout'); //current dashboard layout string var iconClass = "grid-dashboard-default-icon-layout"; // if(dashboardLayout && dashboardLayoutList){ // if(dashboardLayoutList.indexOf(dashboardLayout) != -1){ // iconClass = "grid-dashboard-icon-layout-" + dashboardLayout; // } // } // var retVal = '<div class="grid-dashboard-title-box"><div class="grid-dashboard-icon ' + iconClass +'"></div>'; // retVal += '<div class="grid-dashboard-title">' + title + '</div>'; // retVal += '</div>'; return '<p class="grid-dashboard-title '+ iconClass + '">' + Ext.htmlEncode(title) + '</p>'; } }, { itemId: 'groups', header: 'Groups', dataIndex: 'groups', flex: 1, sortable: false, renderer: function(value, metaData, record, rowIndex, columnIndex, store, view) { return '<div class="grid-text grid-dashboard-group-count">' + value.length +'</div>'; } }, { itemId: 'widgets', header: 'Widgets', dataIndex: 'layoutConfig', flex: 1, sortable: false, renderer: function(value, metaData, record, rowIndex, columnIndex, store, view) { var widgetCount = 0; 
if (value) { var countWidgets = function(cfg) { if(!cfg || !cfg.items) return; if(cfg.items.length === 0) { if(cfg.widgets && cfg.widgets.length > 0) { widgetCount += cfg.widgets.length; } } else { for(var i = 0, len = cfg.items.length; i < len; i++) { countWidgets(cfg.items[i]); } } return widgetCount; }; widgetCount = countWidgets(value); } return '<div class="grid-text grid-dashboard-widget-count">' + widgetCount +'</div>'; } } ] }); Ext.apply(this, { multiSelect: true, dockedItems: [Ext.create('Ext.toolbar.Paging', { dock: 'bottom', store: this.store, displayInfo: true, hidden: this.hidePagingToolbar, itemId: 'dashboard-groups-grid-paging' })] }); this.callParent(arguments); }, getSelectedDashboards: function(){ return this.getSelectionModel().getSelection(); }, load: function() { this.store.loadPage(1); }, refresh: function() { this.store.loadPage(this.store.currentPage); }, getTopToolbar: function() { return this.getDockedItems('toolbar[dock="top"]')[0]; }, getBottomToolbar: function() { return this.getDockedItems('toolbar[dock="bottom"]')[0]; }, applyFilter: function(filterText, fields) { this.store.proxy.extraParams = undefined; if (filterText) { var filters = []; for (var i = 0; i < fields.length; i++) { filters.push({ filterField: fields[i], filterValue: filterText }); } this.store.proxy.extraParams = { filters: Ext.JSON.encode(filters), filterOperator: 'OR' }; } if (this.baseParams) { this.setBaseParams(this.baseParams); } this.store.loadPage(1,{ params: { offset: 0, max: this.store.pageSize } }); }, clearFilters: function() { this.store.proxy.extraParams = undefined; if (this.baseParams) { this.setBaseParams(this.baseParams); } this.store.load({ params: { start: 0, max: this.store.pageSize } }); }, setBaseParams: function(params) { this.baseParams = params; if (this.store.proxy.extraParams) { Ext.apply(this.store.proxy.extraParams, params); } else { this.store.proxy.extraParams = params; } }, setStore: function(store, cols) { this.reconfigure(store, 
cols); var pgtb = this.getBottomToolbar(); if (pgtb) { pgtb.bindStore(store); } } }); Ext.define('Ozone.components.admin.dashboard.DashboardDetailPanel', { extend: 'Ext.panel.Panel', alias: ['widget.dashboarddetailpanel', 'widget.dashboarddetail'], viewDashboard: null, loadedRecord: null, initComponent: function() { //init quicktips Ext.tip.QuickTipManager.init(true,{ dismissDelay: 60000, showDelay: 2000 }); this.viewDashboard = Ext.create('Ext.view.View', { store: Ext.create('Ext.data.Store', { storeId: 'storeDashboardItem', fields: [ { name: 'name', type: 'string' }, { name: 'layout', type: 'string' }, { name: 'EDashboardLayoutList', type: 'string' }, { name: 'isGroupDashboard', type: 'boolean'}, { name: 'groups', model: 'Group'}, { name: 'description', type: 'string' }, { name: 'createdDate', type: 'string' }, { name: 'prettyCreatedDate', type: 'string' }, { name: 'editedDate', type: 'string' }, { name: 'prettyEditedDate', type: 'string' }, { name: 'createdBy', model: 'User' }, { name: 'stack', model: 'Stack'} ] }), deferEmptyText: false, tpl: new Ext.XTemplate( '<tpl for=".">', '<div class="selector">', '<div id="detail-info" class="detail-info">', '<div class="dashboard-detail-icon-block">', '{[this.renderIconBlock(values)]}', '</div>', '<div class="dashboard-detail-info-block">', '<div class="detail-header-block">', '{[this.renderDetailHeaderBlock(values)]}', '</div>', '<div class="detail-block">', '<div><span class="detail-label">Description:</span> {description:htmlEncode}</span></div><br>', '<div><span class="detail-label">Groups:</span> {[this.renderGroups(values)]}</div>', '<div><span class="detail-label">Created:</span> <span {createdDate:this.renderToolTip}>{prettyCreatedDate:this.renderDate}</span></div>', '<div><span class="detail-label">Author:</span> {[this.renderUserRealName(values)]}</div>', '<div><span class="detail-label">Last Modified:</span> <span {editedDate:this.renderToolTip}>{prettyEditedDate:this.renderDate}</span></div>', '</div>', 
'</div>', '</div>', '</div>', '</tpl>', { compiled: true, renderDate: function(value) { return value ? value : ''; }, renderToolTip: function (value) { var str = 'data-qtip="' + value + '"'; return str; }, renderUserRealName: function(values) { var createdBy = values.createdBy; return (createdBy.userRealName ? Ext.htmlEncode(createdBy.userRealName) : '') }, renderGroups: function(values) { var groups = values.groups; var stack = values.stack; var retVal = ''; if (!stack && groups && groups.length > 0) { for (var i = -1; ++i < groups.length;) { retVal += Ext.htmlEncode(groups[i].name) + ', '; } retVal = retVal.substring(0, retVal.length - 2); } return retVal; }, renderIconBlock: function(values) { var iconClass = "dashboard-default-icon-layout"; var retVal = '<div class="dashboard-icon ' + iconClass + '"></div>'; return retVal; }, renderDetailHeaderBlock: function(values){ var isGroupDashboard = values.isGroupDashboard; var title = values.name; var retVal = '<div class="dashboard-title-block">'; retVal += '<div class="dashboard-title detail-title">' + Ext.htmlEncode(title) + '</div>'; retVal += (isGroupDashboard) ? 
'<div>This is a group dashboard.</div>' : ''; retVal += '</div>'; return retVal; } } ), emptyText: 'No dashboard selected', itemSelector: 'div.selector', autoScroll: 'true' }); this.items = [this.viewDashboard]; this.callParent(arguments); }, loadData: function(record) { this.viewDashboard.store.loadData([record], false); this.loadedRecord = record; }, removeData: function() { this.viewDashboard.store.removeAll(false); this.loadedRecord = null; } }); Ext.define('Ozone.components.admin.dashboard.GroupDashboardManagementPanel', { extend: 'Ozone.components.admin.ManagementPanel', alias: ['widget.groupdashboardmanagement','widget.groupdashboardmanagementpanel','widget.Ozone.components.admin.GroupDashboardManagementPanel'], layout: 'fit', cls: 'groupdashboardmanagementpanel', gridDashboards: null, pnlDashboardDetail: null, txtHeading: null, lastAction: null, guid_EditCopyWidget: null, widgetStateHandler: null, dragAndDrop: true, launchesWidgets: true, channel: 'AdminChannel', defaultTitle: 'Group Dashboards', minButtonWidth: 80, detailsAutoOpen: true, initComponent: function() { var me = this; OWF.Preferences.getUserPreference({ namespace: 'owf.admin.DashboardEditCopy', name: 'guid_to_launch', onSuccess: function(result) { me.guid_EditCopyWidget = result.value; }, onFailure: function(err){ /* No op */ me.showAlert('Preferences Error', 'Error looking up Dashboard Editor: ' + err); } }); this.gridDashboards = Ext.create('Ozone.components.admin.grid.DashboardGroupsGrid', { preventHeader: true, region: 'center', border: false }); this.gridDashboards.setBaseParams({ adminEnabled: true, isGroupDashboard: true, isStackDashboard: false }); this.gridDashboards.store.load({ params: { offset: 0, max: this.pageSize } }); this.relayEvents(this.gridDashboards, ['datachanged', 'select', 'deselect', 'itemdblclick']); this.pnlDashboardDetail = Ext.create('Ozone.components.admin.dashboard.DashboardDetailPanel', { layout: { type: 'fit', align: 'stretch' }, region: 'east', preventHeader: 
true, collapseMode: 'mini', collapsible: true, collapsed: true, split: true, border: false, width: 266 }); this.txtHeading = Ext.create('Ext.toolbar.TextItem', { text: '<span class="heading-bold">'+this.defaultTitle+'</span>' }); this.searchBox = Ext.widget('searchbox'); this.items = [{ xtype: 'panel', layout: 'border', border: false, items: [ this.gridDashboards, this.pnlDashboardDetail ] }]; this.dockedItems = [{ xtype: 'toolbar', dock: 'top', layout: { type: 'hbox', align: 'stretchmax' }, items: [ this.txtHeading, { xtype: 'tbfill' }, this.searchBox ] }, { xtype: 'toolbar', dock: 'bottom', ui: 'footer', defaults: { minWidth: this.minButtonWidth }, items: [{ xtype: 'button', text: 'Create', handler: function(button, evt) { evt.stopPropagation(); me.doCreate(); } }, { xtype: 'button', text: 'Edit', handler: function() { me.doEdit(); } }, { xtype: 'button', text: 'Delete', handler: function(button) { me.doDelete(); } }] }]; this.gridDashboards.store.on( 'load', function(thisStore, records, options){ if ((this.pnlDashboardDetail != null ) && (!this.pnlDashboardDetail.collapsed) && (this.pnlDashboardDetail.loadedRecord != null)){ for(var idx=0; idx < records.length; idx++){ if(records[idx].id == this.pnlDashboardDetail.loadedRecord.id){ this.pnlDashboardDetail.loadData(records[idx]); break; } } } }, this ); this.on( 'datachanged', function(store, opts) { //collapse and clear detail panel if the store is refreshed if (this.pnlDashboardDetail != null ) { this.pnlDashboardDetail.collapse(); this.pnlDashboardDetail.removeData(); } //refresh launch menu if (!this.disableLaunchMenuRefresh) { this.refreshWidgetLaunchMenu(); } }, this ); this.on( 'select', function(rowModel, record, index, opts) { this.pnlDashboardDetail.loadData(record); if (this.pnlDashboardDetail.collapsed && this.detailsAutoOpen) {this.pnlDashboardDetail.expand();} }, this ); this.searchBox.on( 'searchChanged', function(searchbox, value) { var grid = this.gridDashboards; if (grid) { if (!value) 
this.gridDashboards.clearFilters(); else this.gridDashboards.applyFilter(value, ['name', 'description']); } }, this ); this.on({ 'itemdblclick': { scope: this, fn: this.doEdit } }); this.gridDashboards.getView().on({ itemkeydown: { scope: this, fn: function(view, record, dom, index, evt) { switch(evt.getKey()) { case evt.SPACE: case evt.ENTER: this.doEdit(); } } } }); this.callParent(arguments); OWF.Eventing.subscribe('AdminChannel', owfdojo.hitch(this, function(sender, msg, channel) { if(msg.domain === 'Dashboard') { this.gridDashboards.getBottomToolbar().doRefresh(); } })); this.on( 'afterrender', function() { var splitterEl = this.el.down(".x-collapse-el"); splitterEl.on('click', function() { var collapsed = this.el.down(".x-splitter-collapsed"); if(collapsed) { this.detailsAutoOpen = true; } else { this.detailsAutoOpen = false; } }, this); }, this ); }, onLaunchFailed: function(response) { if (response.error) { this.showAlert('Launch Error', 'Dashboard Editor Launch Failed: ' + response.message); } }, doCreate: function() { var dataString = Ozone.util.toString({ copyFlag: false, isCreate: true, isGroupDashboard: true }); OWF.Launcher.launch({ guid: this.guid_EditCopyWidget, launchOnlyIfClosed: false, data: dataString }, this.onLaunchFailed); }, doEdit: function() { var records = this.gridDashboards.getSelectedDashboards(); if (records && records.length > 0) { for (var i = 0; i < records.length; i++) { var id = records[i].getId();//From Id property of Dashboard Model var dataString = Ozone.util.toString({ id: id, copyFlag: false, isCreate: false, isGroupDashboard: true }); OWF.Launcher.launch({ title: '$1 - ' + records[i].get('name'), titleRegex: /(.*)/, guid: this.guid_EditCopyWidget, launchOnlyIfClosed: false, data: dataString }, this.onLaunchFailed); } } else { this.showAlert("Error", "You must select at least one dashboard to edit"); } }, doDelete: function() { var records = this.gridDashboards.getSelectionModel().getSelection(); if (records && 
records.length > 0) { var msg = 'This action will permanently delete '; if (records.length == 1) { msg += '<span class="heading-bold">' + Ext.htmlEncode(records[0].data.name) + '</span>.'; } else { msg += 'the selected <span class="heading-bold">' + records.length + ' dashboards</span>.'; } this.showConfirmation('Warning', msg, function(btn, text, opts) { if(btn == 'ok') { var store = this.gridDashboards.getStore(); store.remove(records); var remainingRecords = store.getTotalCount() - records.length; store.on({ write: { fn: function() { if(store.data.items.length == 0 && store.currentPage > 1) { var lastPage = store.getPageFromRecordIndex(remainingRecords - 1); var pageToLoad = (lastPage >= store.currentPage) ? store.currentPage : lastPage; store.loadPage(pageToLoad); } this.gridDashboards.getBottomToolbar().doRefresh(); this.pnlDashboardDetail.removeData(); if(!this.pnlDashboardDetail.collapsed) { this.pnlDashboardDetail.collapse();} this.refreshWidgetLaunchMenu(); }, scope: this, single: true } }); store.save(); } }); } else { this.showAlert("Error", "You must select at least one dashboard to delete"); } } });
Nanonid/tcsolrsvc
webapps/owf/js/owf-group-dashboard-management-widget.js
JavaScript
bsd-3-clause
24,987
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45.cpp
Label Definition File: CWE36_Absolute_Path_Traversal.label.xml
Template File: sources-sink-45.tmpl.cpp */
/*
 * @description
 * CWE: 36 Absolute Path Traversal
 * BadSource: file Read input from a file
 * GoodSource: Full path and file name
 * Sinks: fopen
 *    BadSink : Open the file named in data using fopen()
 * Flow Variant: 45 Data flow: data passed as a static global variable from one
 * function to another in the same source file
 *
 * */

#include "std_testcase.h"

#ifndef _WIN32
#include <wchar.h>
#endif

/* Fixed file used by the bad source as its input */
#ifdef _WIN32
#define FILENAME "C:\\temp\\file.txt"
#else
#define FILENAME "/tmp/file.txt"
#endif

/* Wide-character fopen variant; _wfopen on Windows, plain fopen elsewhere */
#ifdef _WIN32
#define FOPEN _wfopen
#else
#define FOPEN fopen
#endif

namespace CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45
{

/* Flow variant 45: the source hands data to the sink through these
 * file-scope static globals rather than via parameters. */
static wchar_t * badData;
static wchar_t * goodG2BData;

#ifndef OMITBAD

/* Sink: opens whatever path is currently stored in badData. */
static void badSink()
{
    wchar_t * data = badData;
    {
        FILE *pFile = NULL;
        /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
        pFile = FOPEN(data, L"wb+");
        if (pFile != NULL)
        {
            fclose(pFile);
        }
    }
}

/* Bad source: fills data with a line read from FILENAME, then invokes the sink. */
void bad()
{
    wchar_t * data;
    wchar_t dataBuffer[FILENAME_MAX] = L"";
    data = dataBuffer;
    {
        /* Read input from a file */
        size_t dataLen = wcslen(data);
        FILE * pFile;
        /* if there is room in data, attempt to read the input from a file */
        if (FILENAME_MAX-dataLen > 1)
        {
            pFile = fopen(FILENAME, "r");
            if (pFile != NULL)
            {
                /* POTENTIAL FLAW: Read data from a file */
                if (fgetws(data+dataLen, (int)(FILENAME_MAX-dataLen), pFile) == NULL)
                {
                    printLine("fgetws() failed");
                    /* Restore NUL terminator if fgetws fails */
                    data[dataLen] = L'\0';
                }
                fclose(pFile);
            }
        }
    }
    badData = data;
    badSink();
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B() uses the GoodSource with the BadSink */
static void goodG2BSink()
{
    wchar_t * data = goodG2BData;
    {
        FILE *pFile = NULL;
        /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
        pFile = FOPEN(data, L"wb+");
        if (pFile != NULL)
        {
            fclose(pFile);
        }
    }
}

/* Good source: data is a fixed, full path, so no traversal is possible. */
static void goodG2B()
{
    wchar_t * data;
    wchar_t dataBuffer[FILENAME_MAX] = L"";
    data = dataBuffer;
#ifdef _WIN32
    /* FIX: Use a fixed, full path and file name */
    wcscat(data, L"c:\\temp\\file.txt");
#else
    /* FIX: Use a fixed, full path and file name */
    wcscat(data, L"/tmp/file.txt");
#endif
    goodG2BData = data;
    goodG2BSink();
}

void good()
{
    goodG2B();
}

#endif /* OMITGOOD */

} /* close namespace */

/* Below is the main(). It is only used when building this testcase on its own
 * for testing or for building a binary to use in testing binary analysis tools.
 * It is not used when compiling all the testcases as one application, which is
 * how source code analysis tools are tested. */
#ifdef INCLUDEMAIN

using namespace CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45; /* so that we can use good and bad easily */

int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE36_Absolute_Path_Traversal/s04/CWE36_Absolute_Path_Traversal__wchar_t_file_fopen_45.cpp
C++
bsd-3-clause
3,879
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE401_Memory_Leak__new_twoIntsStruct_52b.cpp Label Definition File: CWE401_Memory_Leak__new.label.xml Template File: sources-sinks-52b.tmpl.cpp */ /* * @description * CWE: 401 Memory Leak * BadSource: Allocate data using new * GoodSource: Allocate data on the stack * Sinks: * GoodSink: call delete on data * BadSink : no deallocation of data * Flow Variant: 52 Data flow: data passed as an argument from one function to another to another in three different source files * * */ #include "std_testcase.h" #ifndef _WIN32 #include <wchar.h> #endif namespace CWE401_Memory_Leak__new_twoIntsStruct_52 { #ifndef OMITBAD /* bad function declaration */ void badSink_c(twoIntsStruct * data); void badSink_b(twoIntsStruct * data) { badSink_c(data); } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B uses the GoodSource with the BadSink */ void goodG2BSink_c(twoIntsStruct * data); void goodG2BSink_b(twoIntsStruct * data) { goodG2BSink_c(data); } /* goodB2G uses the BadSource with the GoodSink */ void goodB2GSink_c(twoIntsStruct * data); void goodB2GSink_b(twoIntsStruct * data) { goodB2GSink_c(data); } #endif /* OMITGOOD */ } /* close namespace */
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE401_Memory_Leak/s02/CWE401_Memory_Leak__new_twoIntsStruct_52b.cpp
C++
bsd-3-clause
1,293
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/login/users/chrome_user_manager_impl.h" #include <cstddef> #include <set> #include "ash/multi_profile_uma.h" #include "base/bind.h" #include "base/bind_helpers.h" #include "base/command_line.h" #include "base/compiler_specific.h" #include "base/format_macros.h" #include "base/logging.h" #include "base/metrics/histogram.h" #include "base/prefs/pref_registry_simple.h" #include "base/prefs/pref_service.h" #include "base/prefs/scoped_user_pref_update.h" #include "base/strings/string_util.h" #include "base/strings/stringprintf.h" #include "base/strings/utf_string_conversions.h" #include "base/thread_task_runner_handle.h" #include "base/values.h" #include "chrome/browser/browser_process.h" #include "chrome/browser/chrome_notification_types.h" #include "chrome/browser/chromeos/login/demo_mode/demo_app_launcher.h" #include "chrome/browser/chromeos/login/session/user_session_manager.h" #include "chrome/browser/chromeos/login/signin/auth_sync_observer.h" #include "chrome/browser/chromeos/login/signin/auth_sync_observer_factory.h" #include "chrome/browser/chromeos/login/users/avatar/user_image_manager_impl.h" #include "chrome/browser/chromeos/login/users/multi_profile_user_controller.h" #include "chrome/browser/chromeos/login/users/supervised_user_manager_impl.h" #include "chrome/browser/chromeos/policy/browser_policy_connector_chromeos.h" #include "chrome/browser/chromeos/policy/device_local_account.h" #include "chrome/browser/chromeos/profiles/multiprofiles_session_aborted_dialog.h" #include "chrome/browser/chromeos/profiles/profile_helper.h" #include "chrome/browser/chromeos/session_length_limiter.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/supervised_user/chromeos/manager_password_service_factory.h" #include 
"chrome/browser/supervised_user/chromeos/supervised_user_password_service_factory.h" #include "chrome/common/chrome_constants.h" #include "chrome/common/chrome_switches.h" #include "chrome/common/crash_keys.h" #include "chrome/common/pref_names.h" #include "chrome/grit/theme_resources.h" #include "chromeos/chromeos_switches.h" #include "chromeos/login/user_names.h" #include "chromeos/settings/cros_settings_names.h" #include "components/session_manager/core/session_manager.h" #include "components/user_manager/remove_user_delegate.h" #include "components/user_manager/user_image/user_image.h" #include "components/user_manager/user_type.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/notification_service.h" #include "policy/policy_constants.h" #include "ui/base/resource/resource_bundle.h" #include "ui/wm/core/wm_core_switches.h" using content::BrowserThread; namespace chromeos { namespace { // A vector pref of the the regular users known on this device, arranged in LRU // order. const char kRegularUsers[] = "LoggedInUsers"; // A vector pref of the public accounts defined on this device. const char kPublicAccounts[] = "PublicAccounts"; // A string pref that gets set when a public account is removed but a user is // currently logged into that account, requiring the account's data to be // removed after logout. 
const char kPublicAccountPendingDataRemoval[] = "PublicAccountPendingDataRemoval"; } // namespace // static void ChromeUserManagerImpl::RegisterPrefs(PrefRegistrySimple* registry) { ChromeUserManager::RegisterPrefs(registry); registry->RegisterListPref(kPublicAccounts); registry->RegisterStringPref(kPublicAccountPendingDataRemoval, std::string()); SupervisedUserManager::RegisterPrefs(registry); SessionLengthLimiter::RegisterPrefs(registry); } // static scoped_ptr<ChromeUserManager> ChromeUserManagerImpl::CreateChromeUserManager() { return scoped_ptr<ChromeUserManager>(new ChromeUserManagerImpl()); } ChromeUserManagerImpl::ChromeUserManagerImpl() : ChromeUserManager(base::ThreadTaskRunnerHandle::Get(), BrowserThread::GetBlockingPool()), cros_settings_(CrosSettings::Get()), device_local_account_policy_service_(NULL), supervised_user_manager_(new SupervisedUserManagerImpl(this)), weak_factory_(this) { UpdateNumberOfUsers(); // UserManager instance should be used only on UI thread. DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); registrar_.Add(this, chrome::NOTIFICATION_OWNERSHIP_STATUS_CHANGED, content::NotificationService::AllSources()); registrar_.Add(this, chrome::NOTIFICATION_LOGIN_USER_PROFILE_PREPARED, content::NotificationService::AllSources()); registrar_.Add(this, chrome::NOTIFICATION_PROFILE_CREATED, content::NotificationService::AllSources()); // Since we're in ctor postpone any actions till this is fully created. 
if (base::MessageLoop::current()) { base::MessageLoop::current()->PostTask( FROM_HERE, base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies, weak_factory_.GetWeakPtr())); } local_accounts_subscription_ = cros_settings_->AddSettingsObserver( kAccountsPrefDeviceLocalAccounts, base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies, weak_factory_.GetWeakPtr())); multi_profile_user_controller_.reset( new MultiProfileUserController(this, GetLocalState())); policy::BrowserPolicyConnectorChromeOS* connector = g_browser_process->platform_part()->browser_policy_connector_chromeos(); avatar_policy_observer_.reset(new policy::CloudExternalDataPolicyObserver( cros_settings_, connector->GetDeviceLocalAccountPolicyService(), policy::key::kUserAvatarImage, this)); avatar_policy_observer_->Init(); wallpaper_policy_observer_.reset(new policy::CloudExternalDataPolicyObserver( cros_settings_, connector->GetDeviceLocalAccountPolicyService(), policy::key::kWallpaperImage, this)); wallpaper_policy_observer_->Init(); } ChromeUserManagerImpl::~ChromeUserManagerImpl() { } void ChromeUserManagerImpl::Shutdown() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::Shutdown(); local_accounts_subscription_.reset(); // Stop the session length limiter. 
session_length_limiter_.reset(); if (device_local_account_policy_service_) device_local_account_policy_service_->RemoveObserver(this); for (UserImageManagerMap::iterator it = user_image_managers_.begin(), ie = user_image_managers_.end(); it != ie; ++it) { it->second->Shutdown(); } multi_profile_user_controller_.reset(); avatar_policy_observer_.reset(); wallpaper_policy_observer_.reset(); registrar_.RemoveAll(); } MultiProfileUserController* ChromeUserManagerImpl::GetMultiProfileUserController() { return multi_profile_user_controller_.get(); } UserImageManager* ChromeUserManagerImpl::GetUserImageManager( const std::string& user_id) { UserImageManagerMap::iterator ui = user_image_managers_.find(user_id); if (ui != user_image_managers_.end()) return ui->second.get(); linked_ptr<UserImageManagerImpl> mgr(new UserImageManagerImpl(user_id, this)); user_image_managers_[user_id] = mgr; return mgr.get(); } SupervisedUserManager* ChromeUserManagerImpl::GetSupervisedUserManager() { return supervised_user_manager_.get(); } user_manager::UserList ChromeUserManagerImpl::GetUsersAdmittedForMultiProfile() const { // Supervised users are not allowed to use multi-profiles. if (GetLoggedInUsers().size() == 1 && GetPrimaryUser()->GetType() != user_manager::USER_TYPE_REGULAR) { return user_manager::UserList(); } user_manager::UserList result; const user_manager::UserList& users = GetUsers(); for (user_manager::UserList::const_iterator it = users.begin(); it != users.end(); ++it) { if ((*it)->GetType() == user_manager::USER_TYPE_REGULAR && !(*it)->is_logged_in()) { MultiProfileUserController::UserAllowedInSessionReason check; multi_profile_user_controller_->IsUserAllowedInSession((*it)->email(), &check); if (check == MultiProfileUserController::NOT_ALLOWED_PRIMARY_USER_POLICY_FORBIDS) { return user_manager::UserList(); } // Users with a policy that prevents them being added to a session will be // shown in login UI but will be grayed out. 
// Same applies to owner account (see http://crbug.com/385034). if (check == MultiProfileUserController::ALLOWED || check == MultiProfileUserController::NOT_ALLOWED_POLICY_FORBIDS || check == MultiProfileUserController::NOT_ALLOWED_OWNER_AS_SECONDARY || check == MultiProfileUserController::NOT_ALLOWED_POLICY_CERT_TAINTED) { result.push_back(*it); } } } return result; } user_manager::UserList ChromeUserManagerImpl::GetUnlockUsers() const { const user_manager::UserList& logged_in_users = GetLoggedInUsers(); if (logged_in_users.empty()) return user_manager::UserList(); user_manager::UserList unlock_users; Profile* profile = ProfileHelper::Get()->GetProfileByUserUnsafe(GetPrimaryUser()); std::string primary_behavior = profile->GetPrefs()->GetString(prefs::kMultiProfileUserBehavior); // Specific case: only one logged in user or // primary user has primary-only multi-profile policy. if (logged_in_users.size() == 1 || primary_behavior == MultiProfileUserController::kBehaviorPrimaryOnly) { if (GetPrimaryUser()->can_lock()) unlock_users.push_back(primary_user_); } else { // Fill list of potential unlock users based on multi-profile policy state. 
for (user_manager::UserList::const_iterator it = logged_in_users.begin(); it != logged_in_users.end(); ++it) { user_manager::User* user = (*it); Profile* profile = ProfileHelper::Get()->GetProfileByUserUnsafe(user); const std::string behavior = profile->GetPrefs()->GetString(prefs::kMultiProfileUserBehavior); if (behavior == MultiProfileUserController::kBehaviorUnrestricted && user->can_lock()) { unlock_users.push_back(user); } else if (behavior == MultiProfileUserController::kBehaviorPrimaryOnly) { NOTREACHED() << "Spotted primary-only multi-profile policy for non-primary user"; } } } return unlock_users; } void ChromeUserManagerImpl::SessionStarted() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::SessionStarted(); content::NotificationService::current()->Notify( chrome::NOTIFICATION_SESSION_STARTED, content::Source<UserManager>(this), content::Details<const user_manager::User>(GetActiveUser())); } void ChromeUserManagerImpl::RemoveUserInternal( const std::string& user_email, user_manager::RemoveUserDelegate* delegate) { CrosSettings* cros_settings = CrosSettings::Get(); const base::Closure& callback = base::Bind(&ChromeUserManagerImpl::RemoveUserInternal, weak_factory_.GetWeakPtr(), user_email, delegate); // Ensure the value of owner email has been fetched. if (CrosSettingsProvider::TRUSTED != cros_settings->PrepareTrustedValues(callback)) { // Value of owner email is not fetched yet. RemoveUserInternal will be // called again after fetch completion. return; } std::string owner; cros_settings->GetString(kDeviceOwner, &owner); if (user_email == owner) { // Owner is not allowed to be removed from the device. 
return; } RemoveNonOwnerUserInternal(user_email, delegate); } void ChromeUserManagerImpl::SaveUserOAuthStatus( const std::string& user_id, user_manager::User::OAuthTokenStatus oauth_token_status) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::SaveUserOAuthStatus(user_id, oauth_token_status); GetUserFlow(user_id)->HandleOAuthTokenStatusChange(oauth_token_status); } void ChromeUserManagerImpl::SaveUserDisplayName( const std::string& user_id, const base::string16& display_name) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::SaveUserDisplayName(user_id, display_name); // Do not update local state if data stored or cached outside the user's // cryptohome is to be treated as ephemeral. if (!IsUserNonCryptohomeDataEphemeral(user_id)) supervised_user_manager_->UpdateManagerName(user_id, display_name); } void ChromeUserManagerImpl::StopPolicyObserverForTesting() { avatar_policy_observer_.reset(); wallpaper_policy_observer_.reset(); } void ChromeUserManagerImpl::Observe( int type, const content::NotificationSource& source, const content::NotificationDetails& details) { switch (type) { case chrome::NOTIFICATION_OWNERSHIP_STATUS_CHANGED: if (!device_local_account_policy_service_) { policy::BrowserPolicyConnectorChromeOS* connector = g_browser_process->platform_part() ->browser_policy_connector_chromeos(); device_local_account_policy_service_ = connector->GetDeviceLocalAccountPolicyService(); if (device_local_account_policy_service_) device_local_account_policy_service_->AddObserver(this); } RetrieveTrustedDevicePolicies(); UpdateOwnership(); break; case chrome::NOTIFICATION_LOGIN_USER_PROFILE_PREPARED: { Profile* profile = content::Details<Profile>(details).ptr(); if (IsUserLoggedIn() && !IsLoggedInAsGuest() && !IsLoggedInAsKioskApp()) { if (IsLoggedInAsSupervisedUser()) SupervisedUserPasswordServiceFactory::GetForProfile(profile); if (IsLoggedInAsRegularUser()) ManagerPasswordServiceFactory::GetForProfile(profile); if 
(!profile->IsOffTheRecord()) { AuthSyncObserver* sync_observer = AuthSyncObserverFactory::GetInstance()->GetForProfile(profile); sync_observer->StartObserving(); multi_profile_user_controller_->StartObserving(profile); } } break; } case chrome::NOTIFICATION_PROFILE_CREATED: { Profile* profile = content::Source<Profile>(source).ptr(); user_manager::User* user = ProfileHelper::Get()->GetUserByProfile(profile); if (user != NULL) user->set_profile_is_created(); // If there is pending user switch, do it now. if (!GetPendingUserSwitchID().empty()) { // Call SwitchActiveUser async because otherwise it may cause // ProfileManager::GetProfile before the profile gets registered // in ProfileManager. It happens in case of sync profile load when // NOTIFICATION_PROFILE_CREATED is called synchronously. base::MessageLoop::current()->PostTask( FROM_HERE, base::Bind(&ChromeUserManagerImpl::SwitchActiveUser, weak_factory_.GetWeakPtr(), GetPendingUserSwitchID())); SetPendingUserSwitchID(std::string()); } break; } default: NOTREACHED(); } } void ChromeUserManagerImpl::OnExternalDataSet(const std::string& policy, const std::string& user_id) { if (policy == policy::key::kUserAvatarImage) GetUserImageManager(user_id)->OnExternalDataSet(policy); else if (policy == policy::key::kWallpaperImage) WallpaperManager::Get()->OnPolicySet(policy, user_id); else NOTREACHED(); } void ChromeUserManagerImpl::OnExternalDataCleared(const std::string& policy, const std::string& user_id) { if (policy == policy::key::kUserAvatarImage) GetUserImageManager(user_id)->OnExternalDataCleared(policy); else if (policy == policy::key::kWallpaperImage) WallpaperManager::Get()->OnPolicyCleared(policy, user_id); else NOTREACHED(); } void ChromeUserManagerImpl::OnExternalDataFetched( const std::string& policy, const std::string& user_id, scoped_ptr<std::string> data) { if (policy == policy::key::kUserAvatarImage) GetUserImageManager(user_id)->OnExternalDataFetched(policy, data.Pass()); else if (policy == 
policy::key::kWallpaperImage) WallpaperManager::Get()->OnPolicyFetched(policy, user_id, data.Pass()); else NOTREACHED(); } void ChromeUserManagerImpl::OnPolicyUpdated(const std::string& user_id) { const user_manager::User* user = FindUser(user_id); if (!user || user->GetType() != user_manager::USER_TYPE_PUBLIC_ACCOUNT) return; UpdatePublicAccountDisplayName(user_id); } void ChromeUserManagerImpl::OnDeviceLocalAccountsChanged() { // No action needed here, changes to the list of device-local accounts get // handled via the kAccountsPrefDeviceLocalAccounts device setting observer. } bool ChromeUserManagerImpl::CanCurrentUserLock() const { return ChromeUserManager::CanCurrentUserLock() && GetCurrentUserFlow()->CanLockScreen(); } bool ChromeUserManagerImpl::IsUserNonCryptohomeDataEphemeral( const std::string& user_id) const { // Data belonging to the obsolete public accounts whose data has not been // removed yet is not ephemeral. bool is_obsolete_public_account = IsPublicAccountMarkedForRemoval(user_id); return !is_obsolete_public_account && ChromeUserManager::IsUserNonCryptohomeDataEphemeral(user_id); } bool ChromeUserManagerImpl::AreEphemeralUsersEnabled() const { policy::BrowserPolicyConnectorChromeOS* connector = g_browser_process->platform_part()->browser_policy_connector_chromeos(); return GetEphemeralUsersEnabled() && (connector->IsEnterpriseManaged() || !GetOwnerEmail().empty()); } const std::string& ChromeUserManagerImpl::GetApplicationLocale() const { return g_browser_process->GetApplicationLocale(); } PrefService* ChromeUserManagerImpl::GetLocalState() const { return g_browser_process ? 
g_browser_process->local_state() : NULL; } void ChromeUserManagerImpl::HandleUserOAuthTokenStatusChange( const std::string& user_id, user_manager::User::OAuthTokenStatus status) const { GetUserFlow(user_id)->HandleOAuthTokenStatusChange(status); } bool ChromeUserManagerImpl::IsEnterpriseManaged() const { policy::BrowserPolicyConnectorChromeOS* connector = g_browser_process->platform_part()->browser_policy_connector_chromeos(); return connector->IsEnterpriseManaged(); } void ChromeUserManagerImpl::LoadPublicAccounts( std::set<std::string>* public_sessions_set) { const base::ListValue* prefs_public_sessions = GetLocalState()->GetList(kPublicAccounts); std::vector<std::string> public_sessions; ParseUserList(*prefs_public_sessions, std::set<std::string>(), &public_sessions, public_sessions_set); for (std::vector<std::string>::const_iterator it = public_sessions.begin(); it != public_sessions.end(); ++it) { users_.push_back(user_manager::User::CreatePublicAccountUser(*it)); UpdatePublicAccountDisplayName(*it); } } void ChromeUserManagerImpl::PerformPreUserListLoadingActions() { // Clean up user list first. All code down the path should be synchronous, // so that local state after transaction rollback is in consistent state. // This process also should not trigger EnsureUsersLoaded again. if (supervised_user_manager_->HasFailedUserCreationTransaction()) supervised_user_manager_->RollbackUserCreationTransaction(); } void ChromeUserManagerImpl::PerformPostUserListLoadingActions() { for (user_manager::UserList::iterator ui = users_.begin(), ue = users_.end(); ui != ue; ++ui) { GetUserImageManager((*ui)->email())->LoadUserImage(); } } void ChromeUserManagerImpl::PerformPostUserLoggedInActions( bool browser_restart) { // Initialize the session length limiter and start it only if // session limit is defined by the policy. 
session_length_limiter_.reset( new SessionLengthLimiter(NULL, browser_restart)); } bool ChromeUserManagerImpl::IsDemoApp(const std::string& user_id) const { return DemoAppLauncher::IsDemoAppSession(user_id); } bool ChromeUserManagerImpl::IsKioskApp(const std::string& user_id) const { policy::DeviceLocalAccount::Type device_local_account_type; return policy::IsDeviceLocalAccountUser(user_id, &device_local_account_type) && device_local_account_type == policy::DeviceLocalAccount::TYPE_KIOSK_APP; } bool ChromeUserManagerImpl::IsPublicAccountMarkedForRemoval( const std::string& user_id) const { return user_id == GetLocalState()->GetString(kPublicAccountPendingDataRemoval); } void ChromeUserManagerImpl::RetrieveTrustedDevicePolicies() { // Local state may not be initialized in unit_tests. if (!GetLocalState()) return; SetEphemeralUsersEnabled(false); SetOwnerEmail(std::string()); // Schedule a callback if device policy has not yet been verified. if (CrosSettingsProvider::TRUSTED != cros_settings_->PrepareTrustedValues( base::Bind(&ChromeUserManagerImpl::RetrieveTrustedDevicePolicies, weak_factory_.GetWeakPtr()))) { return; } bool ephemeral_users_enabled = false; cros_settings_->GetBoolean(kAccountsPrefEphemeralUsersEnabled, &ephemeral_users_enabled); SetEphemeralUsersEnabled(ephemeral_users_enabled); std::string owner_email; cros_settings_->GetString(kDeviceOwner, &owner_email); SetOwnerEmail(owner_email); EnsureUsersLoaded(); bool changed = UpdateAndCleanUpPublicAccounts( policy::GetDeviceLocalAccounts(cros_settings_)); // If ephemeral users are enabled and we are on the login screen, take this // opportunity to clean up by removing all regular users except the owner. 
if (GetEphemeralUsersEnabled() && !IsUserLoggedIn()) { ListPrefUpdate prefs_users_update(GetLocalState(), kRegularUsers); prefs_users_update->Clear(); for (user_manager::UserList::iterator it = users_.begin(); it != users_.end();) { const std::string user_email = (*it)->email(); if ((*it)->GetType() == user_manager::USER_TYPE_REGULAR && user_email != GetOwnerEmail()) { RemoveNonCryptohomeData(user_email); DeleteUser(*it); it = users_.erase(it); changed = true; } else { if ((*it)->GetType() != user_manager::USER_TYPE_PUBLIC_ACCOUNT) prefs_users_update->Append(new base::StringValue(user_email)); ++it; } } } if (changed) NotifyUserListChanged(); } void ChromeUserManagerImpl::GuestUserLoggedIn() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::GuestUserLoggedIn(); // TODO(nkostylev): Add support for passing guest session cryptohome // mount point. Legacy (--login-profile) value will be used for now. // http://crosbug.com/230859 active_user_->SetStubImage( user_manager::UserImage( *ResourceBundle::GetSharedInstance().GetImageSkiaNamed( IDR_PROFILE_PICTURE_LOADING)), user_manager::User::USER_IMAGE_INVALID, false); // Initializes wallpaper after active_user_ is set. WallpaperManager::Get()->SetUserWallpaperNow(chromeos::login::kGuestUserName); } void ChromeUserManagerImpl::RegularUserLoggedIn(const std::string& user_id) { ChromeUserManager::RegularUserLoggedIn(user_id); if (IsCurrentUserNew()) WallpaperManager::Get()->SetUserWallpaperNow(user_id); GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), false); WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded(); // Make sure that new data is persisted to Local State. 
GetLocalState()->CommitPendingWrite(); } void ChromeUserManagerImpl::RegularUserLoggedInAsEphemeral( const std::string& user_id) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ChromeUserManager::RegularUserLoggedInAsEphemeral(user_id); GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), false); WallpaperManager::Get()->SetUserWallpaperNow(user_id); } void ChromeUserManagerImpl::SupervisedUserLoggedIn(const std::string& user_id) { // TODO(nkostylev): Refactor, share code with RegularUserLoggedIn(). // Remove the user from the user list. active_user_ = RemoveRegularOrSupervisedUserFromList(user_id); // If the user was not found on the user list, create a new user. if (!GetActiveUser()) { SetIsCurrentUserNew(true); active_user_ = user_manager::User::CreateSupervisedUser(user_id); // Leaving OAuth token status at the default state = unknown. WallpaperManager::Get()->SetUserWallpaperNow(user_id); } else { if (supervised_user_manager_->CheckForFirstRun(user_id)) { SetIsCurrentUserNew(true); WallpaperManager::Get()->SetUserWallpaperNow(user_id); } else { SetIsCurrentUserNew(false); } } // Add the user to the front of the user list. ListPrefUpdate prefs_users_update(GetLocalState(), kRegularUsers); prefs_users_update->Insert(0, new base::StringValue(user_id)); users_.insert(users_.begin(), active_user_); // Now that user is in the list, save display name. if (IsCurrentUserNew()) { SaveUserDisplayName(GetActiveUser()->email(), GetActiveUser()->GetDisplayName()); } GetUserImageManager(user_id)->UserLoggedIn(IsCurrentUserNew(), true); WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded(); // Make sure that new data is persisted to Local State. GetLocalState()->CommitPendingWrite(); } void ChromeUserManagerImpl::PublicAccountUserLoggedIn( user_manager::User* user) { SetIsCurrentUserNew(true); active_user_ = user; // The UserImageManager chooses a random avatar picture when a user logs in // for the first time. 
Tell the UserImageManager that this user is not new to // prevent the avatar from getting changed. GetUserImageManager(user->email())->UserLoggedIn(false, true); WallpaperManager::Get()->EnsureLoggedInUserWallpaperLoaded(); } void ChromeUserManagerImpl::KioskAppLoggedIn(const std::string& app_id) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); policy::DeviceLocalAccount::Type device_local_account_type; DCHECK(policy::IsDeviceLocalAccountUser(app_id, &device_local_account_type)); DCHECK_EQ(policy::DeviceLocalAccount::TYPE_KIOSK_APP, device_local_account_type); active_user_ = user_manager::User::CreateKioskAppUser(app_id); active_user_->SetStubImage( user_manager::UserImage( *ResourceBundle::GetSharedInstance().GetImageSkiaNamed( IDR_PROFILE_PICTURE_LOADING)), user_manager::User::USER_IMAGE_INVALID, false); WallpaperManager::Get()->SetUserWallpaperNow(app_id); // TODO(bartfab): Add KioskAppUsers to the users_ list and keep metadata like // the kiosk_app_id in these objects, removing the need to re-parse the // device-local account list here to extract the kiosk_app_id. const std::vector<policy::DeviceLocalAccount> device_local_accounts = policy::GetDeviceLocalAccounts(cros_settings_); const policy::DeviceLocalAccount* account = NULL; for (std::vector<policy::DeviceLocalAccount>::const_iterator it = device_local_accounts.begin(); it != device_local_accounts.end(); ++it) { if (it->user_id == app_id) { account = &*it; break; } } std::string kiosk_app_id; if (account) { kiosk_app_id = account->kiosk_app_id; } else { LOG(ERROR) << "Logged into nonexistent kiosk-app account: " << app_id; NOTREACHED(); } CommandLine* command_line = CommandLine::ForCurrentProcess(); command_line->AppendSwitch(::switches::kForceAppMode); command_line->AppendSwitchASCII(::switches::kAppId, kiosk_app_id); // Disable window animation since kiosk app runs in a single full screen // window and window animation causes start-up janks. 
command_line->AppendSwitch(wm::switches::kWindowAnimationsDisabled); } void ChromeUserManagerImpl::DemoAccountLoggedIn() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); active_user_ = user_manager::User::CreateKioskAppUser(DemoAppLauncher::kDemoUserName); active_user_->SetStubImage( user_manager::UserImage( *ResourceBundle::GetSharedInstance().GetImageSkiaNamed( IDR_PROFILE_PICTURE_LOADING)), user_manager::User::USER_IMAGE_INVALID, false); WallpaperManager::Get()->SetUserWallpaperNow(DemoAppLauncher::kDemoUserName); CommandLine* command_line = CommandLine::ForCurrentProcess(); command_line->AppendSwitch(::switches::kForceAppMode); command_line->AppendSwitchASCII(::switches::kAppId, DemoAppLauncher::kDemoAppId); // Disable window animation since the demo app runs in a single full screen // window and window animation causes start-up janks. CommandLine::ForCurrentProcess()->AppendSwitch( wm::switches::kWindowAnimationsDisabled); } void ChromeUserManagerImpl::RetailModeUserLoggedIn() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); SetIsCurrentUserNew(true); active_user_ = user_manager::User::CreateRetailModeUser(); GetUserImageManager(chromeos::login::kRetailModeUserName) ->UserLoggedIn(IsCurrentUserNew(), true); WallpaperManager::Get()->SetUserWallpaperNow( chromeos::login::kRetailModeUserName); } void ChromeUserManagerImpl::NotifyOnLogin() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); UserSessionManager::OverrideHomedir(); UpdateNumberOfUsers(); ChromeUserManager::NotifyOnLogin(); // TODO(nkostylev): Deprecate this notification in favor of // ActiveUserChanged() observer call. 
content::NotificationService::current()->Notify( chrome::NOTIFICATION_LOGIN_USER_CHANGED, content::Source<UserManager>(this), content::Details<const user_manager::User>(GetActiveUser())); UserSessionManager::GetInstance()->PerformPostUserLoggedInActions(); } void ChromeUserManagerImpl::UpdateOwnership() { bool is_owner = DeviceSettingsService::Get()->HasPrivateOwnerKey(); VLOG(1) << "Current user " << (is_owner ? "is owner" : "is not owner"); SetCurrentUserIsOwner(is_owner); } void ChromeUserManagerImpl::RemoveNonCryptohomeData( const std::string& user_id) { ChromeUserManager::RemoveNonCryptohomeData(user_id); WallpaperManager::Get()->RemoveUserWallpaperInfo(user_id); GetUserImageManager(user_id)->DeleteUserImage(); supervised_user_manager_->RemoveNonCryptohomeData(user_id); multi_profile_user_controller_->RemoveCachedValues(user_id); } void ChromeUserManagerImpl::CleanUpPublicAccountNonCryptohomeDataPendingRemoval() { PrefService* local_state = GetLocalState(); const std::string public_account_pending_data_removal = local_state->GetString(kPublicAccountPendingDataRemoval); if (public_account_pending_data_removal.empty() || (IsUserLoggedIn() && public_account_pending_data_removal == GetActiveUser()->email())) { return; } RemoveNonCryptohomeData(public_account_pending_data_removal); local_state->ClearPref(kPublicAccountPendingDataRemoval); } void ChromeUserManagerImpl::CleanUpPublicAccountNonCryptohomeData( const std::vector<std::string>& old_public_accounts) { std::set<std::string> users; for (user_manager::UserList::const_iterator it = users_.begin(); it != users_.end(); ++it) users.insert((*it)->email()); // If the user is logged into a public account that has been removed from the // user list, mark the account's data as pending removal after logout. 
if (IsLoggedInAsPublicAccount()) { const std::string active_user_id = GetActiveUser()->email(); if (users.find(active_user_id) == users.end()) { GetLocalState()->SetString(kPublicAccountPendingDataRemoval, active_user_id); users.insert(active_user_id); } } // Remove the data belonging to any other public accounts that are no longer // found on the user list. for (std::vector<std::string>::const_iterator it = old_public_accounts.begin(); it != old_public_accounts.end(); ++it) { if (users.find(*it) == users.end()) RemoveNonCryptohomeData(*it); } } bool ChromeUserManagerImpl::UpdateAndCleanUpPublicAccounts( const std::vector<policy::DeviceLocalAccount>& device_local_accounts) { // Try to remove any public account data marked as pending removal. CleanUpPublicAccountNonCryptohomeDataPendingRemoval(); // Get the current list of public accounts. std::vector<std::string> old_public_accounts; for (user_manager::UserList::const_iterator it = users_.begin(); it != users_.end(); ++it) { if ((*it)->GetType() == user_manager::USER_TYPE_PUBLIC_ACCOUNT) old_public_accounts.push_back((*it)->email()); } // Get the new list of public accounts from policy. std::vector<std::string> new_public_accounts; for (std::vector<policy::DeviceLocalAccount>::const_iterator it = device_local_accounts.begin(); it != device_local_accounts.end(); ++it) { // TODO(mnissler, nkostylev, bartfab): Process Kiosk Apps within the // standard login framework: http://crbug.com/234694 if (it->type == policy::DeviceLocalAccount::TYPE_PUBLIC_SESSION) new_public_accounts.push_back(it->user_id); } // If the list of public accounts has not changed, return. if (new_public_accounts.size() == old_public_accounts.size()) { bool changed = false; for (size_t i = 0; i < new_public_accounts.size(); ++i) { if (new_public_accounts[i] != old_public_accounts[i]) { changed = true; break; } } if (!changed) return false; } // Persist the new list of public accounts in a pref. 
ListPrefUpdate prefs_public_accounts_update(GetLocalState(), kPublicAccounts); prefs_public_accounts_update->Clear(); for (std::vector<std::string>::const_iterator it = new_public_accounts.begin(); it != new_public_accounts.end(); ++it) { prefs_public_accounts_update->AppendString(*it); } // Remove the old public accounts from the user list. for (user_manager::UserList::iterator it = users_.begin(); it != users_.end();) { if ((*it)->GetType() == user_manager::USER_TYPE_PUBLIC_ACCOUNT) { if (*it != GetLoggedInUser()) DeleteUser(*it); it = users_.erase(it); } else { ++it; } } // Add the new public accounts to the front of the user list. for (std::vector<std::string>::const_reverse_iterator it = new_public_accounts.rbegin(); it != new_public_accounts.rend(); ++it) { if (IsLoggedInAsPublicAccount() && *it == GetActiveUser()->email()) users_.insert(users_.begin(), GetLoggedInUser()); else users_.insert(users_.begin(), user_manager::User::CreatePublicAccountUser(*it)); UpdatePublicAccountDisplayName(*it); } for (user_manager::UserList::iterator ui = users_.begin(), ue = users_.begin() + new_public_accounts.size(); ui != ue; ++ui) { GetUserImageManager((*ui)->email())->LoadUserImage(); } // Remove data belonging to public accounts that are no longer found on the // user list. CleanUpPublicAccountNonCryptohomeData(old_public_accounts); return true; } void ChromeUserManagerImpl::UpdatePublicAccountDisplayName( const std::string& user_id) { std::string display_name; if (device_local_account_policy_service_) { policy::DeviceLocalAccountPolicyBroker* broker = device_local_account_policy_service_->GetBrokerForUser(user_id); if (broker) display_name = broker->GetDisplayName(); } // Set or clear the display name. 
SaveUserDisplayName(user_id, base::UTF8ToUTF16(display_name)); } UserFlow* ChromeUserManagerImpl::GetCurrentUserFlow() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); if (!IsUserLoggedIn()) return GetDefaultUserFlow(); return GetUserFlow(GetLoggedInUser()->email()); } UserFlow* ChromeUserManagerImpl::GetUserFlow(const std::string& user_id) const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); FlowMap::const_iterator it = specific_flows_.find(user_id); if (it != specific_flows_.end()) return it->second; return GetDefaultUserFlow(); } void ChromeUserManagerImpl::SetUserFlow(const std::string& user_id, UserFlow* flow) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); ResetUserFlow(user_id); specific_flows_[user_id] = flow; } void ChromeUserManagerImpl::ResetUserFlow(const std::string& user_id) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); FlowMap::iterator it = specific_flows_.find(user_id); if (it != specific_flows_.end()) { delete it->second; specific_flows_.erase(it); } } bool ChromeUserManagerImpl::AreSupervisedUsersAllowed() const { bool supervised_users_allowed = false; cros_settings_->GetBoolean(kAccountsPrefSupervisedUsersEnabled, &supervised_users_allowed); return supervised_users_allowed; } UserFlow* ChromeUserManagerImpl::GetDefaultUserFlow() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); if (!default_flow_.get()) default_flow_.reset(new DefaultUserFlow()); return default_flow_.get(); } void ChromeUserManagerImpl::NotifyUserListChanged() { content::NotificationService::current()->Notify( chrome::NOTIFICATION_USER_LIST_CHANGED, content::Source<UserManager>(this), content::NotificationService::NoDetails()); } void ChromeUserManagerImpl::NotifyUserAddedToSession( const user_manager::User* added_user, bool user_switch_pending) { if (user_switch_pending) SetPendingUserSwitchID(added_user->email()); UpdateNumberOfUsers(); ChromeUserManager::NotifyUserAddedToSession(added_user, user_switch_pending); } void 
ChromeUserManagerImpl::OnUserNotAllowed(const std::string& user_email) { LOG(ERROR) << "Shutdown session because a user is not allowed to be in the " "current session"; chromeos::ShowMultiprofilesSessionAbortedDialog(user_email); } void ChromeUserManagerImpl::UpdateNumberOfUsers() { size_t users = GetLoggedInUsers().size(); if (users) { // Write the user number as UMA stat when a multi user session is possible. if ((users + GetUsersAdmittedForMultiProfile().size()) > 1) ash::MultiProfileUMA::RecordUserCount(users); } base::debug::SetCrashKeyValue( crash_keys::kNumberOfUsers, base::StringPrintf("%" PRIuS, GetLoggedInUsers().size())); } } // namespace chromeos
7kbird/chrome
chrome/browser/chromeos/login/users/chrome_user_manager_impl.cc
C++
bsd-3-clause
39,004
/* * pointcloud_publisher_node.cpp * * Created on: Aug 19, 2021 * Author: Edo Jelavic * Institute: ETH Zurich, Robotic Systems Lab */ #include <pcl_conversions/pcl_conversions.h> #include <ros/ros.h> #include <sensor_msgs/PointCloud2.h> #include "grid_map_pcl/helpers.hpp" namespace gm = ::grid_map::grid_map_pcl; using Point = ::pcl::PointXYZ; using PointCloud = ::pcl::PointCloud<Point>; void publishCloud(const std::string& filename, const ros::Publisher& pub, const std::string& frame) { PointCloud::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>); cloud = gm::loadPointcloudFromPcd(filename); cloud->header.frame_id = frame; sensor_msgs::PointCloud2 msg; pcl::toROSMsg(*cloud, msg); ROS_INFO_STREAM("Publishing loaded cloud, number of points: " << cloud->points.size()); msg.header.stamp = ros::Time::now(); pub.publish(msg); } int main(int argc, char** argv) { ros::init(argc, argv, "point_cloud_pub_node"); ros::NodeHandle nh("~"); const std::string pathToCloud = gm::getPcdFilePath(nh); const std::string cloudFrame = nh.param<std::string>("cloud_frame", ""); // publish cloud ros::Publisher cloudPub = nh.advertise<sensor_msgs::PointCloud2>("raw_pointcloud", 1, true); publishCloud(pathToCloud, cloudPub, cloudFrame); // run ros::spin(); return EXIT_SUCCESS; }
ethz-asl/grid_map
grid_map_pcl/src/pointcloud_publisher_node.cpp
C++
bsd-3-clause
1,327
using System; using Inbox2.Framework; namespace Inbox2.Core.Configuration { public static class CloudApi { public static string ApiBaseUrl { get { return String.Format("http://api{0}.inbox2.com/", String.IsNullOrEmpty(CommandLine.Current.Environment) ? String.Empty : "." + CommandLine.Current.Environment); } } public static string ApplicationKey { get { return "ZABhADQAMgA4AGQAYQAyAA=="; } } public static string AccessToken { get { return SettingsManager.ClientSettings.AppConfiguration.AuthToken; } } } }
Klaudit/inbox2_desktop
Code/Client/Inbox2/Core/Configuration/CloudApi.cs
C#
bsd-3-clause
616
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/payments/payment_app_info_fetcher.h" #include <limits> #include <utility> #include "base/base64.h" #include "base/bind.h" #include "base/callback_helpers.h" #include "components/payments/content/icon/icon_size.h" #include "content/browser/renderer_host/render_frame_host_impl.h" #include "content/browser/service_worker/service_worker_context_wrapper.h" #include "content/browser/web_contents/web_contents_impl.h" #include "content/public/browser/browser_task_traits.h" #include "content/public/browser/browser_thread.h" #include "content/public/browser/global_routing_id.h" #include "content/public/browser/manifest_icon_downloader.h" #include "content/public/browser/page.h" #include "third_party/abseil-cpp/absl/types/optional.h" #include "third_party/blink/public/common/manifest/manifest_icon_selector.h" #include "third_party/blink/public/common/manifest/manifest_util.h" #include "third_party/blink/public/common/storage_key/storage_key.h" #include "third_party/blink/public/mojom/devtools/console_message.mojom.h" #include "third_party/blink/public/mojom/manifest/manifest.mojom.h" #include "ui/gfx/codec/png_codec.h" #include "url/origin.h" namespace content { PaymentAppInfoFetcher::PaymentAppInfo::PaymentAppInfo() {} PaymentAppInfoFetcher::PaymentAppInfo::~PaymentAppInfo() {} void PaymentAppInfoFetcher::Start( const GURL& context_url, scoped_refptr<ServiceWorkerContextWrapper> service_worker_context, PaymentAppInfoFetchCallback callback) { DCHECK_CURRENTLY_ON(BrowserThread::UI); std::unique_ptr<std::vector<GlobalRenderFrameHostId>> frame_routing_ids = service_worker_context->GetWindowClientFrameRoutingIds( blink::StorageKey(url::Origin::Create(context_url))); SelfDeleteFetcher* fetcher = new SelfDeleteFetcher(std::move(callback)); fetcher->Start(context_url, 
std::move(frame_routing_ids)); } PaymentAppInfoFetcher::WebContentsHelper::WebContentsHelper( WebContents* web_contents) : WebContentsObserver(web_contents) { DCHECK_CURRENTLY_ON(BrowserThread::UI); } PaymentAppInfoFetcher::WebContentsHelper::~WebContentsHelper() { DCHECK_CURRENTLY_ON(BrowserThread::UI); } PaymentAppInfoFetcher::SelfDeleteFetcher::SelfDeleteFetcher( PaymentAppInfoFetchCallback callback) : fetched_payment_app_info_(std::make_unique<PaymentAppInfo>()), callback_(std::move(callback)) { DCHECK_CURRENTLY_ON(BrowserThread::UI); } PaymentAppInfoFetcher::SelfDeleteFetcher::~SelfDeleteFetcher() { DCHECK_CURRENTLY_ON(BrowserThread::UI); } void PaymentAppInfoFetcher::SelfDeleteFetcher::Start( const GURL& context_url, const std::unique_ptr<std::vector<GlobalRenderFrameHostId>>& frame_routing_ids) { DCHECK_CURRENTLY_ON(BrowserThread::UI); if (frame_routing_ids->empty()) { // Cannot print this error to the developer console, because the appropriate // developer console has not been found. LOG(ERROR) << "Unable to find the top level web content for retrieving the web " "app manifest of a payment handler for \"" << context_url << "\"."; RunCallbackAndDestroy(); return; } for (const auto& frame : *frame_routing_ids) { // Find out the render frame host registering the payment app. Although a // service worker can manage instruments, the first instrument must be set // on a page that has a link to a web app manifest, so it can be fetched // here. RenderFrameHostImpl* render_frame_host = RenderFrameHostImpl::FromID(frame.child_id, frame.frame_routing_id); if (!render_frame_host || context_url.spec().compare( render_frame_host->GetLastCommittedURL().spec()) != 0) { continue; } // Get the main frame since web app manifest is only available in the main // frame's document by definition. The main frame's document must come from // the same origin. 
RenderFrameHostImpl* top_level_render_frame_host = render_frame_host; while (top_level_render_frame_host->GetParent() != nullptr) { top_level_render_frame_host = top_level_render_frame_host->GetParent(); } WebContentsImpl* top_level_web_content = static_cast<WebContentsImpl*>( WebContents::FromRenderFrameHost(top_level_render_frame_host)); if (!top_level_web_content) { top_level_render_frame_host->AddMessageToConsole( blink::mojom::ConsoleMessageLevel::kError, "Unable to find the web page for \"" + context_url.spec() + "\" to fetch payment handler manifest (for name and icon)."); continue; } if (top_level_web_content->IsHidden()) { top_level_render_frame_host->AddMessageToConsole( blink::mojom::ConsoleMessageLevel::kError, "Unable to fetch payment handler manifest (for name and icon) for " "\"" + context_url.spec() + "\" from a hidden top level web page \"" + top_level_web_content->GetLastCommittedURL().spec() + "\"."); continue; } if (!url::IsSameOriginWith(context_url, top_level_web_content->GetLastCommittedURL())) { top_level_render_frame_host->AddMessageToConsole( blink::mojom::ConsoleMessageLevel::kError, "Unable to fetch payment handler manifest (for name and icon) for " "\"" + context_url.spec() + "\" from a cross-origin top level web page \"" + top_level_web_content->GetLastCommittedURL().spec() + "\"."); continue; } web_contents_helper_ = std::make_unique<WebContentsHelper>(top_level_web_content); top_level_render_frame_host->GetPage().GetManifest( base::BindOnce(&PaymentAppInfoFetcher::SelfDeleteFetcher:: FetchPaymentAppManifestCallback, weak_ptr_factory_.GetWeakPtr())); return; } // Cannot print this error to the developer console, because the appropriate // developer console has not been found. 
LOG(ERROR) << "Unable to find the top level web content for retrieving the web " "app manifest of a payment handler for \"" << context_url << "\"."; RunCallbackAndDestroy(); } void PaymentAppInfoFetcher::SelfDeleteFetcher::RunCallbackAndDestroy() { DCHECK_CURRENTLY_ON(BrowserThread::UI); base::SequencedTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::BindOnce(std::move(callback_), std::move(fetched_payment_app_info_))); delete this; } void PaymentAppInfoFetcher::SelfDeleteFetcher::FetchPaymentAppManifestCallback( const GURL& url, blink::mojom::ManifestPtr manifest) { DCHECK_CURRENTLY_ON(BrowserThread::UI); manifest_url_ = url; if (manifest_url_.is_empty()) { WarnIfPossible( "The page that installed the payment handler does not contain a web " "app manifest link: <link rel=\"manifest\" " "href=\"some-file-name-here\">. This manifest defines the payment " "handler's name and icon. User may not recognize this payment handler " "in UI, because it will be labeled only by its origin."); RunCallbackAndDestroy(); return; } if (blink::IsEmptyManifest(manifest)) { WarnIfPossible( "Unable to download a valid payment handler web app manifest from \"" + manifest_url_.spec() + "\". This manifest cannot be empty and must in JSON format. The " "manifest defines the payment handler's name and icon. 
User may not " "recognize this payment handler in UI, because it will be labeled only " "by its origin."); RunCallbackAndDestroy(); return; } fetched_payment_app_info_->prefer_related_applications = manifest->prefer_related_applications; for (const auto& related_application : manifest->related_applications) { fetched_payment_app_info_->related_applications.emplace_back( StoredRelatedApplication()); if (related_application.platform) { base::UTF16ToUTF8( related_application.platform->c_str(), related_application.platform->length(), &(fetched_payment_app_info_->related_applications.back().platform)); } if (related_application.id) { base::UTF16ToUTF8( related_application.id->c_str(), related_application.id->length(), &(fetched_payment_app_info_->related_applications.back().id)); } } if (!manifest->name) { WarnIfPossible("The payment handler's web app manifest \"" + manifest_url_.spec() + "\" does not contain a \"name\" field. User may not " "recognize this payment handler in UI, because it will be " "labeled only by its origin."); } else if (manifest->name->empty()) { WarnIfPossible( "The \"name\" field in the payment handler's web app manifest \"" + manifest_url_.spec() + "\" is empty. User may not recognize this payment handler in UI, " "because it will be labeled only by its origin."); } else { base::UTF16ToUTF8(manifest->name->c_str(), manifest->name->length(), &(fetched_payment_app_info_->name)); } if (manifest->icons.empty()) { WarnIfPossible( "Unable to download the payment handler's icon, because the web app " "manifest \"" + manifest_url_.spec() + "\" does not contain an \"icons\" field with a valid URL in \"src\" " "sub-field. 
User may not recognize this payment handler in UI."); RunCallbackAndDestroy(); return; } WebContents* web_contents = web_contents_helper_->web_contents(); if (!web_contents) { LOG(WARNING) << "Unable to download the payment handler's icon because no " "renderer was found, possibly because the page was closed " "or navigated away during installation. User may not " "recognize this payment handler in UI, because it will be " "labeled only by its name and origin."; RunCallbackAndDestroy(); return; } gfx::NativeView native_view = web_contents->GetNativeView(); icon_url_ = blink::ManifestIconSelector::FindBestMatchingIcon( manifest->icons, payments::IconSizeCalculator::IdealIconHeight(native_view), payments::IconSizeCalculator::MinimumIconHeight(), ManifestIconDownloader::kMaxWidthToHeightRatio, blink::mojom::ManifestImageResource_Purpose::ANY); if (!icon_url_.is_valid()) { WarnIfPossible( "No suitable payment handler icon found in the \"icons\" field defined " "in the web app manifest \"" + manifest_url_.spec() + "\". This is most likely due to unsupported MIME types in the " "\"icons\" field. User may not recognize this payment handler in UI."); RunCallbackAndDestroy(); return; } bool can_download = ManifestIconDownloader::Download( web_contents, icon_url_, payments::IconSizeCalculator::IdealIconHeight(native_view), payments::IconSizeCalculator::MinimumIconHeight(), /* maximum_icon_size_in_px= */ std::numeric_limits<int>::max(), base::BindOnce(&PaymentAppInfoFetcher::SelfDeleteFetcher::OnIconFetched, weak_ptr_factory_.GetWeakPtr()), false /* square_only */); // |can_download| is false only if web contents are null or the icon URL is // not valid. Both of these conditions are manually checked above, so // |can_download| should never be false. The manual checks above are necessary // to provide more detailed error messages. 
DCHECK(can_download); } void PaymentAppInfoFetcher::SelfDeleteFetcher::OnIconFetched( const SkBitmap& icon) { DCHECK_CURRENTLY_ON(BrowserThread::UI); if (icon.drawsNothing()) { WarnIfPossible("Unable to download a valid payment handler icon from \"" + icon_url_.spec() + "\", which is defined in the web app manifest \"" + manifest_url_.spec() + "\". User may not recognize this payment handler in UI."); RunCallbackAndDestroy(); return; } std::vector<unsigned char> bitmap_data; bool success = gfx::PNGCodec::EncodeBGRASkBitmap(icon, false, &bitmap_data); DCHECK(success); base::Base64Encode( base::StringPiece(reinterpret_cast<const char*>(&bitmap_data[0]), bitmap_data.size()), &(fetched_payment_app_info_->icon)); RunCallbackAndDestroy(); } void PaymentAppInfoFetcher::SelfDeleteFetcher::WarnIfPossible( const std::string& message) { DCHECK_CURRENTLY_ON(BrowserThread::UI); DCHECK(web_contents_helper_); if (web_contents_helper_->web_contents()) { web_contents_helper_->web_contents()->GetMainFrame()->AddMessageToConsole( blink::mojom::ConsoleMessageLevel::kWarning, message); } else { LOG(WARNING) << message; } } } // namespace content
scheib/chromium
content/browser/payments/payment_app_info_fetcher.cc
C++
bsd-3-clause
13,104
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/**
 * @fileoverview A command is an abstraction of an action a user can do in the
 * UI.
 *
 * When the focus changes in the document for each command a canExecute event
 * is dispatched on the active element. By listening to this event you can
 * enable and disable the command by setting the event.canExecute property.
 *
 * When a command is executed a command event is dispatched on the active
 * element. Note that you should stop the propagation after you have handled
 * the command if there might be other command listeners higher up in the DOM
 * tree.
 */

cr.define('cr.ui', function() {

  /**
   * This is used to identify keyboard shortcuts.
   * @param {string} shortcut The text used to describe the keys for this
   *     keyboard shortcut.
   * @constructor
   */
  function KeyboardShortcut(shortcut) {
    var mods = {};
    var ident = '';
    // A shortcut is parts joined by '-'; at most one non-modifier part (the
    // key identifier) is allowed, the rest must be modifier names.
    shortcut.split('-').forEach(function(part) {
      var partLc = part.toLowerCase();
      switch (partLc) {
        case 'alt':
        case 'ctrl':
        case 'meta':
        case 'shift':
          mods[partLc + 'Key'] = true;
          break;
        default:
          if (ident)
            throw Error('Invalid shortcut');
          ident = part;
      }
    });

    this.ident_ = ident;
    this.mods_ = mods;
  }

  KeyboardShortcut.prototype = {
    /**
     * Whether the keyboard shortcut object matches a keyboard event.
     * @param {!Event} e The keyboard event object.
     * @return {boolean} Whether we found a match or not.
     */
    matchesEvent: function(e) {
      if (e.keyIdentifier == this.ident_) {
        // All keyboard modifiers need to match exactly; extra modifiers
        // held down disqualify the match.
        var mods = this.mods_;
        return ['altKey', 'ctrlKey', 'metaKey', 'shiftKey'].every(function(k) {
          return e[k] == !!mods[k];
        });
      }
      return false;
    }
  };

  /**
   * Creates a new command element.
   * @constructor
   * @extends {HTMLElement}
   */
  var Command = cr.ui.define('command');

  Command.prototype = {
    __proto__: HTMLElement.prototype,

    /**
     * Initializes the command.
     */
    decorate: function() {
      CommandManager.init(this.ownerDocument);

      if (this.hasAttribute('shortcut'))
        this.shortcut = this.getAttribute('shortcut');
    },

    /**
     * Executes the command by dispatching a command event on the given
     * element. If |element| isn't given, the active element is used instead.
     * If the command is {@code disabled} this does nothing.
     * @param {HTMLElement=} opt_element Optional element to dispatch event on.
     */
    execute: function(opt_element) {
      if (this.disabled)
        return;
      var doc = this.ownerDocument;
      if (doc.activeElement) {
        var e = new cr.Event('command', true, false);
        e.command = this;

        (opt_element || doc.activeElement).dispatchEvent(e);
      }
    },

    /**
     * Call this when there have been changes that might change whether the
     * command can be executed or not.
     * @param {Node=} opt_node Node for which to actuate command state.
     */
    canExecuteChange: function(opt_node) {
      dispatchCanExecuteEvent(this,
                              opt_node || this.ownerDocument.activeElement);
    },

    /**
     * The keyboard shortcut that triggers the command. This is a string
     * consisting of a keyIdentifier (as reported by WebKit in keydown) as
     * well as optional key modifiers joined with a '-'.
     *
     * Multiple keyboard shortcuts can be provided by separating them by
     * whitespace.
     *
     * For example:
     *   "F1"
     *   "U+0008-Meta" for Apple command backspace.
     *   "U+0041-Ctrl" for Control A
     *   "U+007F U+0008-Meta" for Delete and Command Backspace
     *
     * @type {string}
     */
    shortcut_: '',
    get shortcut() {
      return this.shortcut_;
    },
    set shortcut(shortcut) {
      var oldShortcut = this.shortcut_;
      if (shortcut !== oldShortcut) {
        this.keyboardShortcuts_ = shortcut.split(/\s+/).map(function(shortcut) {
          return new KeyboardShortcut(shortcut);
        });

        // Set this after the keyboardShortcuts_ since that might throw.
        this.shortcut_ = shortcut;
        cr.dispatchPropertyChange(this, 'shortcut', this.shortcut_,
                                  oldShortcut);
      }
    },

    /**
     * Whether the event object matches the shortcut for this command.
     * @param {!Event} e The key event object.
     * @return {boolean} Whether it matched or not.
     */
    matchesEvent: function(e) {
      if (!this.keyboardShortcuts_)
        return false;

      return this.keyboardShortcuts_.some(function(keyboardShortcut) {
        return keyboardShortcut.matchesEvent(e);
      });
    }
  };

  /**
   * The label of the command.
   * @type {string}
   */
  cr.defineProperty(Command, 'label', cr.PropertyKind.ATTR);

  /**
   * Whether the command is disabled or not.
   * @type {boolean}
   */
  cr.defineProperty(Command, 'disabled', cr.PropertyKind.BOOL_ATTR);

  /**
   * Whether the command is hidden or not.
   * @type {boolean}
   */
  cr.defineProperty(Command, 'hidden', cr.PropertyKind.BOOL_ATTR);

  /**
   * Whether the command is checked or not.
   * @type {boolean}
   */
  cr.defineProperty(Command, 'checked', cr.PropertyKind.BOOL_ATTR);

  /**
   * Dispatches a canExecute event on the target.
   * @param {cr.ui.Command} command The command that we are testing for.
   * @param {Element} target The target element to dispatch the event on.
   */
  function dispatchCanExecuteEvent(command, target) {
    // NOTE(review): the second argument here is not declared by the
    // CanExecuteEvent constructor below and appears to be ignored — confirm.
    var e = new CanExecuteEvent(command, true);
    target.dispatchEvent(e);
    command.disabled = !e.canExecute;
  }

  /**
   * The command managers for different documents.
   */
  var commandManagers = {};

  /**
   * Keeps track of the focused element and updates the commands when the
   * focus changes.
   * @param {!Document} doc The document that we are managing the commands for.
   * @constructor
   */
  function CommandManager(doc) {
    doc.addEventListener('focus', this.handleFocus_.bind(this), true);
    // Make sure we add the listener to the bubbling phase so that elements
    // can prevent the command.
    doc.addEventListener('keydown', this.handleKeyDown_.bind(this), false);
  }

  /**
   * Initializes a command manager for the document as needed.
   * @param {!Document} doc The document to manage the commands for.
   */
  CommandManager.init = function(doc) {
    var uid = cr.getUid(doc);
    if (!(uid in commandManagers)) {
      commandManagers[uid] = new CommandManager(doc);
    }
  };

  CommandManager.prototype = {

    /**
     * Handles focus changes on the document.
     * @param {Event} e The focus event object.
     * @private
     */
    handleFocus_: function(e) {
      var target = e.target;

      // Ignore focus on a menu button or command item
      if (target.menu || target.command)
        return;

      var commands = Array.prototype.slice.call(
          target.ownerDocument.querySelectorAll('command'));

      commands.forEach(function(command) {
        dispatchCanExecuteEvent(command, target);
      });
    },

    /**
     * Handles the keydown event and routes it to the right command.
     * @param {!Event} e The keydown event.
     */
    handleKeyDown_: function(e) {
      var target = e.target;
      var commands = Array.prototype.slice.call(
          target.ownerDocument.querySelectorAll('command'));

      // First enabled command whose shortcut matches wins; the event is
      // consumed so no other element handles it.
      for (var i = 0, command; command = commands[i]; i++) {
        if (!command.disabled && command.matchesEvent(e)) {
          e.preventDefault();
          // We do not want any other element to handle this.
          e.stopPropagation();
          command.execute();
          return;
        }
      }
    }
  };

  /**
   * The event type used for canExecute events.
   * @param {!cr.ui.Command} command The command that we are evaluating.
   * @extends {Event}
   * @constructor
   * @class
   */
  function CanExecuteEvent(command) {
    var e = command.ownerDocument.createEvent('Event');
    e.initEvent('canExecute', true, false);
    e.__proto__ = CanExecuteEvent.prototype;
    e.command = command;
    return e;
  }

  CanExecuteEvent.prototype = {
    __proto__: Event.prototype,

    /**
     * The current command
     * @type {cr.ui.Command}
     */
    command: null,

    /**
     * Whether the target can execute the command. Setting this also stops the
     * propagation.
     * @type {boolean}
     */
    canExecute_: false,
    get canExecute() {
      return this.canExecute_;
    },
    set canExecute(canExecute) {
      this.canExecute_ = !!canExecute;
      this.stopPropagation();
    }
  };

  // Export
  return {
    Command: Command,
    CanExecuteEvent: CanExecuteEvent
  };
});
timopulkkinen/BubbleFish
ui/webui/resources/js/cr/ui/command.js
JavaScript
bsd-3-clause
8,976
/**
 ******************************************************************************
 * api-scanner - Scan for API imports from a packaged 360 game               *
 ******************************************************************************
 * Copyright 2015 x1nixmzeng. All rights reserved.                           *
 * Released under the BSD license - see LICENSE in the root for more details *
 ******************************************************************************
 */

#include "api_scanner_loader.h"

namespace xe {
namespace tools {

DEFINE_string(target, "", "List of files to extract imports from");

// Entry point: resolves the target file (from --target or the first
// positional argument), loads its title imports, and prints each title id
// followed by its imported API names.
int api_scanner_main(std::vector<std::wstring>& args) {
  // XXX we need gflags to split multiple flags into arrays for us
  if (args.size() == 2 || !cvars::target.empty()) {
    apiscanner_loader loader_;
    std::wstring target(cvars::target.empty() ? args[1]
                                              : xe::to_wstring(cvars::target));

    // Normalize to an absolute path so relative targets resolve regardless
    // of the current working directory. (Previously this was computed but
    // never used.)
    std::wstring target_abs = xe::to_absolute_path(target);

    // XXX For each target?
    if (loader_.LoadTitleImports(target_abs)) {
      // Iterate by const reference to avoid copying title/import data.
      for (const auto& title : loader_.GetAllTitles()) {
        printf("%08x\n", title.title_id);
        for (const auto& import : title.imports) {
          printf("\t%s\n", import.c_str());
        }
      }
    }
  }

  return 0;
}

}  // namespace tools
}  // namespace xe

DEFINE_ENTRY_POINT(L"api-scanner", L"api-scanner --target=<target file>",
                   xe::tools::api_scanner_main);
sephiroth99/xenia
src/xenia/tools/api-scanner/api_scanner_main.cc
C++
bsd-3-clause
1,501
using System.Linq.Expressions;
using NHibernate.Metadata;

namespace NHibernate.Linq.Expressions
{
	/// <summary>
	/// Represents an entity reference in a LINQ-to-NHibernate expression tree.
	/// The node type is <see cref="NHibernateExpressionType.RootEntity"/> when
	/// the entity is not nested under another <see cref="EntityExpression"/>,
	/// and <see cref="NHibernateExpressionType.Entity"/> otherwise.
	/// </summary>
	public class EntityExpression : NHibernateExpression
	{
		private readonly string _alias;
		private readonly string _associationPath;
		private readonly IClassMetadata _metaData;
		private readonly Expression _expression;

		/// <summary>The query alias used to reference this entity.</summary>
		public string Alias
		{
			get { return _alias; }
		}

		/// <summary>The association path that led to this entity.</summary>
		public string AssociationPath
		{
			get { return _associationPath; }
		}

		/// <summary>NHibernate class metadata for the entity's mapped class.</summary>
		public IClassMetadata MetaData
		{
			get { return _metaData; }
		}

		/// <summary>The parent expression, or null for the query root.</summary>
		public Expression Expression
		{
			get { return _expression; }
		}

		public EntityExpression(string associationPath, string alias, System.Type type, IClassMetadata metaData, Expression expression)
			: base(IsRoot(expression) ? NHibernateExpressionType.RootEntity : NHibernateExpressionType.Entity, type)
		{
			_associationPath = associationPath;
			_alias = alias;
			_metaData = metaData;
			_expression = expression;
		}

		private static bool IsRoot(Expression expr)
		{
			// An entity is the root unless its parent is itself an
			// EntityExpression. A single type test covers both original
			// checks: (null is EntityExpression) evaluates to false, so a
			// null parent also yields true here.
			return !(expr is EntityExpression);
		}

		public override string ToString()
		{
			return Alias;
		}

		/// <summary>
		/// Returns the identifier property name, qualified with this entity's
		/// alias unless this node is the query root.
		/// </summary>
		public virtual string GetAliasedIdentifierPropertyName()
		{
			if ((NHibernateExpressionType)this.NodeType == NHibernateExpressionType.RootEntity)
			{
				return this.MetaData.IdentifierPropertyName;
			}
			return string.Format("{0}.{1}", this.Alias, this.MetaData.IdentifierPropertyName);
		}
	}
}
OrchardCMS/Orchard
src/Libraries/NHibernate/NHibernate.Linq/Expressions/EntityExpression.cs
C#
bsd-3-clause
1,479
// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package x509 import ( "strings" "time" "unicode/utf8" ) type InvalidReason int const ( // NotAuthorizedToSign results when a certificate is signed by another // which isn't marked as a CA certificate. NotAuthorizedToSign InvalidReason = iota // Expired results when a certificate has expired, based on the time // given in the VerifyOptions. Expired // CANotAuthorizedForThisName results when an intermediate or root // certificate has a name constraint which doesn't include the name // being checked. CANotAuthorizedForThisName ) // CertificateInvalidError results when an odd error occurs. Users of this // library probably want to handle all these errors uniformly. type CertificateInvalidError struct { Cert *Certificate Reason InvalidReason } func (e CertificateInvalidError) Error() string { switch e.Reason { case NotAuthorizedToSign: return "x509: certificate is not authorized to sign other other certificates" case Expired: return "x509: certificate has expired or is not yet valid" case CANotAuthorizedForThisName: return "x509: a root or intermediate certificate is not authorized to sign in this domain" } return "x509: unknown error" } // HostnameError results when the set of authorized names doesn't match the // requested name. 
// HostnameError reports that none of the certificate's authorized names
// matched the requested host.
type HostnameError struct {
	Certificate *Certificate
	Host        string
}

func (h HostnameError) Error() string {
	var valid string
	c := h.Certificate
	// Prefer the Subject Alternative Name DNS entries; fall back to the
	// Common Name only when no SANs are present.
	if len(c.DNSNames) > 0 {
		valid = strings.Join(c.DNSNames, ", ")
	} else {
		valid = c.Subject.CommonName
	}
	return "certificate is valid for " + valid + ", not " + h.Host
}

// UnknownAuthorityError results when the certificate issuer is unknown
type UnknownAuthorityError struct {
	cert *Certificate
}

func (e UnknownAuthorityError) Error() string {
	return "x509: certificate signed by unknown authority"
}

// VerifyOptions contains parameters for Certificate.Verify. It's a structure
// because other PKIX verification APIs have ended up needing many options.
type VerifyOptions struct {
	DNSName       string
	Intermediates *CertPool
	Roots         *CertPool
	CurrentTime   time.Time // if zero, the current time is used
}

// Positions a certificate can occupy in a chain; used to select which
// validity checks apply in isValid.
const (
	leafCertificate = iota
	intermediateCertificate
	rootCertificate
)

// isValid performs validity checks on the c.
// It checks the validity period against opts.CurrentTime (or time.Now),
// enforces permitted-DNS-domain name constraints against opts.DNSName, and
// for intermediates requires the CA basic constraint to be set.
func (c *Certificate) isValid(certType int, opts *VerifyOptions) error {
	now := opts.CurrentTime
	if now.IsZero() {
		now = time.Now()
	}
	if now.Before(c.NotBefore) || now.After(c.NotAfter) {
		return CertificateInvalidError{c, Expired}
	}

	if len(c.PermittedDNSDomains) > 0 {
		for _, domain := range c.PermittedDNSDomains {
			// A constraint matches when the name equals the domain, or is a
			// proper subdomain of it (suffix match with a '.' immediately
			// before the suffix).
			if opts.DNSName == domain ||
				(strings.HasSuffix(opts.DNSName, domain) &&
					len(opts.DNSName) >= 1+len(domain) &&
					opts.DNSName[len(opts.DNSName)-len(domain)-1] == '.') {
				continue
			}

			return CertificateInvalidError{c, CANotAuthorizedForThisName}
		}
	}

	// KeyUsage status flags are ignored. From Engineering Security, Peter
	// Gutmann: A European government CA marked its signing certificates as
	// being valid for encryption only, but no-one noticed. Another
	// European CA marked its signature keys as not being valid for
	// signatures. A different CA marked its own trusted root certificate
	// as being invalid for certificate signing. Another national CA
	// distributed a certificate to be used to encrypt data for the
	// country’s tax authority that was marked as only being usable for
	// digital signatures but not for encryption. Yet another CA reversed
	// the order of the bit flags in the keyUsage due to confusion over
	// encoding endianness, essentially setting a random keyUsage in
	// certificates that it issued. Another CA created a self-invalidating
	// certificate by adding a certificate policy statement stipulating
	// that the certificate had to be used strictly as specified in the
	// keyUsage, and a keyUsage containing a flag indicating that the RSA
	// encryption key could only be used for Diffie-Hellman key agreement.

	if certType == intermediateCertificate && (!c.BasicConstraintsValid || !c.IsCA) {
		return CertificateInvalidError{c, NotAuthorizedToSign}
	}

	return nil
}

// Verify attempts to verify c by building one or more chains from c to a
// certificate in opts.roots, using certificates in opts.Intermediates if
// needed. If successful, it returns one or more chains where the first
// element of the chain is c and the last element is from opts.Roots.
//
// WARNING: this doesn't do any revocation checking.
func (c *Certificate) Verify(opts VerifyOptions) (chains [][]*Certificate, err error) {
	// Validate the leaf itself before attempting any chain building.
	err = c.isValid(leafCertificate, &opts)
	if err != nil {
		return
	}
	if len(opts.DNSName) > 0 {
		err = c.VerifyHostname(opts.DNSName)
		if err != nil {
			return
		}
	}
	return c.buildChains(make(map[int][][]*Certificate), []*Certificate{c}, &opts)
}

// appendToFreshChain returns a new slice containing chain's elements
// followed by cert, never aliasing the input slice's backing array.
func appendToFreshChain(chain []*Certificate, cert *Certificate) []*Certificate {
	n := make([]*Certificate, len(chain)+1)
	copy(n, chain)
	n[len(chain)] = cert
	return n
}

// buildChains recursively extends currentChain toward a root. Chains ending
// in a verified root are collected; intermediates are explored depth-first,
// with |cache| memoizing the chains already built through each intermediate
// (keyed by its index in opts.Intermediates) and the currentChain membership
// test preventing cycles.
func (c *Certificate) buildChains(cache map[int][][]*Certificate, currentChain []*Certificate, opts *VerifyOptions) (chains [][]*Certificate, err error) {
	for _, rootNum := range opts.Roots.findVerifiedParents(c) {
		root := opts.Roots.certs[rootNum]
		err = root.isValid(rootCertificate, opts)
		if err != nil {
			continue
		}
		chains = append(chains, appendToFreshChain(currentChain, root))
	}

nextIntermediate:
	for _, intermediateNum := range opts.Intermediates.findVerifiedParents(c) {
		intermediate := opts.Intermediates.certs[intermediateNum]
		// Skip intermediates already on the chain to avoid loops.
		for _, cert := range currentChain {
			if cert == intermediate {
				continue nextIntermediate
			}
		}
		err = intermediate.isValid(intermediateCertificate, opts)
		if err != nil {
			continue
		}
		var childChains [][]*Certificate
		childChains, ok := cache[intermediateNum]
		if !ok {
			childChains, err = intermediate.buildChains(cache, appendToFreshChain(currentChain, intermediate), opts)
			cache[intermediateNum] = childChains
		}
		chains = append(chains, childChains...)
	}

	// Any successful chain outweighs errors collected along other paths.
	if len(chains) > 0 {
		err = nil
	}

	if len(chains) == 0 && err == nil {
		err = UnknownAuthorityError{c}
	}

	return
}

// matchHostnames reports whether host matches pattern label-by-label; a '*'
// label in the pattern matches exactly one host label (no partial-label or
// multi-label wildcards).
func matchHostnames(pattern, host string) bool {
	if len(pattern) == 0 || len(host) == 0 {
		return false
	}

	patternParts := strings.Split(pattern, ".")
	hostParts := strings.Split(host, ".")

	if len(patternParts) != len(hostParts) {
		return false
	}

	for i, patternPart := range patternParts {
		if patternPart == "*" {
			continue
		}
		if patternPart != hostParts[i] {
			return false
		}
	}

	return true
}

// toLowerCaseASCII returns a lower-case version of in. See RFC 6125 6.4.1. We use
// an explicitly ASCII function to avoid any sharp corners resulting from
// performing Unicode operations on DNS labels.
func toLowerCaseASCII(in string) string {
	// If the string is already lower-case then there's nothing to do.
	isAlreadyLowerCase := true
	for _, c := range in {
		if c == utf8.RuneError {
			// If we get a UTF-8 error then there might be
			// upper-case ASCII bytes in the invalid sequence.
			isAlreadyLowerCase = false
			break
		}
		if 'A' <= c && c <= 'Z' {
			isAlreadyLowerCase = false
			break
		}
	}

	if isAlreadyLowerCase {
		return in
	}

	out := []byte(in)
	for i, c := range out {
		if 'A' <= c && c <= 'Z' {
			out[i] += 'a' - 'A'
		}
	}
	return string(out)
}

// VerifyHostname returns nil if c is a valid certificate for the named host.
// Otherwise it returns an error describing the mismatch.
func (c *Certificate) VerifyHostname(h string) error {
	lowered := toLowerCaseASCII(h)

	if len(c.DNSNames) > 0 {
		for _, match := range c.DNSNames {
			if matchHostnames(toLowerCaseASCII(match), lowered) {
				return nil
			}
		}
		// If Subject Alt Name is given, we ignore the common name.
	} else if matchHostnames(toLowerCaseASCII(c.Subject.CommonName), lowered) {
		return nil
	}

	return HostnameError{c, h}
}
tav/go
src/pkg/crypto/x509/verify.go
GO
bsd-3-clause
8,108
/*
 * This file is part of Pebble.
 *
 * Copyright (c) 2014 by Mitchell Bösecke
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */
package com.mitchellbosecke.pebble.extension;

import com.mitchellbosecke.pebble.attributes.AttributeResolver;
import com.mitchellbosecke.pebble.operator.BinaryOperator;
import com.mitchellbosecke.pebble.operator.UnaryOperator;
import com.mitchellbosecke.pebble.tokenParser.TokenParser;

import java.util.List;
import java.util.Map;

/**
 * Convenience base class for {@link Extension} implementations. Every
 * contribution method returns {@code null} (meaning "this extension
 * contributes nothing of that kind"), so subclasses only need to override
 * the methods relevant to them.
 */
public abstract class AbstractExtension implements Extension {

  /** @return {@code null}: no token parsers contributed by default. */
  @Override
  public List<TokenParser> getTokenParsers() {
    return null;
  }

  /** @return {@code null}: no binary operators contributed by default. */
  @Override
  public List<BinaryOperator> getBinaryOperators() {
    return null;
  }

  /** @return {@code null}: no unary operators contributed by default. */
  @Override
  public List<UnaryOperator> getUnaryOperators() {
    return null;
  }

  /** @return {@code null}: no filters contributed by default. */
  @Override
  public Map<String, Filter> getFilters() {
    return null;
  }

  /** @return {@code null}: no tests contributed by default. */
  @Override
  public Map<String, Test> getTests() {
    return null;
  }

  /** @return {@code null}: no functions contributed by default. */
  @Override
  public Map<String, Function> getFunctions() {
    return null;
  }

  /** @return {@code null}: no global variables contributed by default. */
  @Override
  public Map<String, Object> getGlobalVariables() {
    return null;
  }

  /** @return {@code null}: no node visitors contributed by default. */
  @Override
  public List<NodeVisitorFactory> getNodeVisitors() {
    return null;
  }

  /** @return {@code null}: no attribute resolvers contributed by default. */
  @Override
  public List<AttributeResolver> getAttributeResolver() {
    return null;
  }
}
mbosecke/pebble
pebble/src/main/java/com/mitchellbosecke/pebble/extension/AbstractExtension.java
Java
bsd-3-clause
1,366
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.routing.allocation;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.cluster.ESAllocationTestCase;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;

/**
 * A base testcase that allows to run tests based on the output of the CAT API
 * The input is a line based cat/shards output like:
 *   kibana-int           0 p STARTED    2  24.8kb 10.202.245.2 r5-9-35
 *
 * the test builds up a clusterstate from the cat input and optionally runs a full balance on it.
 * This can be used to debug cluster allocation decisions.
 */
public abstract class CatAllocationTestCase extends ESAllocationTestCase {

    /** Path to the cat/shards output file to parse. */
    protected abstract Path getCatPath() throws IOException;

    /**
     * Parses the cat/shards file into per-index shard routings, builds a
     * {@link ClusterState} from them, optionally rebalances it (see
     * {@link #balanceFirst()}), then hands it to {@link #allocateNew}.
     */
    public void testRun() throws IOException {
        Set<String> nodes = new HashSet<>();
        Map<String, Idx> indices = new HashMap<>();
        try (BufferedReader reader = Files.newBufferedReader(getCatPath(), StandardCharsets.UTF_8)) {
            String line = null;
            // regexp FTW
            // Groups: 1=index, 2=shard id, 3=r|p, 4=state, 5=node IP.
            Pattern pattern = Pattern.compile("^(.+)\\s+(\\d)\\s+([rp])\\s+(STARTED|RELOCATING|INITIALIZING|UNASSIGNED)" +
                    "\\s+\\d+\\s+[0-9.a-z]+\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+).*$");
            while((line = reader.readLine()) != null) {
                final Matcher matcher;
                if ((matcher = pattern.matcher(line)).matches()) {
                    final String index = matcher.group(1);
                    Idx idx = indices.get(index);
                    if (idx == null) {
                        idx = new Idx(index);
                        indices.put(index, idx);
                    }
                    final int shard = Integer.parseInt(matcher.group(2));
                    final boolean primary = matcher.group(3).equals("p");
                    ShardRoutingState state = ShardRoutingState.valueOf(matcher.group(4));
                    String ip = matcher.group(5);
                    nodes.add(ip);
                    ShardRouting routing = TestShardRouting.newShardRouting(index, shard, ip, null, primary, state);
                    idx.add(routing);
                    logger.debug("Add routing {}", routing);
                } else {
                    fail("can't read line: " + line);
                }
            }
        }

        logger.info("Building initial routing table");
        MetaData.Builder builder = MetaData.builder();
        RoutingTable.Builder routingTableBuilder = RoutingTable.builder();
        for(Idx idx : indices.values()) {
            IndexMetaData.Builder idxMetaBuilder = IndexMetaData.builder(idx.name).settings(settings(Version.CURRENT))
                    .numberOfShards(idx.numShards()).numberOfReplicas(idx.numReplicas());
            for (ShardRouting shardRouting : idx.routing) {
                if (shardRouting.active()) {
                    // Record active shard copies as in-sync allocation ids;
                    // copy-on-write so the builder's existing set is not mutated.
                    Set<String> allocationIds = idxMetaBuilder.getInSyncAllocationIds(shardRouting.id());
                    if (allocationIds == null) {
                        allocationIds = new HashSet<>();
                    } else {
                        allocationIds = new HashSet<>(allocationIds);
                    }
                    allocationIds.add(shardRouting.allocationId().getId());
                    idxMetaBuilder.putInSyncAllocationIds(shardRouting.id(), allocationIds);
                }
            }
            IndexMetaData idxMeta = idxMetaBuilder.build();
            builder.put(idxMeta, false);
            IndexRoutingTable.Builder tableBuilder = new IndexRoutingTable.Builder(idxMeta.getIndex()).initializeAsRecovery(idxMeta);
            Map<Integer, IndexShardRoutingTable> shardIdToRouting = new HashMap<>();
            for (ShardRouting r : idx.routing) {
                // Merge all copies of the same shard id into one routing table entry.
                IndexShardRoutingTable refData = new IndexShardRoutingTable.Builder(r.shardId()).addShard(r).build();
                if (shardIdToRouting.containsKey(r.getId())) {
                    refData = new IndexShardRoutingTable.Builder(shardIdToRouting.get(r.getId())).addShard(r).build();
                }
                shardIdToRouting.put(r.getId(), refData);
            }
            for (IndexShardRoutingTable t: shardIdToRouting.values()) {
                tableBuilder.addIndexShard(t);
            }
            IndexRoutingTable table = tableBuilder.build();
            routingTableBuilder.add(table);
        }
        MetaData metaData = builder.build();
        RoutingTable routingTable = routingTableBuilder.build();
        DiscoveryNodes.Builder builderDiscoNodes = DiscoveryNodes.builder();
        for (String node : nodes) {
            builderDiscoNodes.add(newNode(node));
        }
        ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING
                .getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable).nodes(builderDiscoNodes.build()).build();
        if (balanceFirst()) {
            clusterState = rebalance(clusterState);
        }
        clusterState = allocateNew(clusterState);
    }

    /** Hook for subclasses: run the allocation decision under test. */
    protected abstract ClusterState allocateNew(ClusterState clusterState);

    /** Whether to fully balance the cluster before calling {@link #allocateNew}. */
    protected boolean balanceFirst() {
        return true;
    }

    /**
     * Reroutes and repeatedly starts initializing shards until the cluster
     * reaches a steady state (no shards left in INITIALIZING).
     */
    private ClusterState rebalance(ClusterState clusterState) {
        // Removed an unused local variable ("RoutingTable routingTable;")
        // that was declared here but never read or assigned.
        AllocationService strategy = createAllocationService(Settings.builder()
                .build());
        RoutingAllocation.Result routingResult = strategy.reroute(clusterState, "reroute");
        clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
        int numRelocations = 0;
        while (true) {
            List<ShardRouting> initializing = clusterState.routingTable().shardsWithState(INITIALIZING);
            if (initializing.isEmpty()) {
                break;
            }
            logger.debug("Initializing shards: {}", initializing);
            numRelocations += initializing.size();
            routingResult = strategy.applyStartedShards(clusterState, initializing);
            clusterState = ClusterState.builder(clusterState).routingResult(routingResult).build();
        }
        logger.debug("--> num relocations to get balance: {}", numRelocations);
        return clusterState;
    }

    /**
     * Per-index accumulator of parsed shard routings, with helpers to derive
     * the shard/replica counts implied by the cat output.
     */
    public class Idx {
        final String name;
        final List<ShardRouting> routing = new ArrayList<>();

        public Idx(String name) {
            this.name = name;
        }

        public void add(ShardRouting r) {
            routing.add(r);
        }

        /** Number of replicas, inferred from the copies of shard 0. */
        public int numReplicas() {
            int count = 0;
            for (ShardRouting msr : routing) {
                if (msr.primary() == false && msr.id()==0) {
                    count++;
                }
            }
            return count;
        }

        /** Number of shards, inferred from the highest primary shard id seen. */
        public int numShards() {
            int max = 0;
            for (ShardRouting msr : routing) {
                if (msr.primary()) {
                    max = Math.max(msr.getId()+1, max);
                }
            }
            return max;
        }
    }
}
strahanjen/strahanjen.github.io
elasticsearch-master/core/src/test/java/org/elasticsearch/cluster/routing/allocation/CatAllocationTestCase.java
Java
bsd-3-clause
8,869
/**
 * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
 */
package net.sourceforge.pmd.rules.design;

import net.sourceforge.pmd.AbstractRule;
import net.sourceforge.pmd.ast.ASTAssignmentOperator;
import net.sourceforge.pmd.ast.ASTConditionalExpression;
import net.sourceforge.pmd.ast.ASTEqualityExpression;
import net.sourceforge.pmd.ast.ASTExpression;
import net.sourceforge.pmd.ast.ASTName;
import net.sourceforge.pmd.ast.ASTNullLiteral;
import net.sourceforge.pmd.ast.ASTStatementExpression;
import net.sourceforge.pmd.symboltable.VariableNameDeclaration;

// TODO - should check that this is not the first assignment. e.g., this is OK:
// Object x;
// x = null;

/**
 * Flags assignments of the {@code null} literal to variables, and uses of
 * {@code null} in the branches of a ternary expression whose condition is
 * not an equality test.
 *
 * The fixed getNthParent() depths below encode the JavaCC parse-tree shape
 * between a NullLiteral and the enclosing statement/ternary node — NOTE(review):
 * verify these depths against the grammar version in use if the parser changes.
 */
public class NullAssignmentRule extends AbstractRule {

    public Object visit(ASTNullLiteral node, Object data) {
        if (node.getNthParent(5) instanceof ASTStatementExpression) {
            // Plain statement: possibly "x = null;".
            ASTStatementExpression n = (ASTStatementExpression) node.getNthParent(5);
            if (isAssignmentToFinalField(n)) {
                // Final fields can only be assigned once (in a constructor),
                // so a null assignment there is deliberate initialization.
                return data;
            }
            // Child 1 being an assignment operator means the statement is an
            // assignment whose right-hand side contains this null literal.
            if (n.jjtGetNumChildren() > 2 && n.jjtGetChild(1) instanceof ASTAssignmentOperator) {
                addViolation(data, node);
            }
        } else if (node.getNthParent(4) instanceof ASTConditionalExpression) {
            // "false" expression of ternary
            if (isBadTernary((ASTConditionalExpression)node.getNthParent(4))) {
                addViolation(data, node);
            }
        } else if (node.getNthParent(5) instanceof ASTConditionalExpression && node.getNthParent(4) instanceof ASTExpression) {
            // "true" expression of ternary
            if (isBadTernary((ASTConditionalExpression)node.getNthParent(5))) {
                addViolation(data, node);
            }
        }
        return data;
    }

    // True when the statement assigns to a name declared as a final field.
    private boolean isAssignmentToFinalField(ASTStatementExpression n) {
        ASTName name = n.getFirstChildOfType(ASTName.class);
        return name != null
                && name.getNameDeclaration() instanceof VariableNameDeclaration
                && ((VariableNameDeclaration) name.getNameDeclaration()).getAccessNodeParent().isFinal();
    }

    // A ternary is "bad" here unless its condition is an equality test
    // (the common "x == null ? fallback : x" null-check idiom is allowed).
    private boolean isBadTernary(ASTConditionalExpression n) {
        return n.isTernary() && !(n.jjtGetChild(0) instanceof ASTEqualityExpression);
    }
}
pscadiz/pmd-4.2.6-gds
src/net/sourceforge/pmd/rules/design/NullAssignmentRule.java
Java
bsd-3-clause
2,343
<?php

namespace Sabre\DAVACL\PrincipalBackend;

/**
 * Abstract Principal Backend
 *
 * Currently this class has no function. It's here for consistency and so we
 * have a non-bc-breaking way to add a default generic implementation to
 * functions we may add in the future.
 *
 * @copyright Copyright (C) 2007-2014 fruux GmbH (https://fruux.com/).
 * @author Evert Pot (http://evertpot.com/)
 * @license http://sabre.io/license/ Modified BSD License
 */
abstract class AbstractBackend implements BackendInterface {

    /**
     * Finds a principal by its URI.
     *
     * This method may receive any type of uri, but mailto: addresses will be
     * the most common.
     *
     * Implementation of this API is optional. It is currently used by the
     * CalDAV system to find principals based on their email addresses. If this
     * API is not implemented, some features may not work correctly.
     *
     * This method must return a relative principal path, or null, if the
     * principal was not found or you refuse to find it.
     *
     * @param string $uri
     * @return string
     */
    function findByUri($uri) {

        // Default implementation: only mailto: URIs are handled, by running
        // a principal search on the email-address property. Note this can be
        // slow and subclasses may want to override it.
        $prefix = substr($uri, 0, 7);
        if ($prefix !== 'mailto:') {
            return;
        }

        $email = substr($uri, 7);
        $matches = $this->searchPrincipals(
            '',
            ['{http://sabredav.org/ns}email-address' => $email]
        );

        if (!$matches) {
            return;
        }

        return $matches[0];

    }

}
evert/sabre-dav
lib/DAVACL/PrincipalBackend/AbstractBackend.php
PHP
bsd-3-clause
1,535
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <limits.h>
#include <stddef.h>
#include <stdint.h>

#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/strings/string_piece.h"
#include "ipc/ipc_message.h"
#include "tools/ipc_fuzzer/message_lib/message_cracker.h"
#include "tools/ipc_fuzzer/message_lib/message_file.h"
#include "tools/ipc_fuzzer/message_lib/message_file_format.h"
#include "tools/ipc_fuzzer/message_lib/message_names.h"

namespace ipc_fuzzer {

namespace {

// Helper class to read IPC message file into a MessageVector and
// fix message types. The file layout (see message_file_format.h) is:
// header, serialized messages, name table, string table.
class Reader {
 public:
  Reader(const base::FilePath& path);
  bool Read(MessageVector* messages);

 private:
  // Pops a sizeof(T)-byte prefix off file_data_ and points *object at it.
  // Fails (with a log) if fewer than sizeof(T) bytes remain.
  template <typename T>
  bool CutObject(const T** object);

  // Reads the header, checks magic and version.
  bool ReadHeader();

  // Memory-maps path_ and wraps the mapping in file_data_.
  bool MapFile();

  // Parses header_->message_count messages from file_data_.
  bool ReadMessages();

  // Last part of the file is a string table for message names.
  bool ReadStringTable();

  // Reads type <-> name mapping into name_map_. References string table.
  bool ReadNameTable();

  // Removes obsolete messages from the vector.
  bool RemoveUnknownMessages();

  // Does type -> name -> correct_type fixup.
  void FixMessageTypes();

  // Raw data.
  base::FilePath path_;
  base::MemoryMappedFile mapped_file_;
  // file_data_ is a cursor over the mapping; parsing stages consume it
  // in order via remove_prefix().
  base::StringPiece file_data_;
  base::StringPiece string_table_;

  // Parsed data.
  const FileHeader* header_;
  MessageVector* messages_;
  MessageNames name_map_;

  DISALLOW_COPY_AND_ASSIGN(Reader);
};

Reader::Reader(const base::FilePath& path)
    : path_(path),
      header_(NULL),
      messages_(NULL) {
}

template <typename T>
bool Reader::CutObject(const T** object) {
  if (file_data_.size() < sizeof(T)) {
    LOG(ERROR) << "Unexpected EOF.";
    return false;
  }
  // Points into the mapping; valid only while mapped_file_ is alive.
  *object = reinterpret_cast<const T*>(file_data_.data());
  file_data_.remove_prefix(sizeof(T));
  return true;
}

bool Reader::ReadHeader() {
  if (!CutObject<FileHeader>(&header_))
    return false;
  if (header_->magic != FileHeader::kMagicValue) {
    LOG(ERROR) << path_.value() << " is not an IPC message file.";
    return false;
  }
  if (header_->version != FileHeader::kCurrentVersion) {
    LOG(ERROR) << "Wrong version for message file " << path_.value() << ". "
               << "File version is " << header_->version << ", "
               << "current version is " << FileHeader::kCurrentVersion << ".";
    return false;
  }
  return true;
}

bool Reader::MapFile() {
  if (!mapped_file_.Initialize(path_)) {
    LOG(ERROR) << "Failed to map testcase: " << path_.value();
    return false;
  }
  const char* data = reinterpret_cast<const char*>(mapped_file_.data());
  file_data_ = base::StringPiece(data, mapped_file_.length());
  return true;
}

bool Reader::ReadMessages() {
  for (size_t i = 0; i < header_->message_count; ++i) {
    const char* begin = file_data_.begin();
    const char* end = file_data_.end();
    IPC::Message::NextMessageInfo info;
    IPC::Message::FindNext(begin, end, &info);
    if (!info.message_found) {
      LOG(ERROR) << "Failed to parse message.";
      return false;
    }
    CHECK_EQ(info.message_end, info.pickle_end);
    size_t msglen = info.message_end - begin;
    // IPC::Message takes an int length, so reject anything larger.
    if (msglen > INT_MAX) {
      LOG(ERROR) << "Message too large.";
      return false;
    }
    // Copy is necessary to fix message type later.
    IPC::Message const_message(begin, msglen);
    messages_->push_back(std::make_unique<IPC::Message>(const_message));
    file_data_.remove_prefix(msglen);
  }
  return true;
}

bool Reader::ReadStringTable() {
  size_t name_count = header_->name_count;
  if (!name_count)
    return true;
  // Guards the multiplication below against overflow.
  if (name_count > file_data_.size() / sizeof(NameTableEntry)) {
    LOG(ERROR) << "Invalid name table size: " << name_count;
    return false;
  }
  size_t string_table_offset = name_count * sizeof(NameTableEntry);
  string_table_ = file_data_.substr(string_table_offset);
  if (string_table_.empty()) {
    LOG(ERROR) << "Missing string table.";
    return false;
  }
  // A trailing NUL guarantees every offset yields a terminated C string.
  if (string_table_.end()[-1] != '\0') {
    LOG(ERROR) << "String table doesn't end with NUL.";
    return false;
  }
  return true;
}

bool Reader::ReadNameTable() {
  for (size_t i = 0; i < header_->name_count; ++i) {
    const NameTableEntry* entry;
    if (!CutObject<NameTableEntry>(&entry))
      return false;
    size_t offset = entry->string_table_offset;
    if (offset >= string_table_.size()) {
      LOG(ERROR) << "Invalid string table offset: " << offset;
      return false;
    }
    name_map_.Add(entry->type, string_table_.data() + offset);
  }
  return true;
}

bool Reader::RemoveUnknownMessages() {
  MessageVector::iterator it = messages_->begin();
  while (it != messages_->end()) {
    uint32_t type = (*it)->type();
    // Every message type must appear in the file's own name table;
    // a missing entry means the file is malformed.
    if (!name_map_.TypeExists(type)) {
      LOG(ERROR) << "Missing name table entry for type " << type;
      return false;
    }
    const std::string& name = name_map_.TypeToName(type);
    // Names unknown to the current build are dropped (not fatal).
    if (!MessageNames::GetInstance()->NameExists(name)) {
      LOG(WARNING) << "Unknown message " << name;
      it = messages_->erase(it);
    } else {
      ++it;
    }
  }
  return true;
}

// Message types are based on line numbers, so a minor edit of *_messages.h
// changes the types of messages in that file. The types are fixed here to
// increase the lifetime of message files. This is only a partial fix because
// message arguments and structure layouts can change as well.
void Reader::FixMessageTypes() {
  for (const auto& message : *messages_) {
    uint32_t type = message->type();
    const std::string& name = name_map_.TypeToName(type);
    uint32_t correct_type = MessageNames::GetInstance()->NameToType(name);
    if (type != correct_type)
      MessageCracker::SetMessageType(message.get(), correct_type);
  }
}

bool Reader::Read(MessageVector* messages) {
  messages_ = messages;
  // Stages run in file-layout order; each consumes its part of file_data_.
  if (!MapFile())
    return false;
  if (!ReadHeader())
    return false;
  if (!ReadMessages())
    return false;
  if (!ReadStringTable())
    return false;
  if (!ReadNameTable())
    return false;
  if (!RemoveUnknownMessages())
    return false;
  FixMessageTypes();
  return true;
}

}  // namespace

bool MessageFile::Read(const base::FilePath& path, MessageVector* messages) {
  Reader reader(path);
  return reader.Read(messages);
}

}  // namespace ipc_fuzzer
endlessm/chromium-browser
tools/ipc_fuzzer/message_lib/message_file_reader.cc
C++
bsd-3-clause
6,548
// Copyright (c) 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/renderer_host/chrome_url_request_user_data.h" namespace { const char* const kKeyName = "chrome_url_request_user_data"; } // namespace ChromeURLRequestUserData::ChromeURLRequestUserData() : is_prerender_(false) { } // static ChromeURLRequestUserData* ChromeURLRequestUserData::Get( const net::URLRequest* request) { DCHECK(request); return static_cast<ChromeURLRequestUserData*>(request->GetUserData(kKeyName)); } // static ChromeURLRequestUserData* ChromeURLRequestUserData::Create( net::URLRequest* request) { DCHECK(request); DCHECK(!Get(request)); ChromeURLRequestUserData* user_data = new ChromeURLRequestUserData(); request->SetUserData(kKeyName, user_data); return user_data; } // static void ChromeURLRequestUserData::Delete(net::URLRequest* request) { DCHECK(request); request->SetUserData(kKeyName, NULL); }
aYukiSekiguchi/ACCESS-Chromium
chrome/browser/renderer_host/chrome_url_request_user_data.cc
C++
bsd-3-clause
1,050
define(["require"], function (require) {
    // Lazily loads the water demo module the first time its trigger
    // button is clicked.
    function boot() {
        require(["demos/water/water"]);
    }

    const start = document.querySelector(".code-demo.water [data-trigger='water.start']");
    // { once: true } detaches the handler after the first invocation.
    // The previous ev.target.removeEventListener(...) was unreliable:
    // when the click landed on a descendant of the button, ev.target was
    // not the element the listener was registered on, so the handler was
    // never removed and the module was re-requested on every click.
    start.addEventListener("click", boot, { once: true });
    // Button ships disabled in the markup; enable it only once JS is live.
    start.disabled = false;
});
canena/canena.github.io
src/_resources/js/demos/water/boot.js
JavaScript
bsd-3-clause
333
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp
Label Definition File: CWE23_Relative_Path_Traversal.label.xml
Template File: sources-sink-66a.tmpl.cpp
*/
/*
 * @description
 * CWE: 23 Relative Path Traversal
 * BadSource: connect_socket Read data using a connect socket (client side)
 * GoodSource: Use a fixed file name
 * Sinks: w32CreateFile
 *    BadSink : Open the file named in data using CreateFile()
 * Flow Variant: 66 Data flow: data passed in an array from one function to another in different source files
 *
 * */

/* NOTE(review): This is a Juliet/SARD benchmark file. The "bad" path is
 * INTENTIONALLY vulnerable (CWE-23 relative path traversal); it exists to
 * exercise static/dynamic analysis tools and must not be "fixed". */

#include "std_testcase.h"

#ifdef _WIN32
#define BASEPATH "c:\\temp\\"
#else
#include <wchar.h>
#define BASEPATH "/tmp/"
#endif

#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif

#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"

namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66
{

#ifndef OMITBAD

/* bad function declaration */
void badSink(char * dataArray[]);

/* bad(): appends attacker-controlled bytes from a TCP socket to BASEPATH,
 * then hands the tainted path to badSink() (defined in the companion 66b
 * file) via slot 2 of a 5-element array. */
void bad()
{
    char * data;
    char * dataArray[5];
    char dataBuffer[FILENAME_MAX] = BASEPATH;
    data = dataBuffer;
    {
#ifdef _WIN32
        WSADATA wsaData;
        int wsaDataInit = 0;
#endif
        int recvResult;
        struct sockaddr_in service;
        char *replace;
        SOCKET connectSocket = INVALID_SOCKET;
        size_t dataLen = strlen(data);
        /* do/while(0) is used as a structured "goto cleanup" idiom:
         * every failure breaks out to the socket-close code below. */
        do
        {
#ifdef _WIN32
            if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
            {
                break;
            }
            wsaDataInit = 1;
#endif
            /* POTENTIAL FLAW: Read data using a connect socket */
            connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
            if (connectSocket == INVALID_SOCKET)
            {
                break;
            }
            memset(&service, 0, sizeof(service));
            service.sin_family = AF_INET;
            service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
            service.sin_port = htons(TCP_PORT);
            if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
            {
                break;
            }
            /* Abort on error or the connection was closed, make sure to recv one
             * less char than is in the recv_buf in order to append a terminator */
            /* Abort on error or the connection was closed */
            recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (FILENAME_MAX - dataLen - 1), 0);
            if (recvResult == SOCKET_ERROR || recvResult == 0)
            {
                break;
            }
            /* Append null terminator */
            data[dataLen + recvResult / sizeof(char)] = '\0';
            /* Eliminate CRLF */
            replace = strchr(data, '\r');
            if (replace)
            {
                *replace = '\0';
            }
            replace = strchr(data, '\n');
            if (replace)
            {
                *replace = '\0';
            }
        }
        while (0);
        if (connectSocket != INVALID_SOCKET)
        {
            CLOSE_SOCKET(connectSocket);
        }
#ifdef _WIN32
        if (wsaDataInit)
        {
            WSACleanup();
        }
#endif
    }
    /* put data in array */
    dataArray[2] = data;
    badSink(dataArray);
}

#endif /* OMITBAD */

#ifndef OMITGOOD
/* good function declarations */

/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(char * dataArray[]);

/* goodG2B(): same data flow as bad(), but the path is a fixed file name,
 * so the sink cannot be steered outside BASEPATH. */
static void goodG2B()
{
    char * data;
    char * dataArray[5];
    char dataBuffer[FILENAME_MAX] = BASEPATH;
    data = dataBuffer;
    /* FIX: Use a fixed file name */
    strcat(data, "file.txt");
    dataArray[2] = data;
    goodG2BSink(dataArray);
}

void good()
{
    goodG2B();
}

#endif /* OMITGOOD */

} /* close namespace */

/* Below is the main(). It is only used when building this testcase on its
 * own for testing or for building a binary to use in testing binary
 * analysis tools. It is not used when compiling all the testcases as one
 * application, which is how source code analysis tools are tested. */

#ifdef INCLUDEMAIN

using namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66; /* so that we can use good and bad easily */

int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE23_Relative_Path_Traversal/s01/CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp
C++
bsd-3-clause
5,179
/*
 * Copyright (c) 2014 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Andreas Sandberg
 */

#include "debug/VIOPci.hh"
#include "dev/virtio/pci.hh"
#include "mem/packet_access.hh"
#include "params/PciVirtIO.hh"

PciVirtIO::PciVirtIO(const Params *params)
    : PciDevice(params), queueNotify(0), interruptDeliveryPending(false),
      vio(*params->vio), callbackKick(this)
{
    // Override the subsystem ID with the device ID from VirtIO
    config.subsystemID = htole(vio.deviceId);

    // BAR0 holds the fixed legacy register block followed by the
    // device-specific configuration space.
    BARSize[0] = BAR0_SIZE_BASE + vio.configSize;

    vio.registerKickCallback(&callbackKick);
}

PciVirtIO::~PciVirtIO()
{
}

// Handle a guest read from BAR0. Registers at or above OFF_VIO_DEVICE are
// forwarded to the device model's config space; everything below is a
// legacy VirtIO-over-PCI register handled here.
Tick
PciVirtIO::read(PacketPtr pkt)
{
    const unsigned M5_VAR_USED size(pkt->getSize());
    int bar;
    Addr offset;
    if (!getBAR(pkt->getAddr(), bar, offset))
        panic("Invalid PCI memory access to unmapped memory.\n");
    assert(bar == 0);

    DPRINTF(VIOPci, "Reading offset 0x%x [len: %i]\n", offset, size);

    // Forward device configuration writes to the device VirtIO model
    if (offset >= OFF_VIO_DEVICE) {
        vio.readConfig(pkt, offset - OFF_VIO_DEVICE);
        return 0;
    }

    pkt->makeResponse();

    switch(offset) {
      case OFF_DEVICE_FEATURES:
        DPRINTF(VIOPci, "   DEVICE_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.deviceFeatures);
        break;

      case OFF_GUEST_FEATURES:
        DPRINTF(VIOPci, "   GUEST_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.getGuestFeatures());
        break;

      case OFF_QUEUE_ADDRESS:
        DPRINTF(VIOPci, "   QUEUE_ADDRESS request\n");
        assert(size == sizeof(uint32_t));
        pkt->set<uint32_t>(vio.getQueueAddress());
        break;

      case OFF_QUEUE_SIZE:
        DPRINTF(VIOPci, "   QUEUE_SIZE request\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(vio.getQueueSize());
        break;

      case OFF_QUEUE_SELECT:
        DPRINTF(VIOPci, "   QUEUE_SELECT\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(vio.getQueueSelect());
        break;

      case OFF_QUEUE_NOTIFY:
        DPRINTF(VIOPci, "   QUEUE_NOTIFY request\n");
        assert(size == sizeof(uint16_t));
        pkt->set<uint16_t>(queueNotify);
        break;

      case OFF_DEVICE_STATUS:
        DPRINTF(VIOPci, "   DEVICE_STATUS request\n");
        assert(size == sizeof(uint8_t));
        pkt->set<uint8_t>(vio.getDeviceStatus());
        break;

      case OFF_ISR_STATUS: {
          DPRINTF(VIOPci, "   ISR_STATUS\n");
          assert(size == sizeof(uint8_t));
          // Reading ISR status acknowledges (clears) a pending interrupt.
          uint8_t isr_status(interruptDeliveryPending ? 1 : 0);
          interruptDeliveryPending = false;
          pkt->set<uint8_t>(isr_status);
      } break;

      default:
        panic("Unhandled read offset (0x%x)\n", offset);
    }

    return 0;
}

// Handle a guest write to BAR0. Same register split as read() above.
Tick
PciVirtIO::write(PacketPtr pkt)
{
    const unsigned M5_VAR_USED size(pkt->getSize());
    int bar;
    Addr offset;
    if (!getBAR(pkt->getAddr(), bar, offset))
        panic("Invalid PCI memory access to unmapped memory.\n");
    assert(bar == 0);

    DPRINTF(VIOPci, "Writing offset 0x%x [len: %i]\n", offset, size);

    // Forward device configuration writes to the device VirtIO model
    if (offset >= OFF_VIO_DEVICE) {
        vio.writeConfig(pkt, offset - OFF_VIO_DEVICE);
        return 0;
    }

    pkt->makeResponse();

    switch(offset) {
      case OFF_DEVICE_FEATURES:
        warn("Guest tried to write device features.");
        break;

      case OFF_GUEST_FEATURES:
        DPRINTF(VIOPci, "   WRITE GUEST_FEATURES request\n");
        assert(size == sizeof(uint32_t));
        vio.setGuestFeatures(pkt->get<uint32_t>());
        break;

      case OFF_QUEUE_ADDRESS:
        DPRINTF(VIOPci, "   WRITE QUEUE_ADDRESS\n");
        assert(size == sizeof(uint32_t));
        vio.setQueueAddress(pkt->get<uint32_t>());
        break;

      case OFF_QUEUE_SIZE:
        panic("Guest tried to write queue size.");
        break;

      case OFF_QUEUE_SELECT:
        DPRINTF(VIOPci, "   WRITE QUEUE_SELECT\n");
        assert(size == sizeof(uint16_t));
        vio.setQueueSelect(pkt->get<uint16_t>());
        break;

      case OFF_QUEUE_NOTIFY:
        DPRINTF(VIOPci, "   WRITE QUEUE_NOTIFY\n");
        assert(size == sizeof(uint16_t));
        queueNotify = pkt->get<uint16_t>();
        vio.onNotify(queueNotify);
        break;

      case OFF_DEVICE_STATUS: {
          assert(size == sizeof(uint8_t));
          uint8_t status(pkt->get<uint8_t>());
          DPRINTF(VIOPci, "VirtIO set status: 0x%x\n", status);
          vio.setDeviceStatus(status);
      } break;

      case OFF_ISR_STATUS:
        warn("Guest tried to write ISR status.");
        break;

      default:
        // BUG FIX: this panic previously said "Unhandled read offset",
        // copy-pasted from read(); this is the write handler.
        panic("Unhandled write offset (0x%x)\n", offset);
    }

    return 0;
}

// Callback from the VirtIO device model: raise the PCI interrupt and
// latch the pending flag so the guest sees it in ISR_STATUS.
void
PciVirtIO::kick()
{
    DPRINTF(VIOPci, "kick(): Sending interrupt...\n");
    interruptDeliveryPending = true;
    intrPost();
}

PciVirtIO *
PciVirtIOParams::create()
{
    return new PciVirtIO(this);
}
etashjian/ECE757-final
src/dev/virtio/pci.cc
C++
bsd-3-clause
7,129
<?php
/**
 * Magento
 *
 * NOTICE OF LICENSE
 *
 * This source file is subject to the Open Software License (OSL 3.0)
 * that is bundled with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://opensource.org/licenses/osl-3.0.php
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to license@magentocommerce.com so we can send you a copy immediately.
 *
 * DISCLAIMER
 *
 * Do not edit or add to this file if you wish to upgrade Magento to newer
 * versions in the future. If you wish to customize Magento for your
 * needs please refer to http://www.magentocommerce.com for more information.
 *
 * @category    Mage
 * @package     Mage_Adminhtml
 * @copyright   Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com)
 * @license     http://opensource.org/licenses/osl-3.0.php  Open Software License (OSL 3.0)
 */

/**
 * Admin permissions tab listing the users attached to a role.
 * Renders a user grid child block inside the roles/users template.
 */
class Mage_Adminhtml_Block_Permissions_Tab_Rolesusers extends Mage_Adminhtml_Block_Widget_Tabs
{
    /**
     * Load the full admin-user collection and expose it, together with
     * the role id from the request, to the tab template.
     */
    public function __construct()
    {
        parent::__construct();

        // false is the default when no role id was passed in the request.
        $roleId = $this->getRequest()->getParam('rid', false);
        $userCollection = Mage::getModel("admin/user")->getCollection()->load();

        $this->setTemplate('permissions/rolesusers.phtml')
            ->assign('users', $userCollection->getItems())
            ->assign('roleId', $roleId);
    }

    /**
     * Attach the role-user grid as a child block.
     *
     * @return Mage_Adminhtml_Block_Permissions_Tab_Rolesusers
     */
    protected function _prepareLayout()
    {
        $grid = $this->getLayout()
            ->createBlock('adminhtml/permissions_role_grid_user', 'roleUsersGrid');
        $this->setChild('userGrid', $grid);

        return parent::_prepareLayout();
    }

    /**
     * @return string rendered HTML of the user grid child block
     */
    protected function _getGridHtml()
    {
        return $this->getChildHtml('userGrid');
    }

    /**
     * @return string JS object name of the child grid
     */
    protected function _getJsObjectName()
    {
        return $this->getChild('userGrid')->getJsObjectName();
    }
}
5452/durex
includes/src/Mage_Adminhtml_Block_Permissions_Tab_Rolesusers.php
PHP
bsd-3-clause
1,869
'use strict'; angular.module("ngLocale", [], ["$provide", function ($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; function getDecimals(n) { n = n + ''; var i = n.indexOf('.'); return (i == -1) ? 0 : n.length - i - 1; } function getVF(n, opt_precision) { var v = opt_precision; if (undefined === v) { v = Math.min(getDecimals(n), 3); } var base = Math.pow(10, v); var f = ((n * base) | 0) % base; return {v: v, f: f}; } $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "AM", "PM" ], "DAY": [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ], "ERANAMES": [ "Before Christ", "Anno Domini" ], "ERAS": [ "BC", "AD" ], "FIRSTDAYOFWEEK": 6, "MONTH": [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], "SHORTDAY": [ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" ], "SHORTMONTH": [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ], "STANDALONEMONTH": [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ], "WEEKENDRANGE": [ 5, 6 ], "fullDate": "EEEE, d MMMM y", "longDate": "d MMMM y", "medium": "d MMM y h:mm:ss a", "mediumDate": "d MMM y", "mediumTime": "h:mm:ss a", "short": "d/M/yy h:mm a", "shortDate": "d/M/yy", "shortTime": "h:mm a" }, "NUMBER_FORMATS": { "CURRENCY_SYM": "$", "DECIMAL_SEP": ".", "GROUP_SEP": ",", "PATTERNS": [ { "gSize": 3, "lgSize": 3, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "-", "negSuf": "", "posPre": "", "posSuf": "" }, { "gSize": 3, "lgSize": 3, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "-\u00a4", "negSuf": "", "posPre": "\u00a4", "posSuf": "" } ] }, "id": "en-jm", "localeID": "en_JM", "pluralCat": function (n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return 
PLURAL_CATEGORY.OTHER; } }); }]);
mudunuriRaju/tlr-live
tollbackend/web/js/angular-1.5.5/i18n/angular-locale_en-jm.js
JavaScript
bsd-3-clause
4,012
/*
** License Applicability. Except to the extent portions of this file are
** made subject to an alternative license as permitted in the SGI Free
** Software License B, Version 1.1 (the "License"), the contents of this
** file are subject only to the provisions of the License. You may not use
** this file except in compliance with the License. You may obtain a copy
** of the License at Silicon Graphics, Inc., attn: Legal Services, 1600
** Amphitheatre Parkway, Mountain View, CA 94043-1351, or at:
**
** http://oss.sgi.com/projects/FreeB
**
** Note that, as provided in the License, the Software is distributed on an
** "AS IS" basis, with ALL EXPRESS AND IMPLIED WARRANTIES AND CONDITIONS
** DISCLAIMED, INCLUDING, WITHOUT LIMITATION, ANY IMPLIED WARRANTIES AND
** CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY, FITNESS FOR A
** PARTICULAR PURPOSE, AND NON-INFRINGEMENT.
**
** Original Code. The Original Code is: OpenGL Sample Implementation,
** Version 1.2.1, released January 26, 2000, developed by Silicon Graphics,
** Inc. The Original Code is Copyright (c) 1991-2000 Silicon Graphics, Inc.
** Copyright in any portions created by third parties is as indicated
** elsewhere herein. All Rights Reserved.
**
** Additional Notice Provisions: The application programming interfaces
** established by SGI in conjunction with the Original Code are The
** OpenGL(R) Graphics System: A Specification (Version 1.2.1), released
** April 1, 1999; The OpenGL(R) Graphics System Utility Library (Version
** 1.3), released November 4, 1998; and OpenGL(R) Graphics with the X
** Window System(R) (Version 1.3), released October 19, 1998. This software
** was created using the OpenGL(R) version 1.2.1 Sample Implementation
** published by SGI, but has not been independently verified as being
** compliant with the OpenGL(R) version 1.2.1 Specification.
*/

/*
 * bufpool.c++
 *
 * $Date: 2004/05/12 15:29:36 $ $Revision: 1.2 $
 * $Header: /home/krh/git/sync/mesa-cvs-repo/Mesa/src/glu/sgi/libnurbs/internals/bufpool.cc,v 1.2 2004/05/12 15:29:36 brianp Exp $
 */

#include "glimports.h"
#include "myassert.h"
#include "bufpool.h"

/*-----------------------------------------------------------------------------
 * Pool - allocate a new pool of buffers
 *
 * Buffers are at least sizeof(Buffer) bytes so each one can hold a
 * free-list link. The pool grows geometrically (nextsize doubles on each
 * grow()); `magic` tags live pools so the other methods can assert the
 * object hasn't been freed.
 *-----------------------------------------------------------------------------
 */
Pool::Pool( int _buffersize, int initpoolsize, const char *n )
{
    if((unsigned)_buffersize < sizeof(Buffer))
        buffersize = sizeof(Buffer);
    else
        buffersize = _buffersize;
    initsize = initpoolsize * buffersize;
    nextsize = initsize;
    name = n;
    magic = is_allocated;
    nextblock = 0;
    curblock = 0;
    freelist = 0;
    nextfree = 0;
}

/*-----------------------------------------------------------------------------
 * ~Pool - free a pool of buffers and the pool itself
 *-----------------------------------------------------------------------------
 */
Pool::~Pool( void )
{
    /* NOTE(review): `this != 0` in these asserts is always true in
     * well-formed C++ (undefined behavior otherwise) and modern compilers
     * may optimize it away; kept unchanged here. */
    assert( (this != 0) && (magic == is_allocated) );

    /* Release every block allocated by grow(). */
    while( nextblock ) {
        delete [] blocklist[--nextblock];
        blocklist[nextblock] = 0;
    }
    magic = is_free;
}

/* grow - allocate the next (doubled-size) block and make it current.
 * nextfree is the number of unconsumed bytes left in curblock. */
void
Pool::grow( void )
{
    assert( (this != 0) && (magic == is_allocated) );
    curblock = new char[nextsize];
    blocklist[nextblock++] = curblock;
    nextfree = nextsize;
    nextsize *= 2;
    /* NOTE(review): blocklist capacity is declared in bufpool.h; there is
     * no overflow check on nextblock here — confirm the bound there. */
}

/*-----------------------------------------------------------------------------
 * Pool::clear - free buffers associated with pool but keep pool
 *
 * Shrinks nextsize back toward initsize so a reused pool doesn't keep the
 * largest block size it ever reached.
 *-----------------------------------------------------------------------------
 */
void
Pool::clear( void )
{
    assert( (this != 0) && (magic == is_allocated) );
    while( nextblock ) {
        delete [] blocklist[--nextblock];
        blocklist[nextblock] = 0;
    }
    curblock = 0;
    freelist = 0;
    nextfree = 0;
    if( nextsize > initsize )
        nextsize /= 2;
}
anasazi/POP-REU-Project
pkgs/libs/mesa/src/src/glu/sgi/libnurbs/internals/bufpool.cc
C++
bsd-3-clause
3,853
from django.apps import AppConfig


class ContentStoreAppConfig(AppConfig):
    """Django application configuration for the contentstore app."""

    # Dotted module path Django uses to register this application.
    name = "contentstore"

    def ready(self):
        """Import the app's signals module once the app registry is ready.

        Importing ``contentstore.signals`` here (rather than at module
        scope) follows the standard Django pattern — presumably the import
        has the side effect of registering signal receivers; confirm in
        contentstore/signals.py. The previous code followed the import
        with a bare ``contentstore.signals`` expression, a no-op that only
        existed to silence unused-import linters; the ``noqa`` marker
        replaces it.
        """
        import contentstore.signals  # noqa: F401
praekelt/seed-stage-based-messaging
contentstore/apps.py
Python
bsd-3-clause
190
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "base/prefs/json_pref_store.h" #include <algorithm> #include "base/bind.h" #include "base/callback.h" #include "base/file_util.h" #include "base/json/json_file_value_serializer.h" #include "base/json/json_string_value_serializer.h" #include "base/memory/ref_counted.h" #include "base/message_loop/message_loop_proxy.h" #include "base/sequenced_task_runner.h" #include "base/threading/sequenced_worker_pool.h" #include "base/values.h" namespace { // Some extensions we'll tack on to copies of the Preferences files. const base::FilePath::CharType* kBadExtension = FILE_PATH_LITERAL("bad"); // Differentiates file loading between origin thread and passed // (aka file) thread. class FileThreadDeserializer : public base::RefCountedThreadSafe<FileThreadDeserializer> { public: FileThreadDeserializer(JsonPrefStore* delegate, base::SequencedTaskRunner* sequenced_task_runner) : no_dir_(false), error_(PersistentPrefStore::PREF_READ_ERROR_NONE), delegate_(delegate), sequenced_task_runner_(sequenced_task_runner), origin_loop_proxy_(base::MessageLoopProxy::current()) { } void Start(const base::FilePath& path) { DCHECK(origin_loop_proxy_->BelongsToCurrentThread()); sequenced_task_runner_->PostTask( FROM_HERE, base::Bind(&FileThreadDeserializer::ReadFileAndReport, this, path)); } // Deserializes JSON on the sequenced task runner. void ReadFileAndReport(const base::FilePath& path) { DCHECK(sequenced_task_runner_->RunsTasksOnCurrentThread()); value_.reset(DoReading(path, &error_, &no_dir_)); origin_loop_proxy_->PostTask( FROM_HERE, base::Bind(&FileThreadDeserializer::ReportOnOriginThread, this)); } // Reports deserialization result on the origin thread. 
void ReportOnOriginThread() { DCHECK(origin_loop_proxy_->BelongsToCurrentThread()); delegate_->OnFileRead(value_.release(), error_, no_dir_); } static base::Value* DoReading(const base::FilePath& path, PersistentPrefStore::PrefReadError* error, bool* no_dir) { int error_code; std::string error_msg; JSONFileValueSerializer serializer(path); base::Value* value = serializer.Deserialize(&error_code, &error_msg); HandleErrors(value, path, error_code, error_msg, error); *no_dir = !base::PathExists(path.DirName()); return value; } static void HandleErrors(const base::Value* value, const base::FilePath& path, int error_code, const std::string& error_msg, PersistentPrefStore::PrefReadError* error); private: friend class base::RefCountedThreadSafe<FileThreadDeserializer>; ~FileThreadDeserializer() {} bool no_dir_; PersistentPrefStore::PrefReadError error_; scoped_ptr<base::Value> value_; const scoped_refptr<JsonPrefStore> delegate_; const scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner_; const scoped_refptr<base::MessageLoopProxy> origin_loop_proxy_; }; // static void FileThreadDeserializer::HandleErrors( const base::Value* value, const base::FilePath& path, int error_code, const std::string& error_msg, PersistentPrefStore::PrefReadError* error) { *error = PersistentPrefStore::PREF_READ_ERROR_NONE; if (!value) { DVLOG(1) << "Error while loading JSON file: " << error_msg << ", file: " << path.value(); switch (error_code) { case JSONFileValueSerializer::JSON_ACCESS_DENIED: *error = PersistentPrefStore::PREF_READ_ERROR_ACCESS_DENIED; break; case JSONFileValueSerializer::JSON_CANNOT_READ_FILE: *error = PersistentPrefStore::PREF_READ_ERROR_FILE_OTHER; break; case JSONFileValueSerializer::JSON_FILE_LOCKED: *error = PersistentPrefStore::PREF_READ_ERROR_FILE_LOCKED; break; case JSONFileValueSerializer::JSON_NO_SUCH_FILE: *error = PersistentPrefStore::PREF_READ_ERROR_NO_FILE; break; default: *error = PersistentPrefStore::PREF_READ_ERROR_JSON_PARSE; // JSON errors 
indicate file corruption of some sort. // Since the file is corrupt, move it to the side and continue with // empty preferences. This will result in them losing their settings. // We keep the old file for possible support and debugging assistance // as well as to detect if they're seeing these errors repeatedly. // TODO(erikkay) Instead, use the last known good file. base::FilePath bad = path.ReplaceExtension(kBadExtension); // If they've ever had a parse error before, put them in another bucket. // TODO(erikkay) if we keep this error checking for very long, we may // want to differentiate between recent and long ago errors. if (base::PathExists(bad)) *error = PersistentPrefStore::PREF_READ_ERROR_JSON_REPEAT; base::Move(path, bad); break; } } else if (!value->IsType(base::Value::TYPE_DICTIONARY)) { *error = PersistentPrefStore::PREF_READ_ERROR_JSON_TYPE; } } } // namespace scoped_refptr<base::SequencedTaskRunner> JsonPrefStore::GetTaskRunnerForFile( const base::FilePath& filename, base::SequencedWorkerPool* worker_pool) { std::string token("json_pref_store-"); token.append(filename.AsUTF8Unsafe()); return worker_pool->GetSequencedTaskRunnerWithShutdownBehavior( worker_pool->GetNamedSequenceToken(token), base::SequencedWorkerPool::BLOCK_SHUTDOWN); } JsonPrefStore::JsonPrefStore(const base::FilePath& filename, base::SequencedTaskRunner* sequenced_task_runner) : path_(filename), sequenced_task_runner_(sequenced_task_runner), prefs_(new base::DictionaryValue()), read_only_(false), writer_(filename, sequenced_task_runner), initialized_(false), read_error_(PREF_READ_ERROR_OTHER) {} bool JsonPrefStore::GetValue(const std::string& key, const base::Value** result) const { base::Value* tmp = NULL; if (!prefs_->Get(key, &tmp)) return false; if (result) *result = tmp; return true; } void JsonPrefStore::AddObserver(PrefStore::Observer* observer) { observers_.AddObserver(observer); } void JsonPrefStore::RemoveObserver(PrefStore::Observer* observer) { 
observers_.RemoveObserver(observer); } bool JsonPrefStore::HasObservers() const { return observers_.might_have_observers(); } bool JsonPrefStore::IsInitializationComplete() const { return initialized_; } bool JsonPrefStore::GetMutableValue(const std::string& key, base::Value** result) { return prefs_->Get(key, result); } void JsonPrefStore::SetValue(const std::string& key, base::Value* value) { DCHECK(value); scoped_ptr<base::Value> new_value(value); base::Value* old_value = NULL; prefs_->Get(key, &old_value); if (!old_value || !value->Equals(old_value)) { prefs_->Set(key, new_value.release()); ReportValueChanged(key); } } void JsonPrefStore::SetValueSilently(const std::string& key, base::Value* value) { DCHECK(value); scoped_ptr<base::Value> new_value(value); base::Value* old_value = NULL; prefs_->Get(key, &old_value); if (!old_value || !value->Equals(old_value)) { prefs_->Set(key, new_value.release()); if (!read_only_) writer_.ScheduleWrite(this); } } void JsonPrefStore::RemoveValue(const std::string& key) { if (prefs_->Remove(key, NULL)) ReportValueChanged(key); } void JsonPrefStore::MarkNeedsEmptyValue(const std::string& key) { keys_need_empty_value_.insert(key); } bool JsonPrefStore::ReadOnly() const { return read_only_; } PersistentPrefStore::PrefReadError JsonPrefStore::GetReadError() const { return read_error_; } PersistentPrefStore::PrefReadError JsonPrefStore::ReadPrefs() { if (path_.empty()) { OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false); return PREF_READ_ERROR_FILE_NOT_SPECIFIED; } PrefReadError error; bool no_dir; base::Value* value = FileThreadDeserializer::DoReading(path_, &error, &no_dir); OnFileRead(value, error, no_dir); return error; } void JsonPrefStore::ReadPrefsAsync(ReadErrorDelegate *error_delegate) { initialized_ = false; error_delegate_.reset(error_delegate); if (path_.empty()) { OnFileRead(NULL, PREF_READ_ERROR_FILE_NOT_SPECIFIED, false); return; } // Start async reading of the preferences file. 
It will delete itself // in the end. scoped_refptr<FileThreadDeserializer> deserializer( new FileThreadDeserializer(this, sequenced_task_runner_.get())); deserializer->Start(path_); } void JsonPrefStore::CommitPendingWrite() { if (writer_.HasPendingWrite() && !read_only_) writer_.DoScheduledWrite(); } void JsonPrefStore::ReportValueChanged(const std::string& key) { FOR_EACH_OBSERVER(PrefStore::Observer, observers_, OnPrefValueChanged(key)); if (!read_only_) writer_.ScheduleWrite(this); } void JsonPrefStore::OnFileRead(base::Value* value_owned, PersistentPrefStore::PrefReadError error, bool no_dir) { scoped_ptr<base::Value> value(value_owned); read_error_ = error; if (no_dir) { FOR_EACH_OBSERVER(PrefStore::Observer, observers_, OnInitializationCompleted(false)); return; } initialized_ = true; switch (error) { case PREF_READ_ERROR_ACCESS_DENIED: case PREF_READ_ERROR_FILE_OTHER: case PREF_READ_ERROR_FILE_LOCKED: case PREF_READ_ERROR_JSON_TYPE: case PREF_READ_ERROR_FILE_NOT_SPECIFIED: read_only_ = true; break; case PREF_READ_ERROR_NONE: DCHECK(value.get()); prefs_.reset(static_cast<base::DictionaryValue*>(value.release())); break; case PREF_READ_ERROR_NO_FILE: // If the file just doesn't exist, maybe this is first run. In any case // there's no harm in writing out default prefs in this case. break; case PREF_READ_ERROR_JSON_PARSE: case PREF_READ_ERROR_JSON_REPEAT: break; default: NOTREACHED() << "Unknown error: " << error; } if (error_delegate_.get() && error != PREF_READ_ERROR_NONE) error_delegate_->OnError(error); FOR_EACH_OBSERVER(PrefStore::Observer, observers_, OnInitializationCompleted(true)); } JsonPrefStore::~JsonPrefStore() { CommitPendingWrite(); } bool JsonPrefStore::SerializeData(std::string* output) { // TODO(tc): Do we want to prune webkit preferences that match the default // value? 
JSONStringValueSerializer serializer(output); serializer.set_pretty_print(true); scoped_ptr<base::DictionaryValue> copy( prefs_->DeepCopyWithoutEmptyChildren()); // Iterates |keys_need_empty_value_| and if the key exists in |prefs_|, // ensure its empty ListValue or DictonaryValue is preserved. for (std::set<std::string>::const_iterator it = keys_need_empty_value_.begin(); it != keys_need_empty_value_.end(); ++it) { const std::string& key = *it; base::Value* value = NULL; if (!prefs_->Get(key, &value)) continue; if (value->IsType(base::Value::TYPE_LIST)) { const base::ListValue* list = NULL; if (value->GetAsList(&list) && list->empty()) copy->Set(key, new base::ListValue); } else if (value->IsType(base::Value::TYPE_DICTIONARY)) { const base::DictionaryValue* dict = NULL; if (value->GetAsDictionary(&dict) && dict->empty()) copy->Set(key, new base::DictionaryValue); } } return serializer.Serialize(*(copy.get())); }
cvsuser-chromium/chromium
base/prefs/json_pref_store.cc
C++
bsd-3-clause
11,988
<?php /** * Zym Framework * * LICENSE * * This source file is subject to the new BSD license that is bundled * with this package in the file LICENSE.txt. * * @category Zym * @package Zym_View * @subpackage Helper * @copyright Copyright (c) 2008 Zym. (http://www.zym-project.com/) * @license http://www.zym-project.com/license New BSD License */ /** * @see Zend_Controller_Front */ require_once 'Zend/Controller/Front.php'; /** * Get response obj * * @author Geoffrey Tran * @license http://www.zym-project.com/license New BSD License * @package Zym_View * @subpackage Helper * @copyright Copyright (c) 2008 Zym. (http://www.zym-project.com/) */ class Zym_View_Helper_GetResponse { /** * Get the response object * * @return Zend_Controller_Response_Abstract */ public function getResponse() { return Zend_Controller_Front::getInstance()->getResponse(); } }
robinsk/zym
library/Zym/View/Helper/GetResponse.php
PHP
bsd-3-clause
928
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magentocommerce.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_XmlConnect * @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Xmlconnect form checkbox element * * @category Mage * @package Mage_XmlConnect * @author Magento Core Team <core@magentocommerce.com> */ class Mage_XmlConnect_Model_Simplexml_Form_Element_Checkbox extends Mage_XmlConnect_Model_Simplexml_Form_Element_Abstract { /** * Init checkbox element * * @param array $attributes */ public function __construct($attributes = array()) { parent::__construct($attributes); $this->setType('checkbox'); } /** * Add value to element * * @param Mage_XmlConnect_Model_Simplexml_Element $xmlObj * @return Mage_XmlConnect_Model_Simplexml_Form_Element_Abstract */ protected function _addValue(Mage_XmlConnect_Model_Simplexml_Element $xmlObj) { $xmlObj->addAttribute('value', (int)$this->getValue()); return $this; } }
5452/durex
includes/src/Mage_XmlConnect_Model_Simplexml_Form_Element_Checkbox.php
PHP
bsd-3-clause
1,850
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magentocommerce.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @category Mage * @package Mage_Catalog * @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Catalog product option values collection * * @category Mage * @package Mage_Catalog * @author Magento Core Team <core@magentocommerce.com> */ class Mage_Catalog_Model_Resource_Product_Option_Value_Collection extends Mage_Core_Model_Resource_Db_Collection_Abstract { /** * Resource initialization */ protected function _construct() { $this->_init('catalog/product_option_value'); } /** * Add price, title to result * * @param int $storeId * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function getValues($storeId) { $this->addPriceToResult($storeId) ->addTitleToResult($storeId); return $this; } /** * Add titles to result * * @param int $storeId * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function addTitlesToResult($storeId) { $adapter = $this->getConnection(); $optionTypePriceTable = $this->getTable('catalog/product_option_type_price'); $optionTitleTable = $this->getTable('catalog/product_option_type_title'); $priceExpr = $adapter->getCheckSql( 'store_value_price.price IS NULL', 
'default_value_price.price', 'store_value_price.price' ); $priceTypeExpr = $adapter->getCheckSql( 'store_value_price.price_type IS NULL', 'default_value_price.price_type', 'store_value_price.price_type' ); $titleExpr = $adapter->getCheckSql( 'store_value_title.title IS NULL', 'default_value_title.title', 'store_value_title.title' ); $joinExprDefaultPrice = 'default_value_price.option_type_id = main_table.option_type_id AND ' . $adapter->quoteInto('default_value_price.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID); $joinExprStorePrice = 'store_value_price.option_type_id = main_table.option_type_id AND ' . $adapter->quoteInto('store_value_price.store_id = ?', $storeId); $joinExprTitle = 'store_value_title.option_type_id = main_table.option_type_id AND ' . $adapter->quoteInto('store_value_title.store_id = ?', $storeId); $this->getSelect() ->joinLeft( array('default_value_price' => $optionTypePriceTable), $joinExprDefaultPrice, array('default_price'=>'price','default_price_type'=>'price_type') ) ->joinLeft( array('store_value_price' => $optionTypePriceTable), $joinExprStorePrice, array( 'store_price' => 'price', 'store_price_type' => 'price_type', 'price' => $priceExpr, 'price_type' => $priceTypeExpr ) ) ->join( array('default_value_title' => $optionTitleTable), 'default_value_title.option_type_id = main_table.option_type_id', array('default_title' => 'title') ) ->joinLeft( array('store_value_title' => $optionTitleTable), $joinExprTitle, array( 'store_title' => 'title', 'title' => $titleExpr) ) ->where('default_value_title.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID); return $this; } /** * Add title result * * @param int $storeId * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function addTitleToResult($storeId) { $optionTitleTable = $this->getTable('catalog/product_option_type_title'); $titleExpr = $this->getConnection() ->getCheckSql('store_value_title.title IS NULL', 'default_value_title.title', 
'store_value_title.title'); $joinExpr = 'store_value_title.option_type_id = main_table.option_type_id AND ' . $this->getConnection()->quoteInto('store_value_title.store_id = ?', $storeId); $this->getSelect() ->join( array('default_value_title' => $optionTitleTable), 'default_value_title.option_type_id = main_table.option_type_id', array('default_title' => 'title') ) ->joinLeft( array('store_value_title' => $optionTitleTable), $joinExpr, array( 'store_title' => 'title', 'title' => $titleExpr ) ) ->where('default_value_title.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID); return $this; } /** * Add price to result * * @param int $storeId * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function addPriceToResult($storeId) { $optionTypeTable = $this->getTable('catalog/product_option_type_price'); $priceExpr = $this->getConnection() ->getCheckSql('store_value_price.price IS NULL', 'default_value_price.price', 'store_value_price.price'); $priceTypeExpr = $this->getConnection() ->getCheckSql( 'store_value_price.price_type IS NULL', 'default_value_price.price_type', 'store_value_price.price_type' ); $joinExprDefault = 'default_value_price.option_type_id = main_table.option_type_id AND ' . $this->getConnection()->quoteInto('default_value_price.store_id = ?', Mage_Catalog_Model_Abstract::DEFAULT_STORE_ID); $joinExprStore = 'store_value_price.option_type_id = main_table.option_type_id AND ' . 
$this->getConnection()->quoteInto('store_value_price.store_id = ?', $storeId); $this->getSelect() ->joinLeft( array('default_value_price' => $optionTypeTable), $joinExprDefault, array( 'default_price' => 'price', 'default_price_type'=>'price_type' ) ) ->joinLeft( array('store_value_price' => $optionTypeTable), $joinExprStore, array( 'store_price' => 'price', 'store_price_type' => 'price_type', 'price' => $priceExpr, 'price_type' => $priceTypeExpr ) ); return $this; } /** * Add option filter * * @param array $optionIds * @param int $storeId * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function getValuesByOption($optionIds, $storeId = null) { if (!is_array($optionIds)) { $optionIds = array($optionIds); } return $this->addFieldToFilter('main_table.option_type_id', array('in' => $optionIds)); } /** * Add option to filter * * @param array|Mage_Catalog_Model_Product_Option|int $option * @return Mage_Catalog_Model_Resource_Product_Option_Value_Collection */ public function addOptionToFilter($option) { if (empty($option)) { $this->addFieldToFilter('option_id', ''); } elseif (is_array($option)) { $this->addFieldToFilter('option_id', array('in' => $option)); } elseif ($option instanceof Mage_Catalog_Model_Product_Option) { $this->addFieldToFilter('option_id', $option->getId()); } else { $this->addFieldToFilter('option_id', $option); } return $this; } }
5452/durex
includes/src/Mage_Catalog_Model_Resource_Product_Option_Value_Collection.php
PHP
bsd-3-clause
8,912
# -*- coding: utf-8 -*- """Git tools.""" from shlex import split from plumbum import ProcessExecutionError from plumbum.cmd import git DEVELOPMENT_BRANCH = "develop" def run_git(*args, dry_run=False, quiet=False): """Run a git command, print it before executing and capture the output.""" command = git[split(" ".join(args))] if not quiet: print("{}{}".format("[DRY-RUN] " if dry_run else "", command)) if dry_run: return "" rv = command() if not quiet and rv: print(rv) return rv def branch_exists(branch): """Return True if the branch exists.""" try: run_git("rev-parse --verify {}".format(branch), quiet=True) return True except ProcessExecutionError: return False def get_current_branch(): """Get the current branch name.""" return run_git("rev-parse --abbrev-ref HEAD", quiet=True).strip()
andreoliw/clitoolkit
clit/git.py
Python
bsd-3-clause
899
package testclasses; import de.unifreiburg.cs.proglang.jgs.support.DynamicLabel; import util.printer.SecurePrinter; public class PrintMediumSuccess { public static void main(String[] args) { String med = "This is medium information"; med = DynamicLabel.makeMedium(med); SecurePrinter.printMedium(med); String low = "This is low information"; low = DynamicLabel.makeLow(low); SecurePrinter.printMedium(low); } }
luminousfennell/jgs
DynamicAnalyzer/src/main/java/testclasses/PrintMediumSuccess.java
Java
bsd-3-clause
430
define(["pat-autoscale", "jquery"], function(pattern, jQuery) { describe("pat-autoscale", function() { beforeEach(function() { $("<div/>", {id: "lab"}).appendTo(document.body); $(window).off(".autoscale"); }); afterEach(function() { $("#lab").remove(); }); describe("setup", function() { var force_method, mozilla, msie, version; beforeEach(function() { force_method=pattern.force_method; mozilla=jQuery.browser.mozilla; msie=jQuery.browser.msie; version=jQuery.browser.version; pattern.force_method=null; jQuery.browser.mozilla=false; jQuery.browser.msie=false; }); afterEach(function() { pattern.force_method=force_method; jQuery.browser.mozilla=mozilla; jQuery.browser.msie=msie; jQuery.browser.version=version; }); it("Force zoom on old IE versions", function() { jQuery.browser.msie=true; jQuery.browser.version="8.192.921"; pattern._setup(); expect(pattern.force_method).toBe("zoom"); }); it("Force nothing on recent IE versions", function() { jQuery.browser.msie=true; jQuery.browser.version="9.0.19A"; pattern._setup(); expect(pattern.force_method).toBe(null); }); it("Force scale on gecko", function() { // See https://bugzilla.mozilla.org/show_bug.cgi?id=390936 jQuery.browser.mozilla=true; pattern._setup(); expect(pattern.force_method).toBe("scale"); }); it("Force nothing on other browsers", function() { pattern._setup(); expect(pattern.force_method).toBe(null); }); }); describe("init", function() { var force_method; beforeEach(function() { force_method=pattern.force_method; }); afterEach(function() { pattern.force_method=force_method; }); it("Return jQuery object", function() { var jq = jasmine.createSpyObj("jQuery", ["each"]); jq.each.andReturn(jq); expect(pattern.init(jq)).toBe(jq); }); it("Perform initial scaling", function() { $("<div/>", {id: "parent"}).css({width: "200px"}) .append($("<div/>", {id: "child", "data-pat-auto-scale": "scale"}) .css({width: "50px"})) .appendTo("#lab"); var $child = $("#child"); spyOn(pattern, "scale"); pattern.init($child); 
expect(pattern.scale).toHaveBeenCalled(); }); it("Honour method override", function() { $("<div/>", {id: "parent"}).css({width: "200px"}) .append($("<div/>", {id: "child", "data-pat-auto-scale": "scale"}) .css({width: "50px"})) .appendTo("#lab"); var $child = $("#child"); pattern.force_method = "forced"; pattern.init($child); expect($child.data("patterns.auto-scale").method).toBe("forced"); }); }); describe("scale", function() { it("Scale element", function() { $("<div/>", {id: "parent"}).css({width: "200px"}) .append($("<div/>", {id: "child"}).css({width: "50px"})) .appendTo("#lab"); var child = document.getElementById("child"); $(child).data("patterns.auto-scale", {method: "scale", minWidth: 0, maxWidth: 1000}); pattern.scale.apply(child, []); expect(child.getAttribute("style")).toMatch(/transform: scale\(4\);/); }); it("Zoom element", function() { $("<div/>", {id: "parent"}).css({width: "200px"}) .append($("<div/>", {id: "child"}).css({width: "50px"})) .appendTo("#lab"); var child = document.getElementById("child"); $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 1000}); pattern.scale.apply(child, []); expect(child.style.zoom).toBe("4"); }); it("Honour minimum width", function() { $("<div/>", {id: "parent"}).css({width: "100px"}) .append($("<div/>", {id: "child"}).css({width: "400px"})) .appendTo("#lab"); var child = document.getElementById("child"); $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 200, maxWidth: 1000}); pattern.scale.apply(child, []); expect(child.style.zoom).toBe("0.5"); }); it("Honour maximum width", function() { $("<div/>", {id: "parent"}).css({width: "200px"}) .append($("<div/>", {id: "child"}).css({width: "50px"})) .appendTo("#lab"); var child = document.getElementById("child"); $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 100}); pattern.scale.apply(child, []); expect(child.style.zoom).toBe("2"); }); it("Add scaled class", function() { $("<div/>", {id: 
"parent"}).css({width: "200px"}) .append($("<div/>", {id: "child"}).css({width: "50px"})) .appendTo("#lab"); var child = document.getElementById("child"); $(child).data("patterns.auto-scale", {method: "zoom", minWidth: 0, maxWidth: 1000}); pattern.scale.apply(child, []); expect($(child).hasClass("scaled")).toBeTruthy(); }); }); }); });
Patternslib/Patterns-archive
tests/specs/pat/autoscale.js
JavaScript
bsd-3-clause
6,201
require 'spec_helper' RSpec.describe Spree::CheckoutController, type: :controller do # copied from original checkout controller spec let(:token) { 'some_token' } let(:user) { FactoryGirl.create(:user) } let(:order) { OrderWalkthrough.up_to(:delivery) } before do allow_any_instance_of(ActionDispatch::Request).to receive(:remote_ip).and_return("128.0.0.1") allow(controller).to receive(:try_spree_current_user).and_return(user) allow(controller).to receive(:spree_current_user).and_return(user) allow(controller).to receive(:current_order).and_return(order) end describe "PATCH /checkout/update/payment" do context "when payment_method is PayU" do let(:payment_method) { FactoryGirl.create :payu_payment_method } let(:payment_params) do { state: "payment", order: { payments_attributes: [{ payment_method_id: payment_method.id }] } } end subject { spree_post :update, payment_params } before do # we need to fake it because it's returned back with order allow(SecureRandom).to receive(:uuid).and_return("36332498-294f-41a1-980c-7b2ec0e3a8a4") allow(OpenPayU::Configuration).to receive(:merchant_pos_id).and_return("145278") allow(OpenPayU::Configuration).to receive(:signature_key).and_return("S3CRET_KEY") end let(:payu_order_create_status) { "SUCCESS" } let!(:payu_order_create) do stub_request(:post, "https://145278:S3CRET_KEY@secure.payu.com/api/v2/orders") .with(body: { merchantPosId: "145278", customerIp: "128.0.0.1", extOrderId: order.id, description: "Order from Spree Test Store", currencyCode: "USD", totalAmount: 2000, orderUrl: "http://test.host/orders/#{order.number}", notifyUrl: "http://test.host/payu/notify", continueUrl: "http://test.host/orders/#{order.number}", buyer: { email: user.email, phone: "555-555-0199", firstName: "John", lastName: "Doe", language: "PL", delivery: { street: "10 Lovely Street", postalCode: "35005", city: "Herndon", countryCode: "US" } }, products: { products: [ { name: order.line_items.first.product.name, unitPrice: 1000, quantity: 1 } ] }, 
reqId: "{36332498-294f-41a1-980c-7b2ec0e3a8a4}" }, headers: { 'Content-Type' => 'application/json', 'User-Agent' => 'Ruby' } ) .to_return( status: 200, body: { status: { statusCode: payu_order_create_status }, redirect_uri: "http://payu.com/redirect/url/4321" }.to_json, headers: {} ) end it "creates new PayU order" do expect { subject }.to_not raise_error expect(payu_order_create).to have_been_requested end context "when PayU order creation succeeded" do it "updates order payment" do subject payment = order.payments.last expect(payment.payment_method).to eq(payment_method) expect(payment).to be_pending expect(payment.amount).to eq(order.total) end it "redirects to payu redirect url" do expect(subject).to redirect_to("http://payu.com/redirect/url/4321") end context "when payment save failed" do before do allow_any_instance_of(Spree::Payment).to receive(:save).and_return(false) allow_any_instance_of(Spree::Payment).to receive(:errors) .and_return(double(full_messages: ["payment save failed"])) end it "logs errors" do subject expect(flash[:error]).to include("payment save failed") end it "renders checkout state with redirect" do expect(subject).to redirect_to "http://test.host/checkout/payment" end end context "when order transition failed" do before do allow(order).to receive(:next).and_return(false) allow(order).to(receive(:errors) .and_return(double(full_messages: ["order cannot transition to this state"]))) end it "logs errors" do subject expect(flash[:error]).to include("order cannot transition to this state") end it "renders checkout state with redirect" do expect(subject).to redirect_to "http://test.host/checkout/payment" end end end context "when PayU order creation returns unexpected status" do let(:payu_order_create_status) { "FAIL" } it "logs error in order" do subject expect(assigns(:order).errors[:base]).to include("PayU error ") end it "renders :edit page" do expect(subject).to render_template(:edit) end end context "when something failed inside PayU 
order creation" do before do allow(OpenPayU::Order).to receive(:create).and_raise(RuntimeError.new("Payment timeout!")) end it "logs error in order" do subject expect(assigns(:order).errors[:base]).to include("PayU error Payment timeout!") end it "renders :edit page" do expect(subject).to render_template(:edit) end end end context "when order attributes are missing" do let(:payment_params) { { state: "payment", order: { some: "details" } } } subject { spree_post :update, payment_params } it "renders checkout state with redirect" do expect(subject).to redirect_to "http://test.host/checkout/payment" end it "logs error" do subject expect(flash[:error]).to include("No payment found") end end end end
netguru/spree_payu_integration
spec/controllers/spree/checkout_controller_decorator_spec.rb
Ruby
bsd-3-clause
6,155
#include "pri.h" #include "calcu_erase_dot_noise.h" #include "iip_erase_dot_noise.h" void iip_erase_dot_noise::_exec_uchar( long l_width, long l_height, long l_area_xpos, long l_area_ypos, long l_area_xsize, long l_area_ysize, long l_channels, unsigned char *ucharp_in, unsigned char *ucharp_out ) { long l_start, l_scansize; long xx,yy; unsigned char *ucharp_in_y1,*ucharp_in_y2,*ucharp_in_y3; unsigned char *ucharp_in_x11,*ucharp_in_x12,*ucharp_in_x13, *ucharp_in_x21,*ucharp_in_x22,*ucharp_in_x23, *ucharp_in_x31,*ucharp_in_x32,*ucharp_in_x33; unsigned char *ucharp_out_y1,*ucharp_out_y2; unsigned char *ucharp_out_x1,*ucharp_out_x2; unsigned char *ucharp_tmp; calcu_erase_dot_noise cl_dot; l_height; /* 初期値 */ l_scansize = l_width * l_channels; l_start = l_area_ypos * l_scansize + l_area_xpos * l_channels; ucharp_in += l_start; ucharp_out += l_start; /* 縦方向ポインター初期化 */ ucharp_in_y1 = ucharp_in; ucharp_in_y2 = ucharp_in_y3 = NULL; ucharp_out_y1 = ucharp_out; ucharp_out_y2 = NULL; /* 縦方向ループ */ for (yy = 0L; yy < l_area_ysize; ++yy, /* 縦方向の3連ポインター進める */ ucharp_in_y3 = ucharp_in_y2, ucharp_in_y2 = ucharp_in_y1, ucharp_in_y1 += l_scansize, ucharp_out_y2 = ucharp_out_y1, ucharp_out_y1 += l_scansize ) { /* カウントダウン表示中 */ if (ON == this->get_i_cv_sw()) { pri_funct_cv_run(yy); } /* 3連満ちるまで */ if (NULL == ucharp_in_y3) { continue; } /* 横方向ポインター初期化 */ ucharp_in_x11 = ucharp_in_y1; ucharp_in_x12 = ucharp_in_x13 = NULL; ucharp_in_x21 = ucharp_in_y2; ucharp_in_x22 = ucharp_in_x23 = NULL; ucharp_in_x31 = ucharp_in_y3; ucharp_in_x32 = ucharp_in_x33 = NULL; ucharp_out_x1 = ucharp_out_y2; ucharp_out_x2 = NULL; /* 横方向ループ */ for (xx = 0L; xx < l_area_xsize; ++xx, /* 横方向の3x3連ポインター進める */ ucharp_in_x33 = ucharp_in_x32, ucharp_in_x32 = ucharp_in_x31, ucharp_in_x31 += l_channels, ucharp_in_x23 = ucharp_in_x22, ucharp_in_x22 = ucharp_in_x21, ucharp_in_x21 += l_channels, ucharp_in_x13 = ucharp_in_x12, ucharp_in_x12 = ucharp_in_x11, ucharp_in_x11 += l_channels, ucharp_out_x2 = ucharp_out_x1, 
ucharp_out_x1 += l_channels ) { /* 3連満ちるまで */ if (NULL == ucharp_in_x13) { continue; } /* dotをつぶすか判断 */ ucharp_tmp = cl_dot.get_ucharp( ucharp_in_x11,ucharp_in_x12,ucharp_in_x13, ucharp_in_x21,ucharp_in_x22,ucharp_in_x23, ucharp_in_x31,ucharp_in_x32,ucharp_in_x33 ); /* dotをつぶす */ if (NULL != ucharp_tmp) { ucharp_out_x2[CH_RED] = ucharp_tmp[CH_RED]; ucharp_out_x2[CH_GRE] = ucharp_tmp[CH_GRE]; ucharp_out_x2[CH_BLU] = ucharp_tmp[CH_BLU]; } } } }
masafumi-inoue/GTS
sources/libcpp72iip_erase_dot_noise/iip_erase_dot_noise_uchar.cpp
C++
bsd-3-clause
2,775
// Copyright (c) 2013-2015 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package wire_test

import (
	"bytes"
	"io"
)

// fixedWriter implements the io.Writer interface and intentionally allows
// testing of error paths by forcing short writes.
type fixedWriter struct {
	b   []byte // fixed-capacity backing buffer
	pos int    // number of bytes written so far
}

// Write writes the contents of p to w.  When the contents of p would cause
// the writer to exceed the maximum allowed size of the fixed writer,
// io.ErrShortWrite is returned and the writer is left unchanged.
//
// This satisfies the io.Writer interface.
func (w *fixedWriter) Write(p []byte) (n int, err error) {
	lenp := len(p)
	if w.pos+lenp > cap(w.b) {
		return 0, io.ErrShortWrite
	}
	n = lenp
	w.pos += copy(w.b[w.pos:], p)
	return
}

// Bytes returns the bytes already written to the fixed writer.
func (w *fixedWriter) Bytes() []byte {
	return w.b
}

// newFixedWriter returns a new io.Writer that will error once more bytes than
// the specified max have been written.
func newFixedWriter(max int) io.Writer {
	b := make([]byte, max, max)
	fw := fixedWriter{b, 0}
	return &fw
}

// fixedReader implements the io.Reader interface and intentionally allows
// testing of error paths by forcing short reads.
type fixedReader struct {
	buf   []byte        // fixed-size source data
	pos   int           // number of bytes read so far
	iobuf *bytes.Buffer // buffer that actually serves the reads
}

// Read reads the next len(p) bytes from the fixed reader.  When the number of
// bytes read would exceed the maximum number of allowed bytes to be read from
// the fixed reader, an error is returned.
//
// This satisfies the io.Reader interface.
func (fr *fixedReader) Read(p []byte) (n int, err error) {
	n, err = fr.iobuf.Read(p)
	fr.pos += n
	return
}

// newFixedReader returns a new io.Reader that will error once more bytes than
// the specified max have been read.
func newFixedReader(max int, buf []byte) io.Reader {
	b := make([]byte, max, max)
	if buf != nil {
		copy(b[:], buf)
	}

	iobuf := bytes.NewBuffer(b)
	fr := fixedReader{b, 0, iobuf}
	return &fr
}
hectorj/btcd
wire/fixedIO_test.go
GO
isc
2,030
java_import 'org.apollo.game.action.DistancedAction' # A distanced action which opens a door. class OpenDoorAction < DistancedAction include DoorConstants attr_reader :door def initialize(mob, door) super(0, true, mob, door.position, DOOR_SIZE) @door = door end def executeAction mob.turn_to(@door.position) DoorUtil.toggle(@door) stop end def equals(other) get_class == other.get_class && @door == other.door end end # MessageListener for opening and closing doors. on :message, :first_object_action do |player, message| if DoorUtil.door?(message.id) door = DoorUtil.get_door_object(message.position, message.id) player.start_action(OpenDoorAction.new(player, door)) unless door.nil? end end
garyttierney/apollo
data/plugins/navigation/door/door.rb
Ruby
isc
753
import * as React from 'react';

import { BsPrefixComponent } from './helpers';

/** Props accepted by the NavbarToggle component. */
interface NavbarToggleProps {
  /** Accessible label for the toggle — presumably rendered as an
   *  aria-label on the underlying element; TODO confirm against the
   *  JS implementation. */
  label?: string;
}

/**
 * Type declaration for `<Navbar.Toggle>`.  Polymorphic component: renders as
 * a `button` by default, overridable via the `As` type parameter, and
 * inherits bsPrefix handling from BsPrefixComponent.
 */
declare class NavbarToggle<
  As extends React.ReactType = 'button'
> extends BsPrefixComponent<As, NavbarToggleProps> {}

export default NavbarToggle;
glenjamin/react-bootstrap
types/components/NavbarToggle.d.ts
TypeScript
mit
284
using System;
using System.ComponentModel;
using System.Drawing;
using System.Windows.Forms;

namespace DotSpatial.Symbology.Forms
{
    // Designer half of the LineSymbolDialog partial class: holds the control
    // fields and the designer-generated layout code.  The event handlers
    // wired below (BtnSymbolDetailsClick, CmbCategoriesSelectedIndexChanged)
    // live in the other half of the partial class.
    public partial class LineSymbolDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(LineSymbolDialog));
            this.lblSymbologyType = new System.Windows.Forms.Label();
            this.lblPredefinedSymbol = new System.Windows.Forms.Label();
            this.lblSymbolPreview = new System.Windows.Forms.Label();
            this.btnSymbolDetails = new System.Windows.Forms.Button();
            this.cmbCategories = new System.Windows.Forms.ComboBox();
            this.predefinedLineSymbolControl1 = new DotSpatial.Symbology.Forms.PredefinedLineSymbolControl();
            this.symbolPreview1 = new DotSpatial.Symbology.Forms.SymbolPreview();
            this.dialogButtons1 = new DotSpatial.Symbology.Forms.DialogButtons();
            this.SuspendLayout();
            //
            // lblSymbologyType
            //
            resources.ApplyResources(this.lblSymbologyType, "lblSymbologyType");
            this.lblSymbologyType.Name = "lblSymbologyType";
            //
            // lblPredefinedSymbol
            //
            resources.ApplyResources(this.lblPredefinedSymbol, "lblPredefinedSymbol");
            this.lblPredefinedSymbol.Name = "lblPredefinedSymbol";
            //
            // lblSymbolPreview
            //
            resources.ApplyResources(this.lblSymbolPreview, "lblSymbolPreview");
            this.lblSymbolPreview.Name = "lblSymbolPreview";
            //
            // btnSymbolDetails
            //
            resources.ApplyResources(this.btnSymbolDetails, "btnSymbolDetails");
            this.btnSymbolDetails.Name = "btnSymbolDetails";
            this.btnSymbolDetails.UseVisualStyleBackColor = true;
            this.btnSymbolDetails.Click += new System.EventHandler(this.BtnSymbolDetailsClick);
            //
            // cmbCategories
            //
            resources.ApplyResources(this.cmbCategories, "cmbCategories");
            this.cmbCategories.FormattingEnabled = true;
            this.cmbCategories.Name = "cmbCategories";
            this.cmbCategories.SelectedIndexChanged += new System.EventHandler(this.CmbCategoriesSelectedIndexChanged);
            //
            // predefinedLineSymbolControl1
            //
            resources.ApplyResources(this.predefinedLineSymbolControl1, "predefinedLineSymbolControl1");
            this.predefinedLineSymbolControl1.BackColor = System.Drawing.Color.White;
            this.predefinedLineSymbolControl1.CategoryFilter = String.Empty;
            this.predefinedLineSymbolControl1.CellMargin = 8;
            this.predefinedLineSymbolControl1.CellSize = new System.Drawing.Size(62, 62);
            this.predefinedLineSymbolControl1.ControlRectangle = new System.Drawing.Rectangle(0, 0, 272, 253);
            this.predefinedLineSymbolControl1.DefaultCategoryFilter = "All";
            this.predefinedLineSymbolControl1.DynamicColumns = true;
            this.predefinedLineSymbolControl1.IsInitialized = false;
            this.predefinedLineSymbolControl1.IsSelected = true;
            this.predefinedLineSymbolControl1.Name = "predefinedLineSymbolControl1";
            this.predefinedLineSymbolControl1.SelectedIndex = -1;
            this.predefinedLineSymbolControl1.SelectionBackColor = System.Drawing.Color.LightGray;
            this.predefinedLineSymbolControl1.SelectionForeColor = System.Drawing.Color.White;
            this.predefinedLineSymbolControl1.ShowSymbolNames = true;
            this.predefinedLineSymbolControl1.TextFont = new System.Drawing.Font("Arial", 8F);
            this.predefinedLineSymbolControl1.VerticalScrollEnabled = true;
            //
            // symbolPreview1
            //
            resources.ApplyResources(this.symbolPreview1, "symbolPreview1");
            this.symbolPreview1.BackColor = System.Drawing.Color.White;
            this.symbolPreview1.Name = "symbolPreview1";
            //
            // dialogButtons1
            //
            resources.ApplyResources(this.dialogButtons1, "dialogButtons1");
            this.dialogButtons1.Name = "dialogButtons1";
            //
            // LineSymbolDialog
            //
            resources.ApplyResources(this, "$this");
            this.Controls.Add(this.dialogButtons1);
            this.Controls.Add(this.predefinedLineSymbolControl1);
            this.Controls.Add(this.cmbCategories);
            this.Controls.Add(this.symbolPreview1);
            this.Controls.Add(this.btnSymbolDetails);
            this.Controls.Add(this.lblSymbolPreview);
            this.Controls.Add(this.lblPredefinedSymbol);
            this.Controls.Add(this.lblSymbologyType);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.HelpButton = true;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "LineSymbolDialog";
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        // Controls created by InitializeComponent above.
        private Button btnSymbolDetails;
        private ComboBox cmbCategories;
        private DialogButtons dialogButtons1;
        private Label lblPredefinedSymbol;
        private Label lblSymbolPreview;
        private Label lblSymbologyType;
        private PredefinedLineSymbolControl predefinedLineSymbolControl1;
        private SymbolPreview symbolPreview1;
    }
}
CGX-GROUP/DotSpatial
Source/DotSpatial.Symbology.Forms/LineSymbolDialog.Designer.cs
C#
mit
6,632
// Fixture: a generator whose first `yield` sits inside a try/catch.  A value
// injected with g.throw() resumes the generator inside the try, is caught,
// and is re-yielded; the generator then finishes with its return value of 2.
function* f() {
  var x;
  try {
    x = yield 1;
  } catch (ex) {
    // Re-surface the thrown value as a yielded value.
    yield ex;
  }
  return 2;
}

// Normal path: no throw, so the catch block never runs.
var g = f();
expect(g.next()).toEqual({value: 1, done: false});
expect(g.next()).toEqual({value: 2, done: true});

// Throw path: throw(3) is caught and yielded back; the following next()
// resumes after the catch and completes the generator.
g = f();
expect(g.next()).toEqual({value: 1, done: false});
expect(g.throw(3)).toEqual({value: 3, done: false});
expect(g.next()).toEqual({value: 2, done: true});
kellyselden/babel
packages/babel-preset-es2015/test/fixtures/traceur/Yield/YieldAssignThrow.js
JavaScript
mit
378
package events

import (
	"errors"

	"github.com/miketheprogrammer/go-thrust/lib/commands"
	"github.com/miketheprogrammer/go-thrust/lib/dispatcher"
)

// NewHandler creates a ThrustEventHandler for the given event name and, when
// the supplied callback has a supported signature, registers it with the
// dispatcher.
//
// fn must be either a func(commands.CommandResponse) or a
// func(commands.EventResult).  Any other signature returns an error, and the
// handler is NOT registered.  (Fix: previously the handler was registered
// with the dispatcher even when SetHandleFunc rejected fn, leaving a useless
// no-op handler installed.)
func NewHandler(event string, fn interface{}) (ThrustEventHandler, error) {
	h := ThrustEventHandler{
		Type:  "event",
		Event: event,
	}
	if err := h.SetHandleFunc(fn); err != nil {
		return h, err
	}
	dispatcher.RegisterHandler(h)
	return h, nil
}

/** Begin Thrust Handler Code. **/

// Handler is the interface implemented by dispatchable event handlers.
type Handler interface {
	Handle(cr commands.CommandResponse)
	Register()
	SetHandleFunc(fn interface{})
}

// ThrustEventHandler forwards command responses of action "event" to a
// user-supplied callback.
type ThrustEventHandler struct {
	Type    string      // always "event" for handlers built by NewHandler
	Event   string      // event name to match; "*" matches every event
	Handler interface{} // callback; see SetHandleFunc for accepted signatures
}

// Handle forwards cr to the stored callback when cr is an event whose type
// matches this handler's Event name (the wildcard "*" matches everything).
// Responses with any other action, or with a non-matching type, are ignored.
func (teh ThrustEventHandler) Handle(cr commands.CommandResponse) {
	if cr.Action != "event" {
		return
	}
	if cr.Type != teh.Event && teh.Event != "*" {
		return
	}
	cr.Event.Type = cr.Type
	// Idiomatic type switch instead of the previous `ok == true` assertions.
	switch fn := teh.Handler.(type) {
	case func(commands.CommandResponse):
		fn(cr)
	case func(commands.EventResult):
		fn(cr.Event)
	}
}

// SetHandleFunc stores fn as the handler's callback if it has one of the two
// supported signatures; otherwise it returns an error and leaves the handler
// unchanged.
func (teh *ThrustEventHandler) SetHandleFunc(fn interface{}) error {
	switch fn.(type) {
	case func(commands.CommandResponse), func(commands.EventResult):
		teh.Handler = fn
		return nil
	default:
		return errors.New("Invalid Handler Definition")
	}
}
FPurchess/blank
vendor/src/github.com/miketheprogrammer/go-thrust/lib/events/eventhandler.go
GO
mit
1,347
from __future__ import unicode_literals

from .atomicparsley import AtomicParsleyPP
from .ffmpeg import (
    FFmpegPostProcessor,
    FFmpegAudioFixPP,
    FFmpegEmbedSubtitlePP,
    FFmpegExtractAudioPP,
    FFmpegFixupStretchedPP,
    FFmpegMergerPP,
    FFmpegMetadataPP,
    FFmpegVideoConvertorPP,
)
from .xattrpp import XAttrMetadataPP
from .execafterdownload import ExecAfterDownloadPP


def get_postprocessor(key):
    """Return the postprocessor class registered under ``key``.

    Classes are looked up by naming convention: the class must be imported
    into this module under the name ``key + 'PP'`` (e.g. ``'FFmpegMerger'``
    maps to ``FFmpegMergerPP``).

    Raises:
        KeyError: if no postprocessor class exists for ``key``.  (Fix: the
            previous bare lookup raised ``KeyError('XyzPP')``, exposing only
            the internal mangled name with no explanation.)
    """
    try:
        return globals()[key + 'PP']
    except KeyError:
        raise KeyError('No postprocessor registered under key %r' % key)


# Public names re-exported from this package.
__all__ = [
    'AtomicParsleyPP',
    'ExecAfterDownloadPP',
    'FFmpegAudioFixPP',
    'FFmpegEmbedSubtitlePP',
    'FFmpegExtractAudioPP',
    'FFmpegFixupStretchedPP',
    'FFmpegMergerPP',
    'FFmpegMetadataPP',
    'FFmpegPostProcessor',
    'FFmpegVideoConvertorPP',
    'XAttrMetadataPP',
]
janusnic/youtube-dl-GUI
youtube_dl/postprocessor/__init__.py
Python
mit
760
// Type definitions for yargs 17.0 // Project: https://github.com/chevex/yargs, https://yargs.js.org // Definitions by: Martin Poelstra <https://github.com/poelstra> // Mizunashi Mana <https://github.com/mizunashi-mana> // Jeffery Grajkowski <https://github.com/pushplay> // Jimi (Dimitris) Charalampidis <https://github.com/JimiC> // Steffen Viken Valvåg <https://github.com/steffenvv> // Emily Marigold Klassen <https://github.com/forivall> // ExE Boss <https://github.com/ExE-Boss> // Aankhen <https://github.com/Aankhen> // Ben Coe <https://github.com/bcoe> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped // TypeScript Version: 3.0 // The following TSLint rules have been disabled: // unified-signatures: Because there is useful information in the argument names of the overloaded signatures // Convention: // Use 'union types' when: // - parameter types have similar signature type (i.e. 'string | ReadonlyArray<string>') // - parameter names have the same semantic meaning (i.e. ['command', 'commands'] , ['key', 'keys']) // An example for not using 'union types' is the declaration of 'env' where `prefix` and `enable` parameters // have different semantics. On the other hand, in the declaration of 'usage', a `command: string` parameter // has the same semantic meaning with declaring an overload method by using `commands: ReadonlyArray<string>`, // thus it's preferred to use `command: string | ReadonlyArray<string>` // Use parameterless declaration instead of declaring all parameters optional, // when all parameters are optional and more than one import { DetailedArguments, Configuration } from 'yargs-parser'; declare namespace yargs { type BuilderCallback<T, R> = ((args: Argv<T>) => PromiseLike<Argv<R>>) | ((args: Argv<T>) => Argv<R>) | ((args: Argv<T>) => void); type ParserConfigurationOptions = Configuration & { /** Sort commands alphabetically. 
Default is `false` */ 'sort-commands': boolean; }; /** * The type parameter `T` is the expected shape of the parsed options. * `Arguments<T>` is those options plus `_` and `$0`, and an indexer falling * back to `unknown` for unknown options. * * For the return type / `argv` property, we create a mapped type over * `Arguments<T>` to simplify the inferred type signature in client code. */ interface Argv<T = {}> { (): { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>; (args: ReadonlyArray<string>, cwd?: string): Argv<T>; /** * Set key names as equivalent such that updates to a key will propagate to aliases and vice-versa. * * Optionally `.alias()` can take an object that maps keys to aliases. * Each key of this object should be the canonical version of the option, and each value should be a string or an array of strings. */ // Aliases for previously declared options can inherit the types of those options. alias<K1 extends keyof T, K2 extends string>(shortName: K1, longName: K2 | ReadonlyArray<K2>): Argv<T & { [key in K2]: T[K1] }>; alias<K1 extends keyof T, K2 extends string>(shortName: K2, longName: K1 | ReadonlyArray<K1>): Argv<T & { [key in K2]: T[K1] }>; alias(shortName: string | ReadonlyArray<string>, longName: string | ReadonlyArray<string>): Argv<T>; alias(aliases: { [shortName: string]: string | ReadonlyArray<string> }): Argv<T>; /** * Get the arguments as a plain old object. * * Arguments without a corresponding flag show up in the `argv._` array. * * The script name or node command is available at `argv.$0` similarly to how `$0` works in bash or perl. * * If `yargs` is executed in an environment that embeds node and there's no script name (e.g. Electron or nw.js), * it will ignore the first parameter since it expects it to be the script name. In order to override * this behavior, use `.parse(process.argv.slice(1))` instead of .argv and the first parameter won't be ignored. 
*/ argv: { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>; /** * Tell the parser to interpret `key` as an array. * If `.array('foo')` is set, `--foo foo bar` will be parsed as `['foo', 'bar']` rather than as `'foo'`. * Also, if you use the option multiple times all the values will be flattened in one array so `--foo foo --foo bar` will be parsed as `['foo', 'bar']` * * When the option is used with a positional, use `--` to tell `yargs` to stop adding values to the array. */ array<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToArray<T[key]> }>; array<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: Array<string | number> | undefined }>; /** * Interpret `key` as a boolean. If a non-flag option follows `key` in `process.argv`, that string won't get set as the value of `key`. * * `key` will default to `false`, unless a `default(key, undefined)` is explicitly set. * * If `key` is an array, interpret all the elements as booleans. */ boolean<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: boolean | undefined }>; boolean<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: boolean | undefined }>; /** * Check that certain conditions are met in the provided arguments. * @param func Called with two arguments, the parsed `argv` hash and an array of options and their aliases. * If `func` throws or returns a non-truthy value, show the thrown error, usage information, and exit. * @param global Indicates whether `check()` should be enabled both at the top-level and for each sub-command. */ check(func: (argv: Arguments<T>, aliases: { [alias: string]: string }) => any, global?: boolean): Argv<T>; /** * Limit valid values for key to a predefined set of choices, given as an array or as an individual value. * If this method is called multiple times, all enumerated values will be merged together. 
* Choices are generally strings or numbers, and value matching is case-sensitive. * * Optionally `.choices()` can take an object that maps multiple keys to their choices. * * Choices can also be specified as choices in the object given to `option()`. */ choices<K extends keyof T, C extends ReadonlyArray<any>>(key: K, values: C): Argv<Omit<T, K> & { [key in K]: C[number] | undefined }>; choices<K extends string, C extends ReadonlyArray<any>>(key: K, values: C): Argv<T & { [key in K]: C[number] | undefined }>; choices<C extends { [key: string]: ReadonlyArray<any> }>(choices: C): Argv<Omit<T, keyof C> & { [key in keyof C]: C[key][number] | undefined }>; /** * Provide a synchronous function to coerce or transform the value(s) given on the command line for `key`. * * The coercion function should accept one argument, representing the parsed value from the command line, and should return a new value or throw an error. * The returned value will be used as the value for `key` (or one of its aliases) in `argv`. * * If the function throws, the error will be treated as a validation failure, delegating to either a custom `.fail()` handler or printing the error message in the console. * * Coercion will be applied to a value after all other modifications, such as `.normalize()`. * * Optionally `.coerce()` can take an object that maps several keys to their respective coercion function. * * You can also map the same function to several keys at one time. Just pass an array of keys as the first argument to `.coerce()`. 
* * If you are using dot-notion or arrays, .e.g., `user.email` and `user.password`, coercion will be applied to the final object that has been parsed */ coerce<K extends keyof T, V>(key: K | ReadonlyArray<K>, func: (arg: any) => V): Argv<Omit<T, K> & { [key in K]: V | undefined }>; coerce<K extends string, V>(key: K | ReadonlyArray<K>, func: (arg: any) => V): Argv<T & { [key in K]: V | undefined }>; coerce<O extends { [key: string]: (arg: any) => any }>(opts: O): Argv<Omit<T, keyof O> & { [key in keyof O]: ReturnType<O[key]> | undefined }>; /** * Define the commands exposed by your application. * @param command Should be a string representing the command or an array of strings representing the command and its aliases. * @param description Use to provide a description for each command your application accepts (the values stored in `argv._`). * Set `description` to false to create a hidden command. Hidden commands don't show up in the help output and aren't available for completion. * @param [builder] Object to give hints about the options that your command accepts. * Can also be a function. This function is executed with a yargs instance, and can be used to provide advanced command specific help. * * Note that when `void` is returned, the handler `argv` object type will not include command-specific arguments. * @param [handler] Function, which will be executed with the parsed `argv` object. 
*/ command<U = T>( command: string | ReadonlyArray<string>, description: string, builder?: BuilderCallback<T, U>, handler?: (args: Arguments<U>) => void, middlewares?: MiddlewareFunction[], deprecated?: boolean | string, ): Argv<U>; command<O extends { [key: string]: Options }>( command: string | ReadonlyArray<string>, description: string, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void, middlewares?: MiddlewareFunction[], deprecated?: boolean | string, ): Argv<T>; command<U>(command: string | ReadonlyArray<string>, description: string, module: CommandModule<T, U>): Argv<U>; command<U = T>( command: string | ReadonlyArray<string>, showInHelp: false, builder?: BuilderCallback<T, U>, handler?: (args: Arguments<U>) => void, middlewares?: MiddlewareFunction[], deprecated?: boolean | string, ): Argv<T>; command<O extends { [key: string]: Options }>( command: string | ReadonlyArray<string>, showInHelp: false, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void, ): Argv<T>; command<U>(command: string | ReadonlyArray<string>, showInHelp: false, module: CommandModule<T, U>): Argv<U>; command<U>(module: CommandModule<T, U>): Argv<U>; // Advanced API /** Apply command modules from a directory relative to the module calling this method. */ commandDir(dir: string, opts?: RequireDirectoryOptions): Argv<T>; /** * Enable bash/zsh-completion shortcuts for commands and options. * * If invoked without parameters, `.completion()` will make completion the command to output the completion script. * * @param [cmd] When present in `argv._`, will result in the `.bashrc` or `.zshrc` completion script being outputted. * To enable bash/zsh completions, concat the generated script to your `.bashrc` or `.bash_profile` (or `.zshrc` for zsh). * @param [description] Provide a description in your usage instructions for the command that generates the completion scripts. 
* @param [func] Rather than relying on yargs' default completion functionality, which shiver me timbers is pretty awesome, you can provide your own completion method. */ completion(): Argv<T>; completion(cmd: string, func?: AsyncCompletionFunction): Argv<T>; completion(cmd: string, func?: SyncCompletionFunction): Argv<T>; completion(cmd: string, func?: PromiseCompletionFunction): Argv<T>; completion(cmd: string, description?: string | false, func?: AsyncCompletionFunction): Argv<T>; completion(cmd: string, description?: string | false, func?: SyncCompletionFunction): Argv<T>; completion(cmd: string, description?: string | false, func?: PromiseCompletionFunction): Argv<T>; /** * Tells the parser that if the option specified by `key` is passed in, it should be interpreted as a path to a JSON config file. * The file is loaded and parsed, and its properties are set as arguments. * Because the file is loaded using Node's require(), the filename MUST end in `.json` to be interpreted correctly. * * If invoked without parameters, `.config()` will make --config the option to pass the JSON config file. * * @param [description] Provided to customize the config (`key`) option in the usage string. * @param [explicitConfigurationObject] An explicit configuration `object` */ config(): Argv<T>; config(key: string | ReadonlyArray<string>, description?: string, parseFn?: (configPath: string) => object): Argv<T>; config(key: string | ReadonlyArray<string>, parseFn: (configPath: string) => object): Argv<T>; config(explicitConfigurationObject: object): Argv<T>; /** * Given the key `x` is set, the key `y` must not be set. `y` can either be a single string or an array of argument names that `x` conflicts with. * * Optionally `.conflicts()` can accept an object specifying multiple conflicting keys. 
*/ conflicts(key: string, value: string | ReadonlyArray<string>): Argv<T>; conflicts(conflicts: { [key: string]: string | ReadonlyArray<string> }): Argv<T>; /** * Interpret `key` as a boolean flag, but set its parsed value to the number of flag occurrences rather than `true` or `false`. Default value is thus `0`. */ count<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: number }>; count<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: number }>; /** * Set `argv[key]` to `value` if no option was specified in `process.argv`. * * Optionally `.default()` can take an object that maps keys to default values. * * The default value can be a `function` which returns a value. The name of the function will be used in the usage string. * * Optionally, `description` can also be provided and will take precedence over displaying the value in the usage instructions. */ default<K extends keyof T, V>(key: K, value: V, description?: string): Argv<Omit<T, K> & { [key in K]: V }>; default<K extends string, V>(key: K, value: V, description?: string): Argv<T & { [key in K]: V }>; default<D extends { [key: string]: any }>(defaults: D, description?: string): Argv<Omit<T, keyof D> & D>; /** * @deprecated since version 6.6.0 * Use '.demandCommand()' or '.demandOption()' instead */ demand<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>; demand<K extends string>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<T & { [key in K]: unknown }>; demand(key: string | ReadonlyArray<string>, required?: boolean): Argv<T>; demand(positionals: number, msg: string): Argv<T>; demand(positionals: number, required?: boolean): Argv<T>; demand(positionals: number, max: number, msg?: string): Argv<T>; /** * @param key If is a string, show the usage information and exit if key wasn't specified in `process.argv`. * If is an array, demand each element. 
* @param msg If string is given, it will be printed when the argument is missing, instead of the standard error message. * @param demand Controls whether the option is demanded; this is useful when using .options() to specify command line parameters. */ demandOption<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>; demandOption<K extends string>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<T & { [key in K]: unknown }>; demandOption(key: string | ReadonlyArray<string>, demand?: boolean): Argv<T>; /** * Demand in context of commands. * You can demand a minimum and a maximum number a user can have within your program, as well as provide corresponding error messages if either of the demands is not met. */ demandCommand(): Argv<T>; demandCommand(min: number, minMsg?: string): Argv<T>; demandCommand(min: number, max?: number, minMsg?: string, maxMsg?: string): Argv<T>; /** * Shows a [deprecated] notice in front of the option */ deprecateOption(option: string, msg?: string): Argv<T>; /** * Describe a `key` for the generated usage information. * * Optionally `.describe()` can take an object that maps keys to descriptions. */ describe(key: string | ReadonlyArray<string>, description: string): Argv<T>; describe(descriptions: { [key: string]: string }): Argv<T>; /** Should yargs attempt to detect the os' locale? Defaults to `true`. */ detectLocale(detect: boolean): Argv<T>; /** * Tell yargs to parse environment variables matching the given prefix and apply them to argv as though they were command line arguments. * * Use the "__" separator in the environment variable to indicate nested options. (e.g. prefix_nested__foo => nested.foo) * * If this method is called with no argument or with an empty string or with true, then all env vars will be applied to argv. * * Program arguments are defined in this order of precedence: * 1. Command line args * 2. Env vars * 3. Config file/objects * 4. 
Configured defaults * * Env var parsing is disabled by default, but you can also explicitly disable it by calling `.env(false)`, e.g. if you need to undo previous configuration. */ env(): Argv<T>; env(prefix: string): Argv<T>; env(enable: boolean): Argv<T>; /** A message to print at the end of the usage instructions */ epilog(msg: string): Argv<T>; /** A message to print at the end of the usage instructions */ epilogue(msg: string): Argv<T>; /** * Give some example invocations of your program. * Inside `cmd`, the string `$0` will get interpolated to the current script name or node command for the present script similar to how `$0` works in bash or perl. * Examples will be printed out as part of the help message. */ example(command: string, description: string): Argv<T>; example(command: ReadonlyArray<[string, string?]>): Argv<T>; /** Manually indicate that the program should exit, and provide context about why we wanted to exit. Follows the behavior set by `.exitProcess().` */ exit(code: number, err: Error): void; /** * By default, yargs exits the process when the user passes a help flag, the user uses the `.version` functionality, validation fails, or the command handler fails. * Calling `.exitProcess(false)` disables this behavior, enabling further actions after yargs have been validated. */ exitProcess(enabled: boolean): Argv<T>; /** * Method to execute when a failure occurs, rather than printing the failure message. * @param func Is called with the failure message that would have been printed, the Error instance originally thrown and yargs state when the failure occurred. */ fail(func: ((msg: string, err: Error, yargs: Argv<T>) => any) | boolean): Argv<T>; /** * Allows to programmatically get completion choices for any line. * @param args An array of the words in the command line to complete. * @param done The callback to be called with the resulting completions. 
*/
        getCompletion(args: ReadonlyArray<string>, done: (completions: ReadonlyArray<string>) => void): Argv<T>;

        /**
         * Indicate that an option (or group of options) should not be reset when a command is executed
         *
         * Options default to being global.
         */
        global(key: string | ReadonlyArray<string>): Argv<T>;

        /** Given a key, or an array of keys, places options under an alternative heading when displaying usage instructions */
        group(key: string | ReadonlyArray<string>, groupName: string): Argv<T>;

        /** Hides a key from the generated usage information. Unless a `--show-hidden` option is also passed with `--help` (see `showHidden()`). */
        hide(key: string): Argv<T>;

        /**
         * Configure an option (e.g. `--help`) and an implicit command that displays the usage string and exits the process.
         * By default yargs enables help on the `--help` option.
         *
         * Note that any multi-char aliases (e.g. `help`) used for the help option will also be used for the implicit command.
         * If there are no multi-char aliases (e.g. `h`), then all single-char aliases will be used for the command.
         *
         * If invoked without parameters, `.help()` will use `--help` as the option and help as the implicit command to trigger help output.
         *
         * @param [description] Customizes the description of the help option in the usage string.
         * @param [enableExplicit] If `false` is provided, it will disable --help.
         */
        help(): Argv<T>;
        help(enableExplicit: boolean): Argv<T>;
        help(option: string, enableExplicit: boolean): Argv<T>;
        help(option: string, description?: string, enableExplicit?: boolean): Argv<T>;

        /**
         * Given the key `x` is set, it is required that the key `y` is set.
         * `y` can either be the name of an argument to imply, a number indicating the position of an argument or an array of multiple implications to associate with `x`.
         *
         * Optionally `.implies()` can accept an object specifying multiple implications.
         */
        implies(key: string, value: string | ReadonlyArray<string>): Argv<T>;
        implies(implies: { [key: string]: string | ReadonlyArray<string> }): Argv<T>;

        /**
         * Return the locale that yargs is currently using.
         *
         * By default, yargs will auto-detect the operating system's locale so that yargs-generated help content will display in the user's language.
         */
        locale(): string;
        /**
         * Override the auto-detected locale from the user's operating system with a static locale.
         * Note that the OS locale can be modified by setting/exporting the `LC_ALL` environment variable.
         */
        locale(loc: string): Argv<T>;

        /**
         * Define global middleware functions to be called first, in list order, for all cli commands.
         * @param callbacks Can be a function or a list of functions. Each callback gets passed a reference to argv.
         * @param [applyBeforeValidation] Set to `true` to apply middleware before validation. This will execute the middleware prior to validation checks, but after parsing.
         */
        middleware(callbacks: MiddlewareFunction<T> | ReadonlyArray<MiddlewareFunction<T>>, applyBeforeValidation?: boolean): Argv<T>;

        /**
         * The number of arguments that should be consumed after a key. This can be a useful hint to prevent parsing ambiguity.
         *
         * Optionally `.nargs()` can take an object of `key`/`narg` pairs.
         */
        nargs(key: string, count: number): Argv<T>;
        nargs(nargs: { [key: string]: number }): Argv<T>;

        /** The key provided represents a path and should have `path.normalize()` applied. */
        normalize<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToString<T[key]> }>;
        normalize<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: string | undefined }>;

        /**
         * Tell the parser to always interpret key as a number.
         *
         * If `key` is an array, all elements will be parsed as numbers.
         *
         * If the option is given on the command line without a value, `argv` will be populated with `undefined`.
         *
         * If the value given on the command line cannot be parsed as a number, `argv` will be populated with `NaN`.
         *
         * Note that decimals, hexadecimals, and scientific notation are all accepted.
         */
        number<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToNumber<T[key]> }>;
        number<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: number | undefined }>;

        /**
         * Method to execute when a command finishes successfully.
         * @param func Is called with the successful result of the command that finished.
         */
        onFinishCommand(func: (result: any) => void): Argv<T>;

        /**
         * This method can be used to make yargs aware of options that could exist.
         * You can also pass an opt object which can hold further customization, like `.alias()`, `.demandOption()` etc. for that option.
         */
        option<K extends keyof T, O extends Options>(key: K, options: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
        option<K extends string, O extends Options>(key: K, options: O): Argv<T & { [key in K]: InferredOptionType<O> }>;
        option<O extends { [key: string]: Options }>(options: O): Argv<Omit<T, keyof O> & InferredOptionTypes<O>>;

        /**
         * This method can be used to make yargs aware of options that could exist.
         * You can also pass an opt object which can hold further customization, like `.alias()`, `.demandOption()` etc. for that option.
         */
        options<K extends keyof T, O extends Options>(key: K, options: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
        options<K extends string, O extends Options>(key: K, options: O): Argv<T & { [key in K]: InferredOptionType<O> }>;
        options<O extends { [key: string]: Options }>(options: O): Argv<Omit<T, keyof O> & InferredOptionTypes<O>>;

        /**
         * Parse `args` instead of `process.argv`. Returns the `argv` object. `args` may either be a pre-processed argv array, or a raw argument string.
         *
         * Note: Providing a callback to parse() disables the `exitProcess` setting until after the callback is invoked.
         * @param [context] Provides a useful mechanism for passing state information to commands
         */
        parse(): { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
        parse(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): { [key in keyof Arguments<T>]: Arguments<T>[key] } | Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
        parseSync(): { [key in keyof Arguments<T>]: Arguments<T>[key] };
        parseSync(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): { [key in keyof Arguments<T>]: Arguments<T>[key] };
        parseAsync(): Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;
        parseAsync(arg: string | ReadonlyArray<string>, context?: object, parseCallback?: ParseCallback<T>): Promise<{ [key in keyof Arguments<T>]: Arguments<T>[key] }>;

        /**
         * If the arguments have not been parsed, this property is `false`.
         *
         * If the arguments have been parsed, this contains detailed parsed arguments.
         */
        parsed: DetailedArguments | false;

        /** Allows to configure advanced yargs features. */
        parserConfiguration(configuration: Partial<ParserConfigurationOptions>): Argv<T>;

        /**
         * Similar to `config()`, indicates that yargs should interpret the object from the specified key in package.json as a configuration object.
         * @param [cwd] If provided, the package.json will be read from this location
         */
        pkgConf(key: string | ReadonlyArray<string>, cwd?: string): Argv<T>;

        /**
         * Allows you to configure a command's positional arguments with an API similar to `.option()`.
         * `.positional()` should be called in a command's builder function, and is not available on the top-level yargs instance. If so, it will throw an error.
         */
        positional<K extends keyof T, O extends PositionalOptions>(key: K, opt: O): Argv<Omit<T, K> & { [key in K]: InferredOptionType<O> }>;
        positional<K extends string, O extends PositionalOptions>(key: K, opt: O): Argv<T & { [key in K]: InferredOptionType<O> }>;

        /** Should yargs provide suggestions regarding similar commands if no matching command is found? */
        recommendCommands(): Argv<T>;

        /**
         * @deprecated since version 6.6.0
         * Use '.demandCommand()' or '.demandOption()' instead
         */
        require<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
        require(key: string, msg: string): Argv<T>;
        require(key: string, required: boolean): Argv<T>;
        require(keys: ReadonlyArray<number>, msg: string): Argv<T>;
        require(keys: ReadonlyArray<number>, required: boolean): Argv<T>;
        require(positionals: number, required: boolean): Argv<T>;
        require(positionals: number, msg: string): Argv<T>;

        /**
         * @deprecated since version 6.6.0
         * Use '.demandCommand()' or '.demandOption()' instead
         */
        required<K extends keyof T>(key: K | ReadonlyArray<K>, msg?: string | true): Argv<Defined<T, K>>;
        required(key: string, msg: string): Argv<T>;
        required(key: string, required: boolean): Argv<T>;
        required(keys: ReadonlyArray<number>, msg: string): Argv<T>;
        required(keys: ReadonlyArray<number>, required: boolean): Argv<T>;
        required(positionals: number, required: boolean): Argv<T>;
        required(positionals: number, msg: string): Argv<T>;

        requiresArg(key: string | ReadonlyArray<string>): Argv<T>;

        /** Set the name of your script ($0). Default is the base filename executed by node (`process.argv[1]`) */
        scriptName($0: string): Argv<T>;

        /**
         * Generate a bash completion script.
         * Users of your application can install this script in their `.bashrc`, and yargs will provide completion shortcuts for commands and options.
         */
        showCompletionScript(): Argv<T>;

        /**
         * Configure the `--show-hidden` option that displays the hidden keys (see `hide()`).
         * @param option If `boolean`, it enables/disables this option altogether. i.e. hidden keys will be permanently hidden if first argument is `false`.
         * If `string` it changes the key name ("--show-hidden").
         * @param description Changes the default description ("Show hidden options")
         */
        showHidden(option?: string | boolean): Argv<T>;
        showHidden(option: string, description?: string): Argv<T>;

        /**
         * Print the usage data using the console function consoleLevel for printing.
         * @param [consoleLevel='error']
         */
        showHelp(consoleLevel?: string): Argv<T>;
        /**
         * Provide the usage data as a string.
         * @param printCallback a function with a single argument.
         */
        showHelp(printCallback: (s: string) => void): Argv<T>;

        /**
         * By default, yargs outputs a usage string if any error is detected.
         * Use the `.showHelpOnFail()` method to customize this behavior.
         * @param enable If `false`, the usage string is not output.
         * @param [message] Message that is output after the error message.
         */
        showHelpOnFail(enable: boolean, message?: string): Argv<T>;

        /** Specifies either a single option key (string), or an array of options. If any of the options is present, yargs validation is skipped. */
        skipValidation(key: string | ReadonlyArray<string>): Argv<T>;

        /**
         * Any command-line argument given that is not demanded, or does not have a corresponding description, will be reported as an error.
         *
         * Unrecognized commands will also be reported as errors.
         */
        strict(): Argv<T>;
        strict(enabled: boolean): Argv<T>;

        /**
         * Similar to .strict(), except that it only applies to unrecognized commands.
         * A user can still provide arbitrary options, but unknown positional commands
         * will raise an error.
         */
        strictCommands(): Argv<T>;
        strictCommands(enabled: boolean): Argv<T>;

        /**
         * Similar to `.strict()`, except that it only applies to unrecognized options. A
         * user can still provide arbitrary positional options, but unknown options
         * will raise an error.
         */
        strictOptions(): Argv<T>;
        strictOptions(enabled: boolean): Argv<T>;

        /**
         * Tell the parser logic not to interpret `key` as a number or boolean. This can be useful if you need to preserve leading zeros in an input.
         *
         * If `key` is an array, interpret all the elements as strings.
         *
         * `.string('_')` will result in non-hyphenated arguments being interpreted as strings, regardless of whether they resemble numbers.
         */
        string<K extends keyof T>(key: K | ReadonlyArray<K>): Argv<Omit<T, K> & { [key in K]: ToString<T[key]> }>;
        string<K extends string>(key: K | ReadonlyArray<K>): Argv<T & { [key in K]: string | undefined }>;

        // Intended to be used with '.wrap()'
        terminalWidth(): number;

        updateLocale(obj: { [key: string]: string }): Argv<T>;

        /**
         * Override the default strings used by yargs with the key/value pairs provided in obj
         *
         * If you explicitly specify a locale(), you should do so before calling `updateStrings()`.
         */
        updateStrings(obj: { [key: string]: string }): Argv<T>;

        /**
         * Set a usage message to show which commands to use.
         * Inside `message`, the string `$0` will get interpolated to the current script name or node command for the present script similar to how `$0` works in bash or perl.
         *
         * If the optional `description`/`builder`/`handler` are provided, `.usage()` acts as an alias for `.command()`.
         * This allows you to use `.usage()` to configure the default command that will be run as an entry-point to your application
         * and allows you to provide configuration for the positional arguments accepted by your program:
         */
        usage(message: string): Argv<T>;
        usage<U>(command: string | ReadonlyArray<string>, description: string, builder?: (args: Argv<T>) => Argv<U>, handler?: (args: Arguments<U>) => void): Argv<T>;
        usage<U>(command: string | ReadonlyArray<string>, showInHelp: boolean, builder?: (args: Argv<T>) => Argv<U>, handler?: (args: Arguments<U>) => void): Argv<T>;
        usage<O extends { [key: string]: Options }>(command: string | ReadonlyArray<string>, description: string, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void): Argv<T>;
        usage<O extends { [key: string]: Options }>(command: string | ReadonlyArray<string>, showInHelp: boolean, builder?: O, handler?: (args: Arguments<InferredOptionTypes<O>>) => void): Argv<T>;

        /**
         * Add an option (e.g. `--version`) that displays the version number (given by the version parameter) and exits the process.
         * By default yargs enables version for the `--version` option.
         *
         * If no arguments are passed to version (`.version()`), yargs will parse the package.json of your module and use its version value.
         *
         * If the boolean argument `false` is provided, it will disable `--version`.
         */
        version(): Argv<T>;
        version(version: string): Argv<T>;
        version(enable: boolean): Argv<T>;
        version(optionKey: string, version: string): Argv<T>;
        version(optionKey: string, description: string, version: string): Argv<T>;

        /**
         * Format usage output to wrap at `columns` many columns.
         *
         * By default wrap will be set to `Math.min(80, windowWidth)`. Use `.wrap(null)` to specify no column limit (no right-align).
         * Use `.wrap(yargs.terminalWidth())` to maximize the width of yargs' usage instructions.
*/
        wrap(columns: number | null): Argv<T>;
    }

    /** The parsed-arguments object: option values plus positional arguments and the script name. */
    type Arguments<T = {}> = T & {
        /** Non-option arguments */
        _: Array<string | number>;
        /** The script name or node command */
        $0: string;
        /** All remaining options */
        [argName: string]: unknown;
    };

    interface RequireDirectoryOptions {
        /** Look for command modules in all subdirectories and apply them as a flattened (non-hierarchical) list. */
        recurse?: boolean;
        /** The types of files to look for when requiring command modules. */
        extensions?: ReadonlyArray<string>;
        /**
         * A synchronous function called for each command module encountered.
         * Accepts `commandObject`, `pathToFile`, and `filename` as arguments.
         * Returns `commandObject` to include the command; any falsy value to exclude/skip it.
         */
        visit?: (commandObject: any, pathToFile?: string, filename?: string) => any;
        /** Whitelist certain modules */
        include?: RegExp | ((pathToFile: string) => boolean);
        /** Blacklist certain modules. */
        exclude?: RegExp | ((pathToFile: string) => boolean);
    }

    interface Options {
        /** string or array of strings, alias(es) for the canonical option key, see `alias()` */
        alias?: string | ReadonlyArray<string>;
        /** boolean, interpret option as an array, see `array()` */
        array?: boolean;
        /** boolean, interpret option as a boolean flag, see `boolean()` */
        boolean?: boolean;
        /** value or array of values, limit valid option arguments to a predefined set, see `choices()` */
        choices?: Choices;
        /** function, coerce or transform parsed command line values into another value, see `coerce()` */
        coerce?: (arg: any) => any;
        /** boolean, interpret option as a path to a JSON config file, see `config()` */
        config?: boolean;
        /** function, provide a custom config parsing function, see `config()` */
        configParser?: (configPath: string) => object;
        /** string or object, require certain keys not to be set, see `conflicts()` */
        conflicts?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
        /** boolean, interpret option as a count of boolean flags, see `count()` */
        count?: boolean;
        /** value, set a default value for the option, see `default()` */
        default?: any;
        /** string, use this description for the default value in help content, see `default()` */
        defaultDescription?: string;
        /**
         * @deprecated since version 6.6.0
         * Use 'demandOption' instead
         */
        demand?: boolean | string;
        /** boolean or string, mark the argument as deprecated, see `deprecateOption()` */
        deprecate?: boolean | string;
        /** boolean or string, mark the argument as deprecated, see `deprecateOption()` */
        deprecated?: boolean | string;
        /** boolean or string, demand the option be given, with optional error message, see `demandOption()` */
        demandOption?: boolean | string;
        /** string, the option description for help content, see `describe()` */
        desc?: string;
        /** string, the option description for help content, see `describe()` */
        describe?: string;
        /** string, the option description for help content, see `describe()` */
        description?: string;
        /** boolean, indicate that this key should not be reset when a command is invoked, see `global()` */
        global?: boolean;
        /** string, when displaying usage instructions place the option under an alternative group heading, see `group()` */
        group?: string;
        /** don't display option in help output. */
        hidden?: boolean;
        /** string or object, require certain keys to be set, see `implies()` */
        implies?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
        /** number, specify how many arguments should be consumed for the option, see `nargs()` */
        nargs?: number;
        /** boolean, apply path.normalize() to the option, see `normalize()` */
        normalize?: boolean;
        /** boolean, interpret option as a number, `number()` */
        number?: boolean;
        /**
         * @deprecated since version 6.6.0
         * Use 'demandOption' instead
         */
        require?: boolean | string;
        /**
         * @deprecated since version 6.6.0
         * Use 'demandOption' instead
         */
        required?: boolean | string;
        /** boolean, require the option be specified with a value, see `requiresArg()` */
        requiresArg?: boolean;
        /** boolean, skips validation if the option is present, see `skipValidation()` */
        skipValidation?: boolean;
        /** boolean, interpret option as a string, see `string()` */
        string?: boolean;
        type?: "array" | "count" | PositionalOptionsType;
    }

    interface PositionalOptions {
        /** string or array of strings, see `alias()` */
        alias?: string | ReadonlyArray<string>;
        /** boolean, interpret option as an array, see `array()` */
        array?: boolean;
        /** value or array of values, limit valid option arguments to a predefined set, see `choices()` */
        choices?: Choices;
        /** function, coerce or transform parsed command line values into another value, see `coerce()` */
        coerce?: (arg: any) => any;
        /** string or object, require certain keys not to be set, see `conflicts()` */
        conflicts?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
        /** value, set a default value for the option, see `default()` */
        default?: any;
        /** boolean or string, demand the option be given, with optional error message, see `demandOption()` */
        demandOption?: boolean | string;
        /** string, the option description for help content, see `describe()` */
        desc?: string;
        /** string, the option description for help content, see `describe()` */
        describe?: string;
        /** string, the option description for help content, see `describe()` */
        description?: string;
        /** string or object, require certain keys to be set, see `implies()` */
        implies?: string | ReadonlyArray<string> | { [key: string]: string | ReadonlyArray<string> };
        /** boolean, apply path.normalize() to the option, see `normalize()` */
        normalize?: boolean;
        type?: PositionalOptionsType;
    }

    /** Remove keys K in T */
    type Omit<T, K> = { [key in Exclude<keyof T, K>]: T[key] };
    /** Remove undefined as a possible value for keys K in T */
    type Defined<T, K extends keyof T> = Omit<T, K> & { [key in K]: Exclude<T[key], undefined> };
    /** Convert T to T[] and T | undefined to T[] | undefined */
    type ToArray<T> = Array<Exclude<T, undefined>> | Extract<T, undefined>;
    /** Gives string[] if T is an array type, otherwise string. Preserves | undefined. */
    type ToString<T> = (Exclude<T, undefined> extends any[] ? string[] : string) | Extract<T, undefined>;
    /** Gives number[] if T is an array type, otherwise number. Preserves | undefined. */
    type ToNumber<T> = (Exclude<T, undefined> extends any[] ? number[] : number) | Extract<T, undefined>;

    /**
     * Maps an option-configuration object to the type of the value it parses to.
     * When the option is demanded/required, `undefined` is removed from the result.
     */
    type InferredOptionType<O extends Options | PositionalOptions> =
        O extends (
            | { required: string | true }
            | { require: string | true }
            | { demand: string | true }
            | { demandOption: string | true }
        ) ?
        Exclude<InferredOptionTypeInner<O>, undefined> :
        InferredOptionTypeInner<O>;

    /** Helper for `InferredOptionType`: resolves `default`/`coerce`/`count` before falling back to `RequiredOptionType`. */
    type InferredOptionTypeInner<O extends Options | PositionalOptions> =
        O extends { default: any, coerce: (arg: any) => infer T } ? T :
        O extends { default: infer D } ? D :
        O extends { type: "count" } ? number :
        O extends { count: true } ? number :
        RequiredOptionType<O> | undefined;

    /** Derives the parsed value type from the `type`/`array`/`boolean`/`number`/`string`/`normalize`/`choices`/`coerce` flags of an option configuration. */
    type RequiredOptionType<O extends Options | PositionalOptions> =
        O extends { type: "array", string: true } ? string[] :
        O extends { type: "array", number: true } ? number[] :
        O extends { type: "array", normalize: true } ? string[] :
        O extends { type: "string", array: true } ? string[] :
        O extends { type: "number", array: true } ? number[] :
        O extends { string: true, array: true } ? string[] :
        O extends { number: true, array: true } ? number[] :
        O extends { normalize: true, array: true } ? string[] :
        O extends { type: "array" } ? Array<string | number> :
        O extends { type: "boolean" } ? boolean :
        O extends { type: "number" } ? number :
        O extends { type: "string" } ? string :
        O extends { array: true } ? Array<string | number> :
        O extends { boolean: true } ? boolean :
        O extends { number: true } ? number :
        O extends { string: true } ? string :
        O extends { normalize: true } ? string :
        O extends { choices: ReadonlyArray<infer C> } ? C :
        O extends { coerce: (arg: any) => infer T } ? T :
        unknown;

    /** Applies `InferredOptionType` to every key of an options map. */
    type InferredOptionTypes<O extends { [key: string]: Options }> = { [key in keyof O]: InferredOptionType<O[key]> };

    interface CommandModule<T = {}, U = {}> {
        /** array of strings (or a single string) representing aliases of `exports.command`, positional args defined in an alias are ignored */
        aliases?: ReadonlyArray<string> | string;
        /** object declaring the options the command accepts, or a function accepting and returning a yargs instance */
        builder?: CommandBuilder<T, U>;
        /** string (or array of strings) that executes this command when given on the command line, first string may contain positional args */
        command?: ReadonlyArray<string> | string;
        /** boolean (or string) to show deprecation notice */
        deprecated?: boolean | string;
        /** string used as the description for the command in help text, use `false` for a hidden command */
        describe?: string | false;
        /** a function which will be passed the parsed argv. */
        handler: (args: Arguments<U>) => void;
    }

    /** Callback passed to `parse()`; receives any error, the parsed argv, and yargs' generated text output. */
    type ParseCallback<T = {}> = (err: Error | undefined, argv: Arguments<T>|Promise<Arguments<T>>, output: string) => void;
    /** A command's builder: either an options map or a function that configures (and returns) a yargs instance. */
    type CommandBuilder<T = {}, U = {}> = { [key: string]: Options } | ((args: Argv<T>) => Argv<U>) | ((args: Argv<T>) => PromiseLike<Argv<U>>);
    /** Synchronous custom completion handler, see `completion()`. */
    type SyncCompletionFunction = (current: string, argv: any) => string[];
    /** Asynchronous (callback-style) custom completion handler, see `completion()`. */
    type AsyncCompletionFunction = (current: string, argv: any, done: (completion: ReadonlyArray<string>) => void) => void;
    /** Promise-returning custom completion handler, see `completion()`. */
    type PromiseCompletionFunction = (current: string, argv: any) => Promise<string[]>;
    /** Global middleware function registered via `middleware()`. */
    type MiddlewareFunction<T = {}> = (args: Arguments<T>) => void;
    /** Valid values for an option's `choices` configuration. */
    type Choices = ReadonlyArray<string | number | true | undefined>;
    /** Primitive option types accepted for positional arguments. */
    type PositionalOptionsType = "boolean" | "number" | "string";
}

declare var yargs: yargs.Argv;

export = yargs;
georgemarshall/DefinitelyTyped
types/yargs/index.d.ts
TypeScript
mit
48,896
using Mono.Cecil;

namespace Cake.Web.Docs.Reflection.Model
{
    /// <summary>
    /// Represents reflected method information.
    /// </summary>
    public interface IMethodInfo
    {
        /// <summary>
        /// Gets the method identity.
        /// </summary>
        /// <value>The method identity.</value>
        string Identity { get; }

        /// <summary>
        /// Gets the method definition.
        /// </summary>
        /// <value>
        /// The method definition.
        /// </value>
        MethodDefinition Definition { get; }

        /// <summary>
        /// Gets the associated metadata.
        /// </summary>
        /// <value>The associated metadata.</value>
        IDocumentationMetadata Metadata { get; }
    }
}
naasking/website
src/Cake.Web.Docs/Reflection/Model/IMethodInfo.cs
C#
mit
701