repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
xorgy/phoronix-test-suite | pts-core/objects/phodevi/parsers/phodevi_bsd_parser.php | 2403 | <?php
/*
Phoronix Test Suite
URLs: http://www.phoronix.com, http://www.phoronix-test-suite.com/
Copyright (C) 2009 - 2013, Phoronix Media
Copyright (C) 2009 - 2013, Michael Larabel
phodevi_bsd_parser.php: General parsing functions specific to BSD
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
class phodevi_bsd_parser
{
	/**
	 * Reads one or more sysctl values, used by the *BSDs.
	 *
	 * @param string|array $desc sysctl name(s); the first one that yields a
	 *                           valid value wins.
	 * @return string|false trimmed sysctl value, or false when sysctl is not
	 *                      available or none of the names resolved.
	 */
	public static function read_sysctl($desc)
	{
		$info = false;

		if(pts_client::executable_in_path('sysctl'))
		{
			$desc = pts_arrays::to_array($desc);

			for($i = 0; $i < count($desc) && empty($info); $i++)
			{
				$output = shell_exec('sysctl ' . $desc[$i] . ' 2>&1');

				// sysctl prints "name: value" on FreeBSD and "name=value" on
				// NetBSD/OpenBSD; reject the known error strings for all three
				if((($point = strpos($output, ':')) > 0 || ($point = strpos($output, '=')) > 0) && strpos($output, 'unknown oid') === false && strpos($output, 'is invalid') === false && strpos($output, 'not available') === false)
				{
					$info = trim(substr($output, $point + 1));
				}
			}
		}

		return $info;
	}
	/**
	 * Reads a variable from the FreeBSD kernel environment via kenv(1).
	 *
	 * @param string $v kernel environment variable name
	 * @return string|null value with surrounding quotes stripped, or null when
	 *                     kenv is unavailable, the variable is missing, or its
	 *                     value is the literal string 'empty'.
	 */
	public static function read_kenv($v)
	{
		$ret = null;

		if(pts_client::executable_in_path('kenv'))
		{
			$kenv = shell_exec('kenv 2> /dev/null');
			// anchor the match on "\n<name>=" so substring names cannot match
			$v = PHP_EOL . $v . '=';

			if(($x = strpos($kenv, $v)) !== false)
			{
				$ret = substr($kenv, ($x + strlen($v)));
				$ret = substr($ret, 0, strpos($ret, PHP_EOL));

				// strip a surrounding quote pair; the strlen guard avoids an
				// undefined-index notice on empty values and avoids mangling a
				// single-character '"' value
				if(strlen($ret) > 1 && $ret[0] == '"' && $ret[(strlen($ret) - 1)] == '"')
				{
					$ret = substr($ret, 1, -1);
				}
			}
		}

		return $ret != 'empty' ? $ret : null;
	}
	/**
	 * Reads a field from acpiconf -i0 battery/ACPI information (FreeBSD).
	 *
	 * @param string $desc field label to look up (text before the colon)
	 * @return string|false trimmed field value, or false when acpiconf is not
	 *                      available or the field is not present.
	 */
	public static function read_acpiconf($desc)
	{
		$info = false;

		// BUG FIX: the '2> /dev/null' redirection was previously passed as part
		// of the executable name to executable_in_path(), so this check always
		// failed and acpiconf data was never read
		if(pts_client::executable_in_path('acpiconf'))
		{
			$output = shell_exec('acpiconf -i0 2> /dev/null');

			if(($point = strpos($output, $desc . ':')) !== false)
			{
				$info = substr($output, $point + strlen($desc) + 1);
				$info = substr($info, 0, strpos($info, "\n"));
				$info = trim($info);
			}
		}

		return $info;
	}
}
?>
| gpl-3.0 |
zarelit/nodeshot | nodeshot/community/notifications/tasks.py | 1088 | from celery import task
from django.core import management
from .settings import TEXTS
@task
def purge_notifications():
    """
    Celery task that deletes old notifications by delegating to the
    ``purge_notifications`` Django management command.
    """
    management.call_command('purge_notifications')
# ------ Asynchronous tasks ------ #
@task
def create_notifications(users, notification_model, notification_type, related_object):
    """
    Celery task that creates one notification per user in the background,
    so request handling is not slowed down.

    ``notification_model`` is passed in (instead of imported) to keep this
    task decoupled from the models module; per-user notification settings
    are enforced inside the model itself.
    """
    # build the message once: TEXTS templates may interpolate attributes of
    # the related object via its __dict__
    interpolation = related_object.__dict__ if related_object else ''
    text = TEXTS[notification_type] % interpolation

    for recipient in users:
        notification = notification_model(
            to_user=recipient,
            type=notification_type,
            text=text
        )
        # link the related object when one was supplied
        if related_object:
            notification.related_object = related_object
        # persist; the model decides whether/how to deliver it
        notification.save()
| gpl-3.0 |
IvenBach/Rubberduck | Rubberduck.VBEditor.VBA/SafeComWrappers/VB/Control.cs | 1211 | using Rubberduck.VBEditor.SafeComWrappers.Abstract;
using VB = Microsoft.Vbe.Interop;
// ReSharper disable once CheckNamespace - Special dispensation due to conflicting file vs namespace priorities
namespace Rubberduck.VBEditor.SafeComWrappers.VBA
{
/// <summary>
/// Safe COM wrapper around an MSForms <see cref="VB.Forms.Control"/>.
/// </summary>
public class Control : SafeComWrapper<VB.Forms.Control>, IControl
{
    public Control(VB.Forms.Control target, bool rewrapping = false)
        : base(target, rewrapping)
    {
    }

    /// <summary>
    /// The control's name; reads as empty and writes are ignored when the
    /// wrapper holds a null reference.
    /// </summary>
    public string Name
    {
        get
        {
            return IsWrappingNullReference ? string.Empty : Target.Name;
        }
        set
        {
            if (!IsWrappingNullReference)
            {
                Target.Name = value;
            }
        }
    }

    public override bool Equals(ISafeComWrapper<VB.Forms.Control> other)
    {
        if (IsEqualIfNull(other))
        {
            return true;
        }
        return other != null && ReferenceEquals(other.Target, Target);
    }

    public bool Equals(IControl other)
    {
        return Equals(other as SafeComWrapper<VB.Forms.Control>);
    }

    public override int GetHashCode()
    {
        // hash the underlying COM target; null wrappers all hash to zero
        return IsWrappingNullReference ? 0 : Target.GetHashCode();
    }

    protected override void Dispose(bool disposing) => base.Dispose(disposing);
}
} | gpl-3.0 |
mateor/pdroid | android-2.3.4_r1/tags/1.26/frameworks/base/media/libstagefright/codecs/aacdec/tns_ar_filter.cpp | 14127 | /* ------------------------------------------------------------------
* Copyright (C) 1998-2009 PacketVideo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
* -------------------------------------------------------------------
*/
/*
Pathname: tns_ar_filter.c
------------------------------------------------------------------------------
REVISION HISTORY
Description: Modified from original shareware code
Description: Implemented 24-bit fixed point version
Optimized C code
Description:
- Added OVERFLOW_SHIFT_DOWN to avoid overflow.
- Increased precision by using the Q format of the LPC coefficient.
- Modified interface to add LPC Q format and scratch memory
for the state variables.
- Added pv_memset to clear state filter
- Updated format for comments (to PV standard)
- Updated copyright notice
Description:
- Changed multiplication scheme to increase precision. This
works better than older version.
Description:
- Include log2(order) as a scaling down parameter.
Description:
Modified to reflect code review comments
- misspelled words, extra comments and explicit requirements
Description:
deleted comment about fix Q format (L 107)
Description: Implemented a more efficient version, which eliminated the use
of "scratch memory" via introducing a pointer that references the actual
output.
Description: Removed the parameter "scratch_Int32_buffer" as this space
in memory is no longer needed by this function.
Description: Removed references to "scratch_Int32_buffer" in the Inputs
section.
Description:
Modified casting to ensure proper operations for different platforms
Description:
Per code review comment:
Eliminated casting to UInt and Int in b_low and b_high, they are
redundant and may add unnecessary extra cycles on some platforms
Description: Updated the SW template to include the full pathname to the
source file and a slightly modified copyright header.
Description: Changed the order of the unsigned * signed multiply so the
casting to Int32 is performed on the unsigned operand.
Description:
Modified 32 by 16 bit multiplications to avoid unnecessary moves to
registers. Also split the code (based on flag direction) to simplify
pointer's updates
Description:
------------------------------------------------------------------------------
INPUT AND OUTPUT DEFINITIONS
Inputs:
spec = spectral input to be shaped by the filter.
Fixed point format
Int32[]
length = spec_length
spec_length = length of spec array.
const Int
direction = direction for application of tns filter.
+1 filters spectrum from low to high frequencies
(first input to filter is spec[0])
-1 filters spectrum from high to low frequencies
(first input to filter is spec[spec_length-1])
const Int
lpc = array of lpc coefficients, minus lpc[0] which is assumed to be "1"
Fixed point format
const Int[]
length = TNS_MAX_ORDER
Q_lpc = Q format for the lpc coefficients (for max. precision, it assumes
that all 16 bits are used)
const Int
order = order of the TNS filter (Range of 1 - TNS_MAX_ORDER)
Int
Local Stores/Buffers/Pointers Needed:
None
Global Stores/Buffers/Pointers Needed:
None
Outputs:
None
Pointers and Buffers Modified:
spec = contains spectral data after application of TNS filter
Int32 array
length = spec_length
Local Stores Modified:
Global Stores Modified:
------------------------------------------------------------------------------
FUNCTION DESCRIPTION
A block of spectral data (Int32 spec[]) of length (const Int spec_length)
is processed by a simple all-pole filter defined by
LPC coefficients passed via (const Int lpc[])
TNS filter equation
y(n) = x(n) - lpc(2)*y(n-1) - ... - lpc(order+1)*y(n-order)
The filter calculation is performed in place, i.e. the output is passed
back to the calling function via (Int32 spec[])
The filter's order is defined by the variable (const Int order)
The direction of the filter's application is defined by (const Int inc)
------------------------------------------------------------------------------
REQUIREMENTS
This function should match the functionality of the ISO code.
The implementation does support filter orders bigger or equal to 1.
The number of spectral coefficients has to be greater than or equal to 1.
------------------------------------------------------------------------------
REFERENCES
(1) ISO/IEC 14496-3:1999(E)
Part 3
Subpart 4.6.8 (Temporal Noise Shaping)
(2) MPEG-2 NBC Audio Decoder
"This software module was originally developed by AT&T, Dolby
Laboratories, Fraunhofer Gesellschaft IIS in the course of development
of the MPEG-2 NBC/MPEG-4 Audio standard ISO/IEC 13818-7, 14496-1,2 and
3. This software module is an implementation of a part of one or more
MPEG-2 NBC/MPEG-4 Audio tools as specified by the MPEG-2 NBC/MPEG-4
Audio standard. ISO/IEC gives users of the MPEG-2 NBC/MPEG-4 Audio
standards free license to this software module or modifications thereof
for use in hardware or software products claiming conformance to the
MPEG-2 NBC/MPEG-4 Audio standards. Those intending to use this software
module in hardware or software products are advised that this use may
infringe existing patents. The original developer of this software
module and his/her company, the subsequent editors and their companies,
and ISO/IEC have no liability for use of this software module or
modifications thereof in an implementation. Copyright is not released
for non MPEG-2 NBC/MPEG-4 Audio conforming products.The original
developer retains full right to use the code for his/her own purpose,
assign or donate the code to a third party and to inhibit third party
from using the code for non MPEG-2 NBC/MPEG-4 Audio conforming products.
This copyright notice must be included in all copies or derivative
works."
Copyright(c)1996.
------------------------------------------------------------------------------
PSEUDO-CODE
FOR (i=0; i<order; i++)
state[i] = 0;
ENDFOR
IF (inc == -1)
THEN
spec = spec + spec_length - 1;
ENDIF
FOR (i=0; i<spec_length; i++)
y = *spec;
FOR (j=0; j<order; j++)
y -= lpc[j] * state[j];
ENDFOR
FOR (j=order-1; j>0; j--)
state[j] = state[j-1];
ENDFOR
state[0] = y;
*spec = y;
spec = spec + inc;
ENDFOR
------------------------------------------------------------------------------
RESOURCES USED
When the code is written for a specific target processor
the resources used should be documented below.
STACK USAGE: [stack count for this module] + [variable to represent
stack usage for each subroutine called]
where: [stack usage variable] = stack usage for [subroutine
name] (see [filename].ext)
DATA MEMORY USED: x words
PROGRAM MEMORY USED: x words
CLOCK CYCLES: [cycle count equation for this module] + [variable
used to represent cycle count for each subroutine
called]
where: [cycle count variable] = cycle count for [subroutine
name] (see [filename].ext)
------------------------------------------------------------------------------
*/
/*----------------------------------------------------------------------------
; INCLUDES
----------------------------------------------------------------------------*/
#include "pv_audio_type_defs.h"
#include "e_tns_const.h"
#include "tns_ar_filter.h"
#include "fxp_mul32.h"
/*----------------------------------------------------------------------------
; MACROS
; Define module specific macros here
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; DEFINES
; Include all pre-processor statements here. Include conditional
; compile variables also.
----------------------------------------------------------------------------*/
#define MASK_LOW16 0xFFFF
#define UPPER16 16
/*----------------------------------------------------------------------------
; LOCAL FUNCTION DEFINITIONS
; Function Prototype declaration
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; LOCAL STORE/BUFFER/POINTER DEFINITIONS
; Variable declaration - defined here and used outside this module
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; EXTERNAL FUNCTION REFERENCES
; Declare functions defined elsewhere and referenced in this module
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; EXTERNAL GLOBAL STORE/BUFFER/POINTER REFERENCES
; Declare variables used in this module but defined elsewhere
----------------------------------------------------------------------------*/
/*----------------------------------------------------------------------------
; FUNCTION CODE
----------------------------------------------------------------------------*/
/*
 * Applies the TNS all-pole filter y(n) = x(n) - sum(lpc[j]*y(n-j-1)) in place
 * over spec[], either low-to-high (direction == +1) or high-to-low
 * (direction == -1).  Inputs are pre-scaled down to guard against overflow;
 * the total scale-down applied is returned so the caller can compensate.
 */
Int tns_ar_filter(
    Int32 spec[],
    const Int spec_length,
    const Int direction,
    const Int32 lpc[],
    const Int Q_lpc,
    const Int order)
{

    Int i;
    Int j;

    /*
     * Multiplication related variables
     */

    Int32 temp;

    /*
     * Filter related variables
     */
    Int32 y0;

    /*
     * Circular buffer to hold the filter's state
     * (y[n-1],y[n-2],y[n-3],etc.)
     *
     * p_state and p_lpc should take advantage
     * of any special circular buffer instructions
     * if this code is hand-optimized in assembly.
     */
    Int32 *p_state = NULL;

    const Int32 *p_lpc;

    Int shift_up;
    Int shift_down_amount;

    /*
     * Pointer to the I/O memory space
     */
    Int32 *p_spec = spec;

    i = 0;
    j = order;

    /*
     * get the power of 2 that is bigger than the order
     * i is the bit counter and j is modified until exceed
     * the power of 2 corresponding to TNS_MAX_ORDER
     */
    while (j < 0x010)
    {
        j <<= 1;
        i++;
    }

    /*
     * 5 is the number of bits needed to represent 0x010
     * TNS_MAX_ORDER = 20, power of 2 that include 20 is 5
     */
    shift_down_amount = 4 - i;

    shift_up = UPPER16 - Q_lpc;

    /*
     * shift_down_amount == power of 2 that is bigger than the order - 1
     * (combined with the LPC Q-format compensation shift)
     */
    shift_down_amount += shift_up;

    if (direction == -1)
    {
        /* start from the last coefficient and walk backwards */
        p_spec += spec_length - 1;

        /*
         * Warm-up phase: for the first `order` outputs only the already
         * computed samples (j > i of them) feed back into the filter, so
         * p_state is not dereferenced before it is first assigned below.
         */
        for (i = order; i != 0; i--)
        {
            y0 = *p_spec >> shift_down_amount;

            p_lpc = lpc;

            /* 32 by 32 bit multiplication */
            for (j = order; j > i; j--)
            {
                temp = *p_state++;
                y0 -= fxp_mul32_Q31(temp, *(p_lpc++)) << shift_up;
            }

            /*
             * Record the output in-place; the output buffer doubles as the
             * filter state (p_state aliases the sample just written).
             */
            p_state = p_spec;
            *(p_spec--) = y0;
        }

        if (spec_length > order)
        {
            /* steady state: full-order feedback for the remaining samples */
            for (i = (spec_length - order); i != 0; i--)
            {
                y0 = *p_spec >> shift_down_amount;

                p_lpc = &(lpc[0]);

                /* 32 by 32 bit multiplication */
                for (j = order; j != 0; j--)
                {
                    temp = *p_state++;
                    y0 -= fxp_mul32_Q31(temp, *(p_lpc++)) << shift_up;
                }

                /*
                 * Record the output in-place
                 */
                p_state = p_spec;
                *(p_spec--) = y0;

            } /* END for (i = (spec_length - order); i>0; i--) */
        }
    }
    else
    {
        /* forward direction: same warm-up / steady-state split, but the
         * state walks backwards (p_state--) while the output walks forward */
        for (i = order; i != 0; i--)
        {
            p_lpc = lpc;
            y0 = 0;

            /* 32 by 32 bit multiplication */
            for (j = order; j > i; j--)
            {
                y0 -= fxp_mul32_Q31(*p_state--, *(p_lpc++));
            }

            p_state = p_spec;

            /*
             * Record the output in-place (input scaled down, accumulated
             * feedback scaled back up by the LPC Q-format shift)
             */
            *(p_spec) = (*p_spec >> shift_down_amount) + (y0 << shift_up);
            p_spec++;
        }

        if (spec_length > order)
        {
            for (i = (spec_length - order); i != 0; i--)
            {
                p_lpc = lpc;
                y0 = 0;

                /* 32 by 32 bit multiplication */
                for (j = order; j != 0; j--)
                {
                    y0 -= fxp_mul32_Q31(*p_state--, *(p_lpc++));
                }

                p_state = p_spec;

                /*
                 * Record the output in-place
                 */
                *(p_spec) = (*p_spec >> shift_down_amount) + (y0 << shift_up);
                p_spec++;

            } /* END for (i = (spec_length - order); i>0; i--) */
        }
    }

    /* callers undo this scaling after the filter runs */
    return(shift_down_amount);

} /* tns_ar_filter */
| gpl-3.0 |
opedroso/czmq | bindings/jni/czmq-jni/src/test/java/org/zeromq/czmq/ZuuidTest.java | 547 | /*
################################################################################
# THIS FILE IS 100% GENERATED BY ZPROJECT; DO NOT EDIT EXCEPT EXPERIMENTALLY #
# Read the zproject/README.md for information about making permanent changes. #
################################################################################
*/
package org.zeromq.czmq;
import org.junit.Assert;
import org.junit.Test;
import org.scijava.nativelib.NativeLoader;
public class ZuuidTest {
    /**
     * Runs the native zuuid self-test (non-verbose); the JNI binding is
     * expected to complete without throwing.
     */
    @Test
    public void test () {
        Zuuid.test (false);
    }
}
| mpl-2.0 |
alesstimec/juju | environs/manual/winrmprovisioner/provisioner_test.go | 2642 | package winrmprovisioner_test
import (
"bytes"
"fmt"
"io"
jc "github.com/juju/testing/checkers"
gc "gopkg.in/check.v1"
"github.com/juju/juju/apiserver/params"
"github.com/juju/juju/environs/manual"
"github.com/juju/juju/environs/manual/winrmprovisioner"
)
// TestClientAPI is a stub implementation of the provisioner client API used
// by the tests in this package.
type TestClientAPI struct{}

// AddMachines pretends to add machines and always reports a single
// zero-value result with no error.
func (t TestClientAPI) AddMachines(p []params.AddMachineParams) ([]params.AddMachinesResult, error) {
	return make([]params.AddMachinesResult, 1, 1), nil
}

// ForceDestroyMachines fails when no machine ids are supplied (a variadic
// call with zero arguments yields a nil slice) and succeeds otherwise.
func (t TestClientAPI) ForceDestroyMachines(machines ...string) error {
	if machines == nil {
		// BUG FIX: the message previously read "epty machines"
		return fmt.Errorf("empty machines")
	}
	return nil
}

// ProvisioningScript returns a canned provisioning script regardless of the
// parameters.
func (t TestClientAPI) ProvisioningScript(param params.ProvisioningScriptParams) (script string, err error) {
	return "magnifi script", nil
}
// provisionerSuite wires the stub client API into the manual WinRM
// provisioning tests.
type provisionerSuite struct {
	client *TestClientAPI
}

// Register the suite with gocheck.
var _ = gc.Suite(&provisionerSuite{})

// getArgs builds a baseline ProvisionMachineArgs pointing at the local WinRM
// listener address, backed by a fresh stub client API.
func (s *provisionerSuite) getArgs(c *gc.C) manual.ProvisionMachineArgs {
	s.client = &TestClientAPI{}
	return manual.ProvisionMachineArgs{
		Host:   winrmListenerAddr,
		User:   "Administrator",
		Client: s.client,
	}
}
// TestProvisionMachine drives ProvisionMachine through three scenarios using
// a fake WinRM client: no hardware details returned, a successful detection
// run, and a machine that is already provisioned.
func (s *provisionerSuite) TestProvisionMachine(c *gc.C) {
	var err error
	args := s.getArgs(c)

	var stdin, stderr, stdout bytes.Buffer
	args.Stdin, args.Stdout, args.Stderr = &stdin, &stderr, &stdout

	args.WinRM = manual.WinRMArgs{}
	args.WinRM.Client = &fakeWinRM{
		fakePing: func() error {
			return nil
		},
		// Run succeeds but writes no output at all.
		fakeRun: func(cmd string, stdout, stderr io.Writer) error {
			return nil
		},
	}

	// This should fail: no hardware fields are produced when running the
	// powershell detection script.
	machineId, err := winrmprovisioner.ProvisionMachine(args)
	c.Assert(err, gc.NotNil)
	c.Assert(machineId, jc.DeepEquals, "")

	// Fake a successful detection run: arch, memory, series and core count.
	args.WinRM.Client = &fakeWinRM{
		fakePing: func() error {
			return nil
		},
		fakeRun: func(cmd string, stdout, stderr io.Writer) error {
			c.Assert((len(cmd) > 0), gc.Equals, true)
			fmt.Fprintf(stdout, "amd64\r\n")
			fmt.Fprintf(stdout, "16\r\n")
			fmt.Fprintf(stdout, "win2012r2\r\n")
			fmt.Fprintf(stdout, "4\r\n")
			return nil
		},
	}
	machineId, err = winrmprovisioner.ProvisionMachine(args)
	c.Assert(err, gc.IsNil)
	c.Assert(machineId, jc.DeepEquals, "")

	// This should report that the machine is already provisioned.
	args.WinRM.Client = &fakeWinRM{
		fakePing: func() error {
			return nil
		},
		fakeRun: func(cmd string, stdout, stderr io.Writer) error {
			c.Assert((len(cmd) > 0), gc.Equals, true)
			fmt.Fprintf(stdout, "Yes\r\n")
			return nil
		},
	}
	machineId, err = winrmprovisioner.ProvisionMachine(args)
	c.Assert(err.Error(), jc.DeepEquals, "machine is already provisioned")
	c.Assert(machineId, jc.DeepEquals, "")
}
| agpl-3.0 |
acbodine/koding | go/src/koding/remoteapi/client/j_compute_stack/j_compute_stack_some_responses.go | 2825 | package j_compute_stack
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
strfmt "github.com/go-openapi/strfmt"
"koding/remoteapi/models"
)
// JComputeStackSomeReader is a Reader for the JComputeStackSome structure.
type JComputeStackSomeReader struct {
	// formats holds the strfmt registry used when decoding response payloads.
	formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
// HTTP 200 yields the decoded success payload; HTTP 401 yields the decoded
// UnauthorizedRequest as an error value; any other status code becomes a
// generic runtime API error.
func (o *JComputeStackSomeReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {

	case 200:
		result := NewJComputeStackSomeOK()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil

	case 401:
		result := NewJComputeStackSomeUnauthorized()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		// note: the 401 result is returned in the error position
		return nil, result

	default:
		return nil, runtime.NewAPIError("unknown error", response, response.Code())
	}
}
// NewJComputeStackSomeOK creates a JComputeStackSomeOK with default headers values
func NewJComputeStackSomeOK() *JComputeStackSomeOK {
	return &JComputeStackSomeOK{}
}

/*JComputeStackSomeOK handles this case with default header values.

Request processed successfully
*/
type JComputeStackSomeOK struct {
	// Payload carries the decoded 200 response body.
	Payload *models.DefaultResponse
}

// Error describes the response for logging/error-reporting purposes.
func (o *JComputeStackSomeOK) Error() string {
	return fmt.Sprintf("[POST /remote.api/JComputeStack.some][%d] jComputeStackSomeOK %+v", 200, o.Payload)
}

// readResponse decodes the response body into o.Payload (io.EOF means an
// empty body and is tolerated).
func (o *JComputeStackSomeOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.DefaultResponse)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
// NewJComputeStackSomeUnauthorized creates a JComputeStackSomeUnauthorized with default headers values
func NewJComputeStackSomeUnauthorized() *JComputeStackSomeUnauthorized {
	return &JComputeStackSomeUnauthorized{}
}

/*JComputeStackSomeUnauthorized handles this case with default header values.

Unauthorized request
*/
type JComputeStackSomeUnauthorized struct {
	// Payload carries the decoded 401 response body.
	Payload *models.UnauthorizedRequest
}

// Error describes the response for logging/error-reporting purposes.
func (o *JComputeStackSomeUnauthorized) Error() string {
	return fmt.Sprintf("[POST /remote.api/JComputeStack.some][%d] jComputeStackSomeUnauthorized %+v", 401, o.Payload)
}

// readResponse decodes the response body into o.Payload (io.EOF means an
// empty body and is tolerated).
func (o *JComputeStackSomeUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {

	o.Payload = new(models.UnauthorizedRequest)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
| agpl-3.0 |
hackathon-oscs/rede-osc | lib/noosfero/constants.rb | 148 | module Noosfero::Constants
EMAIL_FORMAT = /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i
INTEGER_FORMAT = /\A\d*\Z/i
PROFILE_PER_PAGE = 10
end
| agpl-3.0 |
UniversityOfHawaii/kfs | kfs-kc/src/main/java/org/kuali/kra/external/sponsor/SponsorWebService.java | 1615 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kra.external.sponsor;
import java.util.List;
import javax.jws.WebParam;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import org.kuali.kfs.module.external.kc.KcConstants;
import org.kuali.kfs.module.external.kc.dto.SponsorCriteriaDto;
import org.kuali.kfs.module.external.kc.dto.SponsorDTO;
/**
 * SOAP web service exposing sponsor data lookups (document/literal wrapped
 * binding, KC namespace).
 */
@WebService(name = "SponsorWebService", targetNamespace = KcConstants.KC_NAMESPACE_URI)
@SOAPBinding(style = SOAPBinding.Style.DOCUMENT, use = SOAPBinding.Use.LITERAL, parameterStyle = SOAPBinding.ParameterStyle.WRAPPED)
public interface SponsorWebService {

    /**
     * Looks up a single sponsor by its sponsor code.
     *
     * @param sponsorCode code identifying the sponsor
     * @return the matching sponsor DTO
     */
    public SponsorDTO getSponsor(@WebParam(name= "sponsorCode") String sponsorCode);

    /**
     * Finds all sponsors matching the given search criteria.
     *
     * @param searchCriteria criteria to filter sponsors by
     * @return list of matching sponsor DTOs
     */
    public List<SponsorDTO> getMatchingSponsors(@WebParam(name= "searchCriteria") SponsorCriteriaDto searchCriteria);
}
| agpl-3.0 |
UniversityOfHawaii/kfs | kfs-ar/src/main/java/org/kuali/kfs/module/ar/document/service/PredeterminedBillingScheduleMaintenanceService.java | 1353 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.ar.document.service;
/**
 * Services to support predetermined billing schedule maintenance.
 */
public interface PredeterminedBillingScheduleMaintenanceService {

    /**
     * Determines whether the Bill identified by the given proposal number and
     * bill id has already been copied to an Invoice Bill on an invoice
     * document.
     *
     * @param proposalNumber proposal number to check
     * @param billId bill id to check
     * @return true if the Bill has been copied, false otherwise
     */
    public boolean hasBillBeenCopiedToInvoice(Long proposalNumber, String billId);
}
| agpl-3.0 |
closeio/nylas | migrations/versions/096_migrate_secret_data.py | 2459 | """migrate_secret_data
Revision ID: 38c29430efeb
Revises: 1683790906cf
Create Date: 2014-09-18 03:03:42.131932
"""
# revision identifiers, used by Alembic.
revision = '38c29430efeb'
down_revision = '1683790906cf'
import sqlalchemy as sa
def upgrade():
    """Copy existing secret values into the new ``_secret`` column --
    encrypting them with libsodium SecretBox when ``ENCRYPT_SECRETS`` is
    enabled -- and tag each secret as type 'password' or 'token'."""
    from inbox.config import config
    import nacl.secret
    import nacl.utils
    from inbox.ignition import main_engine
    from inbox.models.session import session_scope
    engine = main_engine(pool_size=1, max_overflow=0)

    # Reflect the live schema instead of importing models, so this migration
    # stays valid as the models evolve.
    # NOTE(review): relies on `sa.ext.declarative` being reachable through the
    # bare `import sqlalchemy as sa` -- presumably sqlalchemy.ext.declarative
    # is imported transitively in this environment; verify before reuse.
    Base = sa.ext.declarative.declarative_base()
    Base.metadata.reflect(engine)

    class Secret(Base):
        __table__ = Base.metadata.tables['secret']

    class GenericAccount(Base):
        __table__ = Base.metadata.tables['genericaccount']

    with session_scope(versioned=False) as \
            db_session:
        secrets = db_session.query(Secret).filter(
            Secret.secret.isnot(None)).all()
        # Join on the genericaccount and optionally easaccount tables to
        # determine which secrets should have type 'password'.
        generic_query = db_session.query(Secret.id).join(
            GenericAccount).filter(Secret.id == GenericAccount.password_id)
        password_secrets = [id_ for id_, in generic_query]
        if engine.has_table('easaccount'):
            class EASAccount(Base):
                __table__ = Base.metadata.tables['easaccount']

            eas_query = db_session.query(Secret.id).join(
                EASAccount).filter(Secret.id == EASAccount.password_id)
            password_secrets.extend([id_ for id_, in eas_query])
        for s in secrets:
            # normalize to bytes before encrypting/storing (Python 2 code)
            plain = s.secret.encode('utf-8') if isinstance(s.secret, unicode) \
                else s.secret
            if config.get_required('ENCRYPT_SECRETS'):
                # encrypt with the static hex-encoded key and a random nonce
                s._secret = nacl.secret.SecretBox(
                    key=config.get_required('SECRET_ENCRYPTION_KEY'),
                    encoder=nacl.encoding.HexEncoder
                ).encrypt(
                    plaintext=plain,
                    nonce=nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE))

                # 1 is EncryptionScheme.SECRETBOX_WITH_STATIC_KEY
                s.encryption_scheme = 1
            else:
                # encryption disabled: store the plaintext bytes as-is
                s._secret = plain

            # secrets referenced by an account's password_id are passwords;
            # everything else is a token
            if s.id in password_secrets:
                s.type = 'password'
            else:
                s.type = 'token'

            db_session.add(s)

        db_session.commit()
def downgrade():
    # Irreversible data migration: encrypted/typed secrets are not converted
    # back, so downgrading is a no-op.
    pass
| agpl-3.0 |
UniversityOfHawaii/kfs | kfs-core/src/main/java/org/kuali/kfs/fp/document/validation/event/CheckEvent.java | 1195 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.fp.document.validation.event;
import org.kuali.kfs.fp.businessobject.Check;
import org.kuali.rice.krad.rules.rule.event.KualiDocumentEvent;
/**
 * Defines methods common to all CheckEvents.
 */
public interface CheckEvent extends KualiDocumentEvent {

    /**
     * @return the check associated with this event
     */
    public Check getCheck();
}
| agpl-3.0 |
mhelmetag/exercism.io | app/routes/notifications.rb | 433 | module ExercismWeb
module Routes
class Notifications < Core
get '/notifications' do
please_login
inbox = ExercismWeb::Presenters::Inbox.new(current_user)
erb :"notifications/index", locals: { inbox: inbox }
end
post '/notifications/read' do
please_login
current_user.notifications.update_all(read: true)
redirect '/notifications'
end
end
end
end
| agpl-3.0 |
Totallicks/totallicks-tuxguitar | TuxGuitar/src/org/herac/tuxguitar/gui/undo/undoables/track/UndoableTrackInfo.java | 2695 | package org.herac.tuxguitar.gui.undo.undoables.track;
import org.herac.tuxguitar.gui.TuxGuitar;
import org.herac.tuxguitar.gui.undo.CannotRedoException;
import org.herac.tuxguitar.gui.undo.CannotUndoException;
import org.herac.tuxguitar.gui.undo.UndoableEdit;
import org.herac.tuxguitar.gui.undo.undoables.UndoableCaretHelper;
import org.herac.tuxguitar.song.managers.TGSongManager;
import org.herac.tuxguitar.song.models.TGColor;
import org.herac.tuxguitar.song.models.TGTrack;
/**
 * Undoable edit capturing a track's info (name, color, offset) before and
 * after a change, so the change can be undone and redone.
 *
 * Usage: call {@link #startUndo(TGTrack)} before mutating the track and
 * {@link #endUndo(TGTrack)} afterwards.
 */
public class UndoableTrackInfo implements UndoableEdit{
	// which action is currently allowed: UNDO_ACTION or REDO_ACTION
	private int doAction;
	// number identifying the affected track within the song
	private int trackNumber;
	// caret positions captured before and after the change
	private UndoableCaretHelper undoCaret;
	private UndoableCaretHelper redoCaret;
	// track info snapshots before (undo*) and after (redo*) the change
	private String undoName;
	private String redoName;
	private TGColor undoColor;
	private TGColor redoColor;
	private int undoOffset;
	private int redoOffset;

	// instances are only created through startUndo()
	private UndoableTrackInfo(){
		super();
	}

	/** Re-applies the post-change snapshot and flips state back to undoable. */
	public void redo() throws CannotRedoException {
		if(!canRedo()){
			throw new CannotRedoException();
		}
		TGSongManager manager = TuxGuitar.instance().getSongManager();
		manager.getTrackManager().changeInfo(manager.getTrack(this.trackNumber),this.redoName,this.redoColor.clone(manager.getFactory()),this.redoOffset);
		TuxGuitar.instance().fireUpdate();
		TuxGuitar.instance().getMixer().update();
		this.redoCaret.update();
		this.doAction = UNDO_ACTION;
	}

	/** Restores the pre-change snapshot and flips state to redoable. */
	public void undo() throws CannotUndoException {
		if(!canUndo()){
			throw new CannotUndoException();
		}
		TGSongManager manager = TuxGuitar.instance().getSongManager();
		manager.getTrackManager().changeInfo(manager.getTrack(this.trackNumber),this.undoName,this.undoColor.clone(manager.getFactory()),this.undoOffset);
		TuxGuitar.instance().fireUpdate();
		TuxGuitar.instance().getMixer().update();
		this.undoCaret.update();
		this.doAction = REDO_ACTION;
	}

	public boolean canRedo() {
		return (this.doAction == REDO_ACTION);
	}

	public boolean canUndo() {
		return (this.doAction == UNDO_ACTION);
	}

	/**
	 * Captures the pre-change state of the given track (call BEFORE mutating).
	 *
	 * @param track the track about to be changed
	 * @return a new edit holding the "undo" snapshot
	 */
	public static UndoableTrackInfo startUndo(TGTrack track){
		UndoableTrackInfo undoable = new UndoableTrackInfo();
		undoable.doAction = UNDO_ACTION;
		undoable.trackNumber = track.getNumber();
		undoable.undoCaret = new UndoableCaretHelper();
		undoable.undoName = track.getName();
		undoable.undoColor = track.getColor().clone(TuxGuitar.instance().getSongManager().getFactory());
		undoable.undoOffset = track.getOffset();
		return undoable;
	}

	/**
	 * Captures the post-change state of the given track (call AFTER mutating).
	 *
	 * @param track the track that was changed
	 * @return this edit, now also holding the "redo" snapshot
	 */
	public UndoableTrackInfo endUndo(TGTrack track){
		this.redoCaret = new UndoableCaretHelper();
		this.redoName = track.getName();
		this.redoColor = track.getColor().clone(TuxGuitar.instance().getSongManager().getFactory());
		this.redoOffset = track.getOffset();
		return this;
	}
}
| lgpl-2.1 |
whdc/ieo-beast | src/dr/util/TabularData.java | 650 | package dr.util;
/**
* Tabular data provider
*
* A very modest start. will evolve further according to needs.
*
* @author Joseph Heled
*/
/**
 * Abstract provider of tabular data: a grid addressed by row and column,
 * with named columns.
 *
 * @author Joseph Heled
 */
public abstract class TabularData {

    /** @return number of columns in the table */
    public abstract int nColumns();

    /** @return display name of the given (zero-based) column */
    public abstract String columnName(int nColumn);

    /** @return number of rows in the table */
    public abstract int nRows();

    /** @return cell value at the given row/column position */
    public abstract Object data(int nRow, int nColumn);

    /**
     * Finds the index of the column with the given name.
     *
     * @param name column name to look for
     * @return zero-based index of the first matching column, or -1 if none
     */
    public int getColumn(String name) {
        int index = 0;
        while (index < nColumns()) {
            if (columnName(index).equals(name)) {
                return index;
            }
            ++index;
        }
        return -1;
    }
}
| lgpl-2.1 |
sanguinariojoe/FreeCAD | src/Base/Persistence.cpp | 4558 | /***************************************************************************
* Copyright (c) 2011 Jürgen Riegel <juergen.riegel@web.de> *
* *
* This file is part of the FreeCAD CAx development system. *
* *
* This library is free software; you can redistribute it and/or *
* modify it under the terms of the GNU Library General Public *
* License as published by the Free Software Foundation; either *
* version 2 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU Library General Public License for more details. *
* *
* You should have received a copy of the GNU Library General Public *
* License along with this library; see the file COPYING.LIB. If not, *
* write to the Free Software Foundation, Inc., 59 Temple Place, *
* Suite 330, Boston, MA 02111-1307, USA *
* *
***************************************************************************/
#include "PreCompiled.h"
#include "Writer.h"
#include "Reader.h"
#include "PyObjectBase.h"
#ifndef _PreComp_
#endif
/// Here the FreeCAD includes sorted by Base,App,Gui......
#include "Persistence.h"
using namespace Base;
TYPESYSTEM_SOURCE_ABSTRACT(Base::Persistence,Base::BaseClass)
//**************************************************************************
// Construction/Destruction
//**************************************************************************
// separator for other implementation aspects
// Reports the memory footprint of the object in bytes. The base class has
// no sensible answer, so every concrete subclass must override this;
// reaching the base implementation is a programming error (hence assert).
unsigned int Persistence::getMemSize (void) const
{
    // you have to implement this method in all descending classes!
    assert(0);
    return 0;
}
// Writes the object's persistent XML representation. Must be overridden
// by every concrete subclass; the base version only asserts.
void Persistence::Save (Writer &/*writer*/) const
{
    // you have to implement this method in all descending classes!
    assert(0);
}
// Restores the object from its persistent XML representation. Must be
// overridden by every concrete subclass; the base version only asserts.
void Persistence::Restore(XMLReader &/*reader*/)
{
    // you have to implement this method in all descending classes!
    assert(0);
}
// Optional hook for writing auxiliary files alongside the main document.
// Intentionally a no-op in the base class.
void Persistence::SaveDocFile (Writer &/*writer*/) const
{
}
// Optional hook for reading auxiliary files written by SaveDocFile().
// Intentionally a no-op in the base class.
void Persistence::RestoreDocFile(Reader &/*reader*/)
{
}
// Escapes a raw string so it can be embedded safely inside an XML
// attribute value: markup characters become named entities and
// CR/LF/TAB become numeric character references; all other characters
// are copied through unchanged.
std::string Persistence::encodeAttribute(const std::string& str)
{
    std::string result;
    result.reserve(str.size());

    for (std::string::size_type i = 0; i < str.size(); ++i) {
        const char c = str[i];
        switch (c) {
            case '<':  result += "&lt;";   break;
            case '>':  result += "&gt;";   break;
            case '\"': result += "&quot;"; break;
            case '\'': result += "&apos;"; break;
            case '&':  result += "&amp;";  break;
            case '\r': result += "&#13;";  break;
            case '\n': result += "&#10;";  break;
            case '\t': result += "&#09;";  break;
            default:   result += c;        break;
        }
    }

    return result;
}
// Serializes this object into `stream` as a zip archive containing a
// single "Persistence.xml" entry. The inner scope is essential: the zip
// stream is only finalized when the ZipWriter is destroyed.
void Persistence::dumpToStream(std::ostream& stream, int compression)
{
    //we need to close the zipstream to get a good result, the only way to do this is to delete the ZipWriter.
    //Hence the scope...
    {
        //create the writer
        Base::ZipWriter writer(stream);
        writer.setLevel(compression);
        writer.putNextEntry("Persistence.xml");
        writer.setMode("BinaryBrep");

        //save the content (we need to encapsulate it with xml tags to be able to read single element xmls like happen for properties)
        writer.Stream() << "<Content>" << std::endl;
        Save(writer);
        writer.Stream() << "</Content>";
        writer.writeFiles();
    }
}
// Counterpart of dumpToStream(): reads the zip archive from `stream`,
// restores the object from the contained XML element, then reads any
// auxiliary files and signals completion via restoreFinished().
void Persistence::restoreFromStream(std::istream& stream)
{
    zipios::ZipInputStream zipstream(stream);
    Base::XMLReader reader("", zipstream);

    if (!reader.isValid())
        throw Base::ValueError("Unable to construct reader");

    reader.readElement("Content");
    Restore(reader);
    reader.readFiles(zipstream);
    restoreFinished();
}
| lgpl-2.1 |
LLNL/spack | var/spack/repos/builtin/packages/r-irdisplay/package.py | 905 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RIrdisplay(RPackage):
    """An interface to the rich display capabilities of Jupyter front-ends
    (e.g. 'Jupyter Notebook') Designed to be used from a running IRkernel
    session"""

    # Upstream project and CRAN release locations used by Spack's fetcher.
    homepage = "https://irkernel.github.io"
    url      = "https://cloud.r-project.org/src/contrib/IRdisplay_0.4.4.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/IRdisplay"

    # Known releases, pinned by the sha256 of the CRAN tarball.
    version('0.7.0', sha256='91eac9acdb92ed0fdc58e5da284aa4bb957ada5eef504fd89bec136747999089')
    version('0.4.4', sha256='e83a0bc52800618bf9a3ac5ef3d432512e00f392b7216fd515fca319377584a6')

    # R runtime and package dependencies (needed at build and run time).
    depends_on('r@3.0.1:', type=('build', 'run'))
    depends_on('r-repr', type=('build', 'run'))
| lgpl-2.1 |
arildb/tiki-azure | lib/graph-engine/gd.php | 7415 | <?php
// (c) Copyright 2002-2013 by authors of the Tiki Wiki CMS Groupware Project
//
// All Rights Reserved. See copyright.txt for details and a complete list of authors.
// Licensed under the GNU LESSER GENERAL PUBLIC LICENSE. See license.txt for details.
// $Id: gd.php 44444 2013-01-05 21:24:24Z changi67 $
/* This library is LGPL
* written by Louis-Philippe Huberdeau
*
* vim: fdm=marker tabstop=4 shiftwidth=4 noet:
*
* This file contains the GD graphic renderer.
*/
require_once('lib/graph-engine/core.php');
/**
 * GD based graphic renderer.
 *
 * Draws primitives (lines, rectangles, pies, text) on a GD image using
 * coordinates expressed as 0-1 floats, which are scaled to the pixel size
 * of the image. The result can be emitted as PNG/JPEG over HTTP or into a
 * stream, and an HTML image map of registered links can be produced.
 */
class GD_GRenderer extends GRenderer // {{{1
{
	var $gd;
	var $styles;
	var $colors;
	var $fonts;
	var $format;
	var $width;
	var $height;
	var $imageMap;

	/**
	 * Renamed from the legacy PHP4-style constructor GD_GRenderer():
	 * old-style constructors are deprecated since PHP 7.0 and are no
	 * longer treated as constructors in PHP 8.0, which would leave the
	 * image unallocated on "new".
	 *
	 * @param int    $width  image width in pixels; 0 skips image creation
	 * @param int    $height image height in pixels; 0 skips image creation
	 * @param string $format output format: 'png' or 'jpg'
	 */
	function __construct( $width = 0, $height = 0, $format = 'png' ) // {{{2
	{
		// Null size does not create a graphic.
		$this->styles = array();
		$this->colors = array();
		$this->fonts = array();

		if ( $width !== 0 && $height !== 0 ) {
			$this->gd = imagecreate($width, $height);
			// The first allocated color becomes the image background.
			$this->_getColor('white');
		}

		$this->format = $format;
		$this->width = $width;
		$this->height = $height;
	}

	/**
	 * Registers a rectangular link target for the HTML image map returned
	 * by getMapContent(). Coordinates are 0-1 floats.
	 */
	function addLink( $target, $left, $top, $right, $bottom, $title = null ) // {{{2
	{
		$this->_convertPosition($left, $top);
		$this->_convertPosition($right, $bottom);
		$target = htmlspecialchars($target);
		$title = htmlspecialchars($title);

		$this->imageMap .= "<area shape=\"rect\" coords=\"$left,$top,$right,$bottom\" href=\"$target\" alt=\"$title\" title=\"$title\"/>\n";
	}

	/** Draws a line between two relative positions. */
	function drawLine( $x1, $y1, $x2, $y2, $style ) // {{{2
	{
		$this->_convertPosition($x1, $y1);
		$this->_convertPosition($x2, $y2);

		imagesetthickness($this->gd, $style['line-width']);
		imageline($this->gd, $x1, $y1, $x2, $y2, $style['line']);
	}

	/** Draws a rectangle, optionally filled, from relative coordinates. */
	function drawRectangle( $left, $top, $right, $bottom, $style ) // {{{2
	{
		if ( $top > $bottom ) {
			// Filled rect has a problem when coordinates are inverted.
			$a = $top;
			$top = $bottom;
			$bottom = $a;
		}
		if ( $left > $right ) {
			// Filled rect has a problem when coordinates are inverted.
			$a = $left;
			$left = $right;
			$right = $a;
		}

		$this->_convertPosition($left, $top);
		$this->_convertPosition($right, $bottom);

		if ( isset($style['fill']) )
			imagefilledrectangle($this->gd, $left, $top, $right, $bottom, $style['fill']);

		imagesetthickness($this->gd, $style['line-width']);
		imagerectangle($this->gd, $left, $top, $right, $bottom, $style['line']);
	}

	/**
	 * Draws a filled pie slice centered at the relative position. $begin
	 * and $end are in degrees; angles are mirrored so slices progress
	 * counter-clockwise from the caller's point of view.
	 */
	function drawPie( $centerX, $centerY, $radius, $begin, $end, $style ) // {{{2
	{
		$radius = $radius * 2;
		if ( $begin != 0 || $end != 360 ) {
			$tmp = -$begin;
			$begin = -$end;
			$end = $tmp;
		}

		$this->_convertPosition($centerX, $centerY);
		$radius = $radius * min($this->width, $this->height);

		imagefilledarc($this->gd, $centerX, $centerY, $radius, $radius, $begin, $end, $style['fill'], IMG_ARC_PIE);
		imagesetthickness($this->gd, $style['line-width']);
		imagefilledarc($this->gd, $centerX, $centerY, $radius, $radius, $begin, $end, $style['line'], IMG_ARC_NOFILL | IMG_ARC_EDGED);
	}

	/** Draws text between $left and $right, honoring the style's alignment. */
	function drawText( $text, $left, $right, $height, $style ) // {{{2
	{
		$h = $height; // Creating duplicate (temp)
		$this->_convertPosition($left, $height);
		$this->_convertPosition($right, $h);

		switch( $style['align'] ) {
		case 'left':
			$this->_drawLeftText($text, $left, $height, $style);
			break;
		case 'center':
			$this->_drawCenterText($text, $left, $right, $height, $style);
			break;
		case 'right':
			$this->_drawRightText($text, $right, $height, $style);
			break;
		}
	}

	/** Returns the rendered text width as a fraction of the image width. */
	function getTextWidth( $text, $style ) // {{{2
	{
		return imagefontwidth($style['font']) * strlen($text) / $this->width;
	}

	/** Returns the font height as a fraction of the image height. */
	function getTextHeight( $style ) // {{{2
	{
		return imagefontheight($style['font']) / $this->height;
	}

	/** Resolves a style name to a style array, caching the result. */
	function getStyle( $name ) // {{{2
	{
		if ( isset($this->styles[$name]) )
			return $this->styles[$name];

		return $this->styles[$name] = $this->_findStyle($name);
	}

	/** Sends the image to the browser with the proper content type. */
	function httpOutput( $filename ) // {{{2
	{
		switch( $this->format ) {
		case 'png':
			header("Content-type: image/png");
			imagepng($this->gd);
			break;
		case 'jpg':
			header("Content-type: image/jpeg");
			imagejpeg($this->gd);
			break;
		default:
			echo "Unknown Format: {$this->format}\n";
		}

		imagedestroy($this->gd);
	}

	/** Writes the encoded image into an open stream and frees the image. */
	function writeToStream( $stream ) // {{{2
	{
		ob_start();
		switch( $this->format ) {
		case 'png':
			imagepng($this->gd);
			break;
		case 'jpg':
			imagejpeg($this->gd);
			break;
		default:
			echo "Unknown Format: {$this->format}\n";
		}

		fwrite($stream, ob_get_contents());
		ob_end_clean();
		imagedestroy($this->gd);
	}

	/** Returns the accumulated <area> tags collected by addLink(). */
	function getMapContent() // {{{2
	{
		return $this->imageMap;
	}

	function _convertLength( $value, $type ) // {{{2
	{
		// $type is either 'width' or 'height'
		// $value is a 0-1 float
		return floor($value * $this->$type);
	}

	function _convertPosition( &$x, &$y ) // {{{2
	{
		// Parameters passed by ref!
		$x = $this->_convertLength($x, 'width');
		$y = $this->_convertLength($y, 'height');
	}

	/**
	 * Resolves a style name such as "Bold-LineStroke-Black" into an array
	 * of drawing attributes. Unknown names are delegated to the parent.
	 */
	function _findStyle( $name ) // {{{2
	{
		$parts = explode('-', $name);
		$style = array();

		switch( $parts[0] ) {
		case 'Thin':
			$style['line-width'] = 1;
			array_shift($parts);
			break;
		case 'Bold':
			$style['line-width'] = 2;
			array_shift($parts);
			break;
		case 'Bolder':
			$style['line-width'] = 3;
			array_shift($parts);
			break;
		case 'Large':
			$style['font'] = 5;
			array_shift($parts);
			break;
		case 'Small':
			$style['font'] = 2;
			array_shift($parts);
			break;
		case 'Normal':
			array_shift($parts);
			// Intentional fall-through: 'Normal' takes the defaults below.
		default:
			if ( $parts[0] == 'Text' )
				$style['font'] = 4;
			else
				$style['line-width'] = 1;
			break;
		}

		switch( $parts[0] ) {
		case 'LineStroke':
			$style['line'] = $this->_getColor($parts[1]);
			break;
		case 'FillStroke':
			$style['fill'] = $this->_getColor($parts[1]);
			$style['line'] = $this->_getColor('Black');
			break;
		case 'Text':
			if ( !isset($parts[1]) )
				$parts[1] = null;

			switch( $parts[1] ) {
			case 'Center':
				$style['align'] = 'center';
				break;
			case 'Right':
				$style['align'] = 'right';
				break;
			case 'Left':
			default:
				$style['align'] = 'left';
				break;
			}
			break;
		default:
			// Unknown style prefix: defer to the generic implementation
			// (parent:: replaces the legacy GRenderer:: static-style call).
			return parent::getStyle($name);
		}

		return $style;
	}

	/** Returns (and caches) the GD color index for a named color. */
	function _getColor( $name ) // {{{2
	{
		$name = strtolower($name);
		if ( isset($this->colors[$name]) )
			return $this->colors[$name];

		return $this->colors[$name] = $this->_findColor($name);
	}

	function _findColor( $name ) // {{{2
	{
		$color = $this->_getRawColor($name);
		return imagecolorallocate($this->gd, (int)$color['r'], (int)$color['g'], (int)$color['b']);
	}

	function _drawLeftText( $string, $left, $height, $style ) // {{{2
	{
		imagestring($this->gd, $style['font'], $left, $height, $string, $this->_getColor('Black'));
	}

	function _drawCenterText( $string, $left, $right, $height, $style ) // {{{2
	{
		$width = imagefontwidth($style['font']) * strlen($string);
		$x = ( $right - $left ) / 2 + $left - $width / 2;
		imagestring($this->gd, $style['font'], $x, $height, $string, $this->_getColor('Black'));
	}

	function _drawRightText( $string, $right, $height, $style ) // {{{2
	{
		$width = imagefontwidth($style['font']) * strlen($string);
		$x = $right - $width;
		imagestring($this->gd, $style['font'], $x, $height, $string, $this->_getColor('Black'));
	}
} // }}}1
| lgpl-2.1 |
hellcoderz/thebeast | src/thebeast/pml/NotInTypeException.java | 476 | package thebeast.pml;
/**
* Created by IntelliJ IDEA. User: s0349492 Date: 21-Jan-2007 Time: 20:36:02
*/
/**
 * Runtime exception raised when a constant name is used that is not a
 * member of the given type.
 */
public class NotInTypeException extends RuntimeException {

    private String name;
    private Type type;

    /**
     * @param name the offending member name
     * @param type the type that does not contain it
     */
    public NotInTypeException(String name, Type type) {
        super(String.format("%s is not a member of %s", name, type.getName()));
        this.name = name;
        this.type = type;
    }

    /** @return the offending member name */
    public String getName() {
        return name;
    }

    /** @return the type that was searched */
    public Type getType() {
        return type;
    }
}
| lgpl-3.0 |
takeshineshiro/python-mode | pymode/libs/pylint/checkers/strings.py | 26724 | # Copyright (c) 2009-2010 Arista Networks, Inc. - James Lingard
# Copyright (c) 2004-2013 LOGILAB S.A. (Paris, FRANCE).
# Copyright 2012 Google Inc.
#
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Checker for string formatting operations.
"""
import sys
import tokenize
import string
import numbers
import astroid
from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
from pylint.checkers import BaseChecker, BaseTokenChecker
from pylint.checkers import utils
from pylint.checkers.utils import check_messages
import six
_PY3K = sys.version_info[:2] >= (3, 0)
_PY27 = sys.version_info[:2] == (2, 7)
MSGS = {
'E1300': ("Unsupported format character %r (%#02x) at index %d",
"bad-format-character",
"Used when a unsupported format character is used in a format\
string."),
'E1301': ("Format string ends in middle of conversion specifier",
"truncated-format-string",
"Used when a format string terminates before the end of a \
conversion specifier."),
'E1302': ("Mixing named and unnamed conversion specifiers in format string",
"mixed-format-string",
"Used when a format string contains both named (e.g. '%(foo)d') \
and unnamed (e.g. '%d') conversion specifiers. This is also \
used when a named conversion specifier contains * for the \
minimum field width and/or precision."),
'E1303': ("Expected mapping for format string, not %s",
"format-needs-mapping",
"Used when a format string that uses named conversion specifiers \
is used with an argument that is not a mapping."),
'W1300': ("Format string dictionary key should be a string, not %s",
"bad-format-string-key",
"Used when a format string that uses named conversion specifiers \
is used with a dictionary whose keys are not all strings."),
'W1301': ("Unused key %r in format string dictionary",
"unused-format-string-key",
"Used when a format string that uses named conversion specifiers \
is used with a dictionary that conWtains keys not required by the \
format string."),
'E1304': ("Missing key %r in format string dictionary",
"missing-format-string-key",
"Used when a format string that uses named conversion specifiers \
is used with a dictionary that doesn't contain all the keys \
required by the format string."),
'E1305': ("Too many arguments for format string",
"too-many-format-args",
"Used when a format string that uses unnamed conversion \
specifiers is given too many arguments."),
'E1306': ("Not enough arguments for format string",
"too-few-format-args",
"Used when a format string that uses unnamed conversion \
specifiers is given too few arguments"),
'W1302': ("Invalid format string",
"bad-format-string",
"Used when a PEP 3101 format string is invalid.",
{'minversion': (2, 7)}),
'W1303': ("Missing keyword argument %r for format string",
"missing-format-argument-key",
"Used when a PEP 3101 format string that uses named fields "
"doesn't receive one or more required keywords.",
{'minversion': (2, 7)}),
'W1304': ("Unused format argument %r",
"unused-format-string-argument",
"Used when a PEP 3101 format string that uses named "
"fields is used with an argument that "
"is not required by the format string.",
{'minversion': (2, 7)}),
'W1305': ("Format string contains both automatic field numbering "
"and manual field specification",
"format-combined-specification",
"Usen when a PEP 3101 format string contains both automatic "
"field numbering (e.g. '{}') and manual field "
"specification (e.g. '{0}').",
{'minversion': (2, 7)}),
'W1306': ("Missing format attribute %r in format specifier %r",
"missing-format-attribute",
"Used when a PEP 3101 format string uses an "
"attribute specifier ({0.length}), but the argument "
"passed for formatting doesn't have that attribute.",
{'minversion': (2, 7)}),
'W1307': ("Using invalid lookup key %r in format specifier %r",
"invalid-format-index",
"Used when a PEP 3101 format string uses a lookup specifier "
"({a[1]}), but the argument passed for formatting "
"doesn't contain or doesn't have that key as an attribute.",
{'minversion': (2, 7)})
}
# Node types that, when they appear on the right-hand side of '%', are
# known not to be tuples or mappings, so argument counting can treat the
# whole node as a single value.
OTHER_NODES = (astroid.Const, astroid.List, astroid.Backquote,
               astroid.Lambda, astroid.Function,
               astroid.ListComp, astroid.SetComp, astroid.GenExpr)
if _PY3K:
    import _string

    def split_format_field_names(format_string):
        # CPython 3 exposes the replacement-field parser through the
        # private _string module; numeric keys already come back as ints.
        return _string.formatter_field_name_split(format_string)
else:
    def _field_iterator_convertor(iterator):
        # Python 2 yields longs for numeric specifiers; normalize them to
        # ints so the output matches Python 3.
        for is_attr, key in iterator:
            if isinstance(key, numbers.Number):
                yield is_attr, int(key)
            else:
                yield is_attr, key

    def split_format_field_names(format_string):
        keyname, fielditerator = format_string._formatter_field_name_split()
        # it will return longs, instead of ints, which will complicate
        # the output
        return keyname, _field_iterator_convertor(fielditerator)
def collect_string_fields(format_string):
    """Yield every valid replacement-field name found in *format_string*.

    Fields nested inside format specs (e.g. ``'{0:{width}}'``) are
    yielded as well.  Raises ``utils.IncompleteFormatString`` when the
    string cannot be parsed.
    """
    try:
        for _literal, field_name, format_spec, conversion \
                in string.Formatter().parse(format_string):
            if field_name is None and format_spec is None and conversion is None:
                # Pure literal text: no replacement field here.
                continue
            yield field_name
            if format_spec:
                # Recurse into the spec to pick up nested fields.
                for nested_field in collect_string_fields(format_spec):
                    yield nested_field
    except ValueError:
        # string.Formatter signals a malformed string with ValueError;
        # should we check the argument of the ValueError?
        raise utils.IncompleteFormatString(format_string)
def parse_format_method_string(format_string):
    """
    Parses a PEP 3101 format string, returning a tuple of
    (keys, num_args, manual_pos_arg),
    where keys is the set of mapping keys in the format string, num_args
    is the number of arguments required by the format string and
    manual_pos_arg is the number of arguments passed with the position.
    """
    keys = []
    num_args = 0
    manual_pos_arg = set()
    for name in collect_string_fields(format_string):
        if name and str(name).isdigit():
            # Manually numbered positional field, e.g. '{0}'.
            manual_pos_arg.add(str(name))
        elif name:
            # Named field, possibly with attribute/index accesses.
            keyname, fielditerator = split_format_field_names(name)
            if isinstance(keyname, numbers.Number):
                # In Python 2 it will return long which will lead
                # to different output between 2 and 3
                manual_pos_arg.add(str(keyname))
                keyname = int(keyname)
            keys.append((keyname, list(fielditerator)))
        else:
            # Automatic numbering ('{}') consumes one positional argument.
            num_args += 1
    return keys, num_args, len(manual_pos_arg)
def get_args(callfunc):
    """ Get the arguments from the given `CallFunc` node.
    Return a tuple, where the first element is the
    number of positional arguments and the second element
    is the keyword arguments in a dict.
    """
    positional = 0
    named = {}

    for arg in callfunc.args:
        if isinstance(arg, astroid.Keyword):
            # Keyword values are inferred up-front so later checks can
            # inspect what was actually passed.
            named[arg.arg] = utils.safe_infer(arg.value)
        else:
            positional += 1
    return positional, named
def get_access_path(key, parts):
    """Rebuild the textual access path for a format field.

    ``key`` is the field's base name and ``parts`` an iterable of
    ``(is_attribute, specifier)`` pairs: attribute accesses render as
    ``.spec`` and index accesses as ``[spec]`` (repr form), e.g.
    ``a.b[0][1]``.
    """
    pieces = [str(key)]
    for is_attribute, specifier in parts:
        pieces.append(
            ".{}".format(specifier) if is_attribute
            else "[{!r}]".format(specifier)
        )
    return "".join(pieces)
class StringFormatChecker(BaseChecker):
    """Checks string formatting operations to ensure that the format string
    is valid and the arguments match the format string.
    """

    __implements__ = (IAstroidChecker,)
    name = 'string'
    msgs = MSGS

    @check_messages(*(MSGS.keys()))
    def visit_binop(self, node):
        # Only '%' applied to a constant string on the left can be
        # validated statically; everything else is skipped.
        if node.op != '%':
            return
        left = node.left
        args = node.right

        if not (isinstance(left, astroid.Const)
                and isinstance(left.value, six.string_types)):
            return
        format_string = left.value
        try:
            required_keys, required_num_args = \
                utils.parse_format_string(format_string)
        except utils.UnsupportedFormatCharacter as e:
            c = format_string[e.index]
            self.add_message('bad-format-character',
                             node=node, args=(c, ord(c), e.index))
            return
        except utils.IncompleteFormatString:
            self.add_message('truncated-format-string', node=node)
            return
        if required_keys and required_num_args:
            # The format string uses both named and unnamed format
            # specifiers.
            self.add_message('mixed-format-string', node=node)
        elif required_keys:
            # The format string uses only named format specifiers.
            # Check that the RHS of the % operator is a mapping object
            # that contains precisely the set of keys required by the
            # format string.
            if isinstance(args, astroid.Dict):
                keys = set()
                unknown_keys = False
                for k, _ in args.items:
                    if isinstance(k, astroid.Const):
                        key = k.value
                        if isinstance(key, six.string_types):
                            keys.add(key)
                        else:
                            self.add_message('bad-format-string-key',
                                             node=node, args=key)
                    else:
                        # One of the keys was something other than a
                        # constant. Since we can't tell what it is,
                        # suppress checks for missing keys in the
                        # dictionary.
                        unknown_keys = True
                if not unknown_keys:
                    for key in required_keys:
                        if key not in keys:
                            self.add_message('missing-format-string-key',
                                             node=node, args=key)
                for key in keys:
                    if key not in required_keys:
                        self.add_message('unused-format-string-key',
                                         node=node, args=key)
            elif isinstance(args, OTHER_NODES + (astroid.Tuple,)):
                # A definitely-not-a-mapping node cannot satisfy named
                # specifiers.
                type_name = type(args).__name__
                self.add_message('format-needs-mapping',
                                 node=node, args=type_name)
            # else:
            # The RHS of the format specifier is a name or
            # expression. It may be a mapping object, so
            # there's nothing we can check.
        else:
            # The format string uses only unnamed format specifiers.
            # Check that the number of arguments passed to the RHS of
            # the % operator matches the number required by the format
            # string.
            if isinstance(args, astroid.Tuple):
                num_args = len(args.elts)
            elif isinstance(args, OTHER_NODES + (astroid.Dict, astroid.DictComp)):
                # A single non-tuple value counts as one argument.
                num_args = 1
            else:
                # The RHS of the format specifier is a name or
                # expression. It could be a tuple of unknown size, so
                # there's nothing we can check.
                num_args = None
            if num_args is not None:
                if num_args > required_num_args:
                    self.add_message('too-many-format-args', node=node)
                elif num_args < required_num_args:
                    self.add_message('too-few-format-args', node=node)
class StringMethodsChecker(BaseChecker):
    """Checks calls to string methods: suspicious str.strip arguments and
    PEP 3101 str.format usage (argument counts, keys and attribute/index
    lookups inside replacement fields).
    """

    __implements__ = (IAstroidChecker,)
    name = 'string'
    msgs = {
        'E1310': ("Suspicious argument in %s.%s call",
                  "bad-str-strip-call",
                  "The argument to a str.{l,r,}strip call contains a"
                  " duplicate character, "),
        }

    @check_messages(*(MSGS.keys()))
    def visit_callfunc(self, node):
        func = utils.safe_infer(node.func)
        if (isinstance(func, astroid.BoundMethod)
                and isinstance(func.bound, astroid.Instance)
                and func.bound.name in ('str', 'unicode', 'bytes')):
            if func.name in ('strip', 'lstrip', 'rstrip') and node.args:
                arg = utils.safe_infer(node.args[0])
                if not isinstance(arg, astroid.Const):
                    return
                # A strip argument with repeated characters usually means
                # the caller misread the API (it is a character set, not a
                # substring).
                if len(arg.value) != len(set(arg.value)):
                    self.add_message('bad-str-strip-call', node=node,
                                     args=(func.bound.name, func.name))
            elif func.name == 'format':
                # PEP 3101 checks only make sense on 2.7+/3.x.
                if _PY27 or _PY3K:
                    self._check_new_format(node, func)

    def _check_new_format(self, node, func):
        """ Check the new string formatting. """
        # TODO: skip (for now) format nodes which don't have
        # an explicit string on the left side of the format operation.
        # We do this because our inference engine can't properly handle
        # redefinitions of the original string.
        # For more details, see issue 287.
        #
        # Note that there may not be any left side at all, if the format method
        # has been assigned to another variable. See issue 351. For example:
        #
        #    fmt = 'some string {}'.format
        #    fmt('arg')
        if (isinstance(node.func, astroid.Getattr)
                and not isinstance(node.func.expr, astroid.Const)):
            return
        try:
            strnode = next(func.bound.infer())
        except astroid.InferenceError:
            return
        if not isinstance(strnode, astroid.Const):
            return
        if node.starargs or node.kwargs:
            # TODO: Don't complicate the logic, skip these for now.
            return
        try:
            positional, named = get_args(node)
        except astroid.InferenceError:
            return
        try:
            fields, num_args, manual_pos = parse_format_method_string(strnode.value)
        except utils.IncompleteFormatString:
            self.add_message('bad-format-string', node=node)
            return

        named_fields = set(field[0] for field in fields
                           if isinstance(field[0], six.string_types))
        if num_args and manual_pos:
            # '{}' and '{0}' cannot be mixed in one format string.
            self.add_message('format-combined-specification',
                             node=node)
            return

        check_args = False
        # Consider "{[0]}" as num_args.
        num_args += sum(1 for field in named_fields
                        if field == '')
        if named_fields:
            for field in named_fields:
                if field not in named and field:
                    self.add_message('missing-format-argument-key',
                                     node=node,
                                     args=(field, ))
            for field in named:
                if field not in named_fields:
                    self.add_message('unused-format-string-argument',
                                     node=node,
                                     args=(field, ))
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if positional or num_args:
                empty = any(True for field in named_fields
                            if field == '')
                if named or empty:
                    # Verify the required number of positional arguments
                    # only if the .format got at least one keyword argument.
                    # This means that the format strings accepts both
                    # positional and named fields and we should warn
                    # when one of the them is missing or is extra.
                    check_args = True
        else:
            check_args = True
        if check_args:
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if positional > num_args:
                self.add_message('too-many-format-args', node=node)
            elif positional < num_args:
                self.add_message('too-few-format-args', node=node)

        self._check_new_format_specifiers(node, fields, named)

    def _check_new_format_specifiers(self, node, fields, named):
        """
        Check attribute and index access in the format
        string ("{0.a}" and "{0[a]}").
        """
        for key, specifiers in fields:
            # Obtain the argument. If it can't be obtained
            # or inferred, skip this check.
            if key == '':
                # {[0]} will have an unnamed argument, defaulting
                # to 0. It will not be present in `named`, so use the value
                # 0 for it.
                key = 0
            if isinstance(key, numbers.Number):
                try:
                    argname = utils.get_argument_from_call(node, key)
                except utils.NoSuchArgumentError:
                    continue
            else:
                if key not in named:
                    continue
                argname = named[key]
            if argname in (astroid.YES, None):
                continue
            try:
                argument = next(argname.infer())
            except astroid.InferenceError:
                continue
            if not specifiers or argument is astroid.YES:
                # No need to check this key if it doesn't
                # use attribute / item access
                continue
            if argument.parent and isinstance(argument.parent, astroid.Arguments):
                # Ignore any object coming from an argument,
                # because we can't infer its value properly.
                continue
            previous = argument
            parsed = []
            for is_attribute, specifier in specifiers:
                if previous is astroid.YES:
                    break
                parsed.append((is_attribute, specifier))
                if is_attribute:
                    try:
                        previous = previous.getattr(specifier)[0]
                    except astroid.NotFoundError:
                        if (hasattr(previous, 'has_dynamic_getattr') and
                                previous.has_dynamic_getattr()):
                            # Don't warn if the object has a custom __getattr__
                            break
                        path = get_access_path(key, parsed)
                        self.add_message('missing-format-attribute',
                                         args=(specifier, path),
                                         node=node)
                        break
                else:
                    warn_error = False
                    if hasattr(previous, 'getitem'):
                        try:
                            previous = previous.getitem(specifier)
                        except (IndexError, TypeError):
                            warn_error = True
                    else:
                        try:
                            # Lookup __getitem__ in the current node,
                            # but skip further checks, because we can't
                            # retrieve the looked object
                            previous.getattr('__getitem__')
                            break
                        except astroid.NotFoundError:
                            warn_error = True
                    if warn_error:
                        path = get_access_path(key, parsed)
                        self.add_message('invalid-format-index',
                                         args=(specifier, path),
                                         node=node)
                        break

                try:
                    previous = next(previous.infer())
                except astroid.InferenceError:
                    # can't check further if we can't infer it
                    break
class StringConstantChecker(BaseTokenChecker):
    """Check string literals"""
    __implements__ = (ITokenChecker, IRawChecker)
    name = 'string_constant'
    msgs = {
        'W1401': ('Anomalous backslash in string: \'%s\'. '
                  'String constant might be missing an r prefix.',
                  'anomalous-backslash-in-string',
                  'Used when a backslash is in a literal string but not as an '
                  'escape.'),
        'W1402': ('Anomalous Unicode escape in byte string: \'%s\'. '
                  'String constant might be missing an r or u prefix.',
                  'anomalous-unicode-escape-in-string',
                  'Used when an escape like \\u is encountered in a byte '
                  'string where it has no effect.'),
        }

    # Characters that have a special meaning after a backslash in either
    # Unicode or byte strings.
    ESCAPE_CHARACTERS = 'abfnrtvx\n\r\t\\\'\"01234567'

    # TODO(mbp): Octal characters are quite an edge case today; people may
    # prefer a separate warning where they occur.  \0 should be allowed.

    # Characters that have a special meaning after a backslash but only in
    # Unicode strings.
    UNICODE_ESCAPE_CHARACTERS = 'uUN'

    def process_module(self, module):
        # Remember whether unicode_literals is active; it changes which
        # escapes are meaningful in unprefixed strings.
        self._unicode_literals = 'unicode_literals' in module.future_imports

    def process_tokens(self, tokens):
        for (tok_type, token, (start_row, _), _, _) in tokens:
            if tok_type == tokenize.STRING:
                # 'token' is the whole un-parsed token; we can look at the start
                # of it to see whether it's a raw or unicode string etc.
                self.process_string_token(token, start_row)

    def process_string_token(self, token, start_row):
        # Locate the first quote character to split prefix from body.
        for i, c in enumerate(token):
            if c in '\'\"':
                quote_char = c
                break
        # pylint: disable=undefined-loop-variable
        prefix = token[:i].lower()  # markers like u, b, r.
        after_prefix = token[i:]
        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
            string_body = after_prefix[3:-3]
        else:
            string_body = after_prefix[1:-1]  # Chop off quotes
        # No special checks on raw strings at the moment.
        if 'r' not in prefix:
            self.process_non_raw_string_token(prefix, string_body, start_row)

    def process_non_raw_string_token(self, prefix, string_body, start_row):
        """check for bad escapes in a non-raw string.

        prefix: lowercase string of eg 'ur' string prefix markers.
        string_body: the un-parsed body of the string, not including the quote
        marks.
        start_row: integer line number in the source.
        """
        # Walk through the string; if we see a backslash then escape the next
        # character, and skip over it.  If we see a non-escaped character,
        # alert, and continue.
        #
        # Accept a backslash when it escapes a backslash, or a quote, or
        # end-of-line, or one of the letters that introduce a special escape
        # sequence <http://docs.python.org/reference/lexical_analysis.html>
        #
        # TODO(mbp): Maybe give a separate warning about the rarely-used
        # \a \b \v \f?
        #
        # TODO(mbp): We could give the column of the problem character, but
        # add_message doesn't seem to have a way to pass it through at present.
        i = 0
        while True:
            i = string_body.find('\\', i)
            if i == -1:
                break
            # There must be a next character; having a backslash at the end
            # of the string would be a SyntaxError.
            next_char = string_body[i+1]
            match = string_body[i:i+2]
            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
                if 'u' in prefix:
                    pass
                elif (_PY3K or self._unicode_literals) and 'b' not in prefix:
                    pass  # unicode by default
                else:
                    self.add_message('anomalous-unicode-escape-in-string',
                                     line=start_row, args=(match, ))
            elif next_char not in self.ESCAPE_CHARACTERS:
                self.add_message('anomalous-backslash-in-string',
                                 line=start_row, args=(match, ))
            # Whether it was a valid escape or not, backslash followed by
            # another character can always be consumed whole: the second
            # character can never be the start of a new backslash escape.
            i += 2
def register(linter):
    """Required entry point: auto-register the string checkers with ``linter``."""
    for checker_class in (StringFormatChecker,
                          StringMethodsChecker,
                          StringConstantChecker):
        linter.register_checker(checker_class(linter))
| lgpl-3.0 |
andrealmeid/ToT | node_modules/big-rat/abs.js | 166 | 'use strict'
var sign = require('./sign')
var neg = require('./neg')
module.exports = abs
// Absolute value of a big rational: negatives are flipped via neg(),
// everything else is handed back unchanged (no copy is made).
function abs(a) {
  return sign(a) < 0 ? neg(a) : a
}
| unlicense |
adrpar/crate | sql-parser/src/main/java/io/crate/sql/tree/AlterTable.java | 2939 | /*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.sql.tree;
import com.google.common.base.MoreObjects;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
 * AST node for an {@code ALTER TABLE} statement.
 * <p>
 * Carries either the properties to SET ({@link #genericProperties()} present,
 * {@link #resetProperties()} empty) or the property names to RESET
 * ({@code genericProperties} absent).
 */
public class AlterTable extends Statement {

    private final Table table;
    private final Optional<GenericProperties> genericProperties;
    private final List<String> resetProperties;

    /**
     * ALTER TABLE ... SET (...): {@code genericProperties} holds the
     * properties to set; no properties are reset.
     */
    public AlterTable(Table table, GenericProperties genericProperties) {
        this.table = table;
        this.genericProperties = Optional.of(genericProperties);
        this.resetProperties = ImmutableList.of();
    }

    /**
     * ALTER TABLE ... RESET (...): {@code resetProperties} holds the names
     * of the properties to reset; no properties are set.
     */
    public AlterTable(Table table, List<String> resetProperties) {
        this.table = table;
        this.resetProperties = resetProperties;
        this.genericProperties = Optional.absent();
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
        return visitor.visitAlterTable(this, context);
    }

    public Table table() {
        return table;
    }

    public Optional<GenericProperties> genericProperties() {
        return genericProperties;
    }

    public List<String> resetProperties() {
        return resetProperties;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("table", table)
                .add("properties", genericProperties)
                .add("resetProperties", resetProperties).toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        AlterTable that = (AlterTable) o;

        // BUG FIX: resetProperties was previously ignored here (and in
        // hashCode), so two RESET statements resetting different
        // properties compared equal.
        if (!genericProperties.equals(that.genericProperties)) return false;
        if (!table.equals(that.table)) return false;
        if (!resetProperties.equals(that.resetProperties)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = table.hashCode();
        result = 31 * result + genericProperties.hashCode();
        result = 31 * result + resetProperties.hashCode();
        return result;
    }
}
| apache-2.0 |
leandrocr/kops | vendor/github.com/miekg/coredns/middleware/etcd/cname_test.go | 2542 | // +build etcd
package etcd
// etcd needs to be running on http://localhost:2379
import (
"testing"
"github.com/coredns/coredns/middleware/etcd/msg"
"github.com/coredns/coredns/middleware/pkg/dnsrecorder"
"github.com/coredns/coredns/middleware/test"
"github.com/miekg/dns"
)
// TestCnameLookup checks that a lookup resolving through a chain of CNAME
// records returns the chain in order, terminated by the final A record,
// in the Extra section of the reply.
//
// NOTE(review): only compiled with the "etcd" build tag and requires a
// running etcd at http://localhost:2379 (see the file header).
func TestCnameLookup(t *testing.T) {
	etc := newEtcdMiddleware()

	// Seed the backend with the CNAME chain; the deferred deletes clean
	// it up again when the test finishes.
	for _, serv := range servicesCname {
		set(t, etc, serv.Key, 0, serv)
		defer delete(t, etc, serv.Key)
	}
	for _, tc := range dnsTestCasesCname {
		m := tc.Msg()

		rec := dnsrecorder.New(&test.ResponseWriter{})
		_, err := etc.ServeDNS(ctxt, rec, m)
		if err != nil {
			t.Errorf("expected no error, got %v\n", err)
			return
		}

		resp := rec.Msg
		// If the header already mismatches, the per-section checks below
		// would be meaningless; log the response and try the next case.
		if !test.Header(t, tc, resp) {
			t.Logf("%v\n", resp)
			continue
		}
		if !test.Section(t, tc, test.Answer, resp.Answer) {
			t.Logf("%v\n", resp)
		}
		if !test.Section(t, tc, test.Ns, resp.Ns) {
			t.Logf("%v\n", resp)
		}
		if !test.Section(t, tc, test.Extra, resp.Extra) {
			t.Logf("%v\n", resp)
		}
	}
}
// servicesCname seeds a six-link CNAME chain
// (cname1 -> cname2 -> ... -> cname6 -> endpoint) ending in an A record,
// plus the entry for a.server1... that starts the SRV lookup.
var servicesCname = []*msg.Service{
	{Host: "cname1.region2.skydns.test", Key: "a.server1.dev.region1.skydns.test."},
	{Host: "cname2.region2.skydns.test", Key: "cname1.region2.skydns.test."},
	{Host: "cname3.region2.skydns.test", Key: "cname2.region2.skydns.test."},
	{Host: "cname4.region2.skydns.test", Key: "cname3.region2.skydns.test."},
	{Host: "cname5.region2.skydns.test", Key: "cname4.region2.skydns.test."},
	{Host: "cname6.region2.skydns.test", Key: "cname5.region2.skydns.test."},
	{Host: "endpoint.region2.skydns.test", Key: "cname6.region2.skydns.test."},
	{Host: "10.240.0.1", Key: "endpoint.region2.skydns.test."},
}
// dnsTestCasesCname expects the SRV record for the queried name in the
// Answer section, and the fully expanded CNAME chain — in chain order —
// plus the terminal A record in the Extra section.
var dnsTestCasesCname = []test.Case{
	{
		Qname: "a.server1.dev.region1.skydns.test.", Qtype: dns.TypeSRV,
		Answer: []dns.RR{
			test.SRV("a.server1.dev.region1.skydns.test. 300 IN SRV 10 100 0 cname1.region2.skydns.test."),
		},
		Extra: []dns.RR{
			test.CNAME("cname1.region2.skydns.test. 300 IN CNAME cname2.region2.skydns.test."),
			test.CNAME("cname2.region2.skydns.test. 300 IN CNAME cname3.region2.skydns.test."),
			test.CNAME("cname3.region2.skydns.test. 300 IN CNAME cname4.region2.skydns.test."),
			test.CNAME("cname4.region2.skydns.test. 300 IN CNAME cname5.region2.skydns.test."),
			test.CNAME("cname5.region2.skydns.test. 300 IN CNAME cname6.region2.skydns.test."),
			test.CNAME("cname6.region2.skydns.test. 300 IN CNAME endpoint.region2.skydns.test."),
			test.A("endpoint.region2.skydns.test. 300 IN A 10.240.0.1"),
		},
	},
}
| apache-2.0 |
gochist/horizon | openstack_dashboard/dashboards/project/images/images/forms.py | 11395 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing images.
"""
from django.conf import settings
from django.forms import ValidationError # noqa
from django.forms.widgets import HiddenInput # noqa
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
from openstack_dashboard import policy
IMAGE_BACKEND_SETTINGS = getattr(settings, 'OPENSTACK_IMAGE_BACKEND', {})
IMAGE_FORMAT_CHOICES = IMAGE_BACKEND_SETTINGS.get('image_formats', [])
class CreateImageForm(forms.SelfHandlingForm):
    """Form used to create a new image in Glance.

    The image data comes either from an external HTTP location
    (``copy_from``) or from a locally uploaded file (``image_file``),
    selected via ``source_type``. Availability of each source is gated by
    settings and policy checks in ``__init__``.
    """

    # FIX: max_length previously passed as the string "255"; Django's
    # CharField expects an integer.
    name = forms.CharField(max_length=255, label=_("Name"), required=True)
    description = forms.CharField(widget=forms.widgets.Textarea(
        attrs={'class': 'modal-body-fixed-width'}),
        label=_("Description"),
        required=False)
    source_type = forms.ChoiceField(
        label=_('Image Source'),
        required=False,
        choices=[('url', _('Image Location')),
                 ('file', _('Image File'))],
        widget=forms.Select(attrs={
            'class': 'switchable',
            'data-slug': 'source'}))
    copy_from = forms.CharField(max_length=255,
                                label=_("Image Location"),
                                help_text=_("An external (HTTP) URL to load "
                                            "the image from."),
                                widget=forms.TextInput(attrs={
                                    'class': 'switched',
                                    'data-switch-on': 'source',
                                    'data-source-url': _('Image Location')}),
                                required=False)
    image_file = forms.FileField(label=_("Image File"),
                                 help_text=_("A local image to upload."),
                                 widget=forms.FileInput(attrs={
                                     'class': 'switched',
                                     'data-switch-on': 'source',
                                     'data-source-file': _('Image File')}),
                                 required=False)
    # Choices are filled in __init__ from the OPENSTACK_IMAGE_BACKEND
    # setting.
    disk_format = forms.ChoiceField(label=_('Format'),
                                    required=True,
                                    choices=[],
                                    widget=forms.Select(attrs={'class':
                                                               'switchable'}))
    architecture = forms.CharField(max_length=255, label=_("Architecture"),
                                   required=False)
    minimum_disk = forms.IntegerField(label=_("Minimum Disk (GB)"),
                                      help_text=_('The minimum disk size'
                                                  ' required to boot the'
                                                  ' image. If unspecified, this'
                                                  ' value defaults to 0'
                                                  ' (no minimum).'),
                                      required=False)
    minimum_ram = forms.IntegerField(label=_("Minimum Ram (MB)"),
                                     help_text=_('The minimum memory size'
                                                 ' required to boot the'
                                                 ' image. If unspecified, this'
                                                 ' value defaults to 0 (no'
                                                 ' minimum).'),
                                     required=False)
    is_public = forms.BooleanField(label=_("Public"), required=False)
    protected = forms.BooleanField(label=_("Protected"), required=False)

    def __init__(self, request, *args, **kwargs):
        """Hide source/visibility fields the user is not allowed to use."""
        super(CreateImageForm, self).__init__(request, *args, **kwargs)
        # File upload requires both the setting and the policy to allow it.
        if (not settings.HORIZON_IMAGES_ALLOW_UPLOAD or
                not policy.check((("image", "upload_image"),), request)):
            self._hide_file_source_type()
        if not policy.check((("image", "set_image_location"),), request):
            self._hide_url_source_type()
        if not policy.check((("image", "publicize_image"),), request):
            self._hide_is_public()
        self.fields['disk_format'].choices = IMAGE_FORMAT_CHOICES

    def _hide_file_source_type(self):
        """Remove the 'file' source option and hide the upload widget."""
        self.fields['image_file'].widget = HiddenInput()
        source_type = self.fields['source_type']
        source_type.choices = [choice for choice in source_type.choices
                               if choice[0] != 'file']
        # With a single remaining choice there is nothing to select.
        if len(source_type.choices) == 1:
            source_type.widget = HiddenInput()

    def _hide_url_source_type(self):
        """Remove the 'url' source option and hide the location widget."""
        self.fields['copy_from'].widget = HiddenInput()
        source_type = self.fields['source_type']
        source_type.choices = [choice for choice in source_type.choices
                               if choice[0] != 'url']
        if len(source_type.choices) == 1:
            source_type.widget = HiddenInput()

    def _hide_is_public(self):
        """Hide the 'Public' flag and force it to False."""
        self.fields['is_public'].widget = HiddenInput()
        self.fields['is_public'].initial = False

    def clean(self):
        """Require exactly one of image file / image location."""
        data = super(CreateImageForm, self).clean()

        # The image_file key can be missing based on particular upload
        # conditions. Code defensively for it here...
        image_file = data.get('image_file', None)
        image_url = data.get('copy_from', None)

        if not image_url and not image_file:
            # FIX: grammar in user-facing message ("A image" -> "An image").
            raise ValidationError(
                _("An image or external image location must be specified."))
        elif image_url and image_file:
            raise ValidationError(
                _("Can not specify both image and external image location."))
        else:
            return data

    def handle(self, request, data):
        """Create the image in Glance; returns the image or None on error."""
        # Glance does not really do anything with container_format at the
        # moment. It requires it is set to the same disk_format for the three
        # Amazon image types, otherwise it just treats them as 'bare.' As such
        # we will just set that to be that here instead of bothering the user
        # with asking them for information we can already determine.
        if data['disk_format'] in ('ami', 'aki', 'ari',):
            container_format = data['disk_format']
        else:
            container_format = 'bare'

        meta = {'is_public': data['is_public'],
                'protected': data['protected'],
                'disk_format': data['disk_format'],
                'container_format': container_format,
                'min_disk': (data['minimum_disk'] or 0),
                'min_ram': (data['minimum_ram'] or 0),
                'name': data['name'],
                'properties': {}}

        if data['description']:
            meta['properties']['description'] = data['description']
        if data['architecture']:
            meta['properties']['architecture'] = data['architecture']
        # Uploaded file wins only when uploading is actually allowed;
        # otherwise fall back to the external location.
        if (settings.HORIZON_IMAGES_ALLOW_UPLOAD and
                policy.check((("image", "upload_image"),), request) and
                data.get('image_file', None)):
            meta['data'] = self.files['image_file']
        else:
            meta['copy_from'] = data['copy_from']

        try:
            image = api.glance.image_create(request, **meta)
            messages.success(request,
                             _('Your image %s has been queued for creation.') %
                             data['name'])
            return image
        except Exception:
            exceptions.handle(request, _('Unable to create new image.'))
class UpdateImageForm(forms.SelfHandlingForm):
    """Form used to update the metadata of an existing Glance image."""

    image_id = forms.CharField(widget=forms.HiddenInput())
    # FIX: max_length values previously passed as strings ("255"/"36");
    # Django's CharField expects integers.
    name = forms.CharField(max_length=255, label=_("Name"))
    description = forms.CharField(widget=forms.widgets.Textarea(),
                                  label=_("Description"),
                                  required=False)
    # Kernel/ramdisk/architecture/format are shown read-only: they are
    # informational and not meant to be edited here.
    kernel = forms.CharField(max_length=36, label=_("Kernel ID"),
                             required=False,
                             widget=forms.TextInput(
                                 attrs={'readonly': 'readonly'}
                             ))
    ramdisk = forms.CharField(max_length=36, label=_("Ramdisk ID"),
                              required=False,
                              widget=forms.TextInput(
                                  attrs={'readonly': 'readonly'}
                              ))
    architecture = forms.CharField(label=_("Architecture"), required=False,
                                   widget=forms.TextInput(
                                       attrs={'readonly': 'readonly'}
                                   ))
    disk_format = forms.CharField(label=_("Format"),
                                  widget=forms.TextInput(
                                      attrs={'readonly': 'readonly'}
                                  ))
    public = forms.BooleanField(label=_("Public"), required=False)
    protected = forms.BooleanField(label=_("Protected"), required=False)

    def __init__(self, request, *args, **kwargs):
        """Make the 'Public' flag read-only when policy forbids publicizing."""
        super(UpdateImageForm, self).__init__(request, *args, **kwargs)
        if not policy.check((("image", "publicize_image"),), request):
            self.fields['public'].widget = forms.CheckboxInput(
                attrs={'readonly': 'readonly'})

    def handle(self, request, data):
        """Update the image metadata in Glance; returns the image or None."""
        image_id = data['image_id']
        error_updating = _('Unable to update image "%s".')

        # Amazon image types must keep container_format == disk_format;
        # everything else is treated as 'bare' (see CreateImageForm.handle).
        if data['disk_format'] in ['aki', 'ari', 'ami']:
            container_format = data['disk_format']
        else:
            container_format = 'bare'

        meta = {'is_public': data['public'],
                'protected': data['protected'],
                'disk_format': data['disk_format'],
                'container_format': container_format,
                'name': data['name'],
                'properties': {'description': data['description']}}
        if data['kernel']:
            meta['properties']['kernel_id'] = data['kernel']
        if data['ramdisk']:
            meta['properties']['ramdisk_id'] = data['ramdisk']
        if data['architecture']:
            meta['properties']['architecture'] = data['architecture']

        # Ensure we do not delete properties that have already been
        # set on an image.
        meta['purge_props'] = False

        try:
            image = api.glance.image_update(request, image_id, **meta)
            messages.success(request, _('Image was successfully updated.'))
            return image
        except Exception:
            exceptions.handle(request, error_updating % image_id)
| apache-2.0 |
jomarko/drools | drools-cdi/src/test/java/org/drools/cdi/kproject/KieProjectRuntimeModulesTest.java | 8909 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.cdi.kproject;
import org.drools.compiler.kie.builder.impl.FileKieModule;
import org.drools.compiler.kie.builder.impl.KieContainerImpl;
import org.drools.compiler.kie.builder.impl.KieModuleKieProject;
import org.drools.compiler.kie.builder.impl.ZipKieModule;
import org.drools.core.impl.KnowledgeBaseImpl;
import org.junit.Test;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.runtime.KieContainer;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.stream.Collectors;
import static org.drools.core.util.IoUtils.readBytesFromInputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for class and resource resolution across KieModule
 * dependencies, covering modules packaged as jars ({@link ZipKieModule})
 * and as exploded directories ({@link FileKieModule}).
 */
public class KieProjectRuntimeModulesTest extends AbstractKnowledgeTest {

    // NOTE(review): method name misspells "Multiple"; renaming would be a
    // behavioural change for anyone filtering tests by name, so it is only
    // flagged here.
    @Test
    public void createMultpleJarAndFileResources() throws IOException,
                                                          ClassNotFoundException,
                                                          InterruptedException {
        // Three jar-packaged modules plus one folder-packaged module.
        KieModuleModel kProjModel1 = createKieModule( "jar1", true );
        KieModuleModel kProjModel2 = createKieModule( "jar2", true );
        KieModuleModel kProjModel3 = createKieModule( "jar3", true );
        KieModuleModel kProjModel4 = createKieModule( "fol4", false );

        ReleaseId releaseId1 = KieServices.Factory.get().newReleaseId("jar1",
                                                                      "art1",
                                                                      "1.0-SNAPSHOT");
        ReleaseId releaseId2 = KieServices.Factory.get().newReleaseId("jar2",
                                                                      "art1",
                                                                      "1.0-SNAPSHOT");
        ReleaseId releaseId3 = KieServices.Factory.get().newReleaseId("jar3",
                                                                      "art1",
                                                                      "1.0-SNAPSHOT");
        ReleaseId releaseId4 = KieServices.Factory.get().newReleaseId("fol4",
                                                                      "art1",
                                                                      "1.0-SNAPSHOT");

        java.io.File file1 = fileManager.newFile( "jar1-1.0-SNAPSHOT.jar" );
        java.io.File file2 = fileManager.newFile( "jar2-1.0-SNAPSHOT.jar" );
        java.io.File file3 = fileManager.newFile( "jar3-1.0-SNAPSHOT.jar" );
        java.io.File fol4 = fileManager.newFile( "fol4-1.0-SNAPSHOT" );

        ZipKieModule mod1 = new ZipKieModule(releaseId1,
                                             kProjModel1,
                                             file1 );
        ZipKieModule mod2 = new ZipKieModule(releaseId2,
                                             kProjModel2,
                                             file2 );
        ZipKieModule mod3 = new ZipKieModule(releaseId3,
                                             kProjModel3,
                                             file3 );
        FileKieModule mod4 = new FileKieModule(releaseId4,
                                               kProjModel4,
                                               fol4 );

        // mod1 is the root module; the other three are its dependencies.
        mod1.addKieDependency( mod2 );
        mod1.addKieDependency( mod3 );
        mod1.addKieDependency( mod4 );

        KieModuleKieProject kProject = new KieModuleKieProject(mod1);
        KieContainer kContainer = new KieContainerImpl( kProject,
                                                        null );

        KieBase kBase = kContainer.getKieBase( "jar1.KBase1" );

        // Classes from the root module and from each dependency must all be
        // loadable through the KieBase's root class loader.
        ClassLoader cl = ((KnowledgeBaseImpl) kBase).getRootClassLoader();

        Class cls = cl.loadClass( "org.drools.compiler.cdi.test.KProjectTestClassjar1" );
        assertNotNull( cls );
        cls = cl.loadClass( "org.drools.compiler.cdi.test.KProjectTestClassjar2" );
        assertNotNull( cls );
        cls = cl.loadClass( "org.drools.compiler.cdi.test.KProjectTestClassjar3" );
        assertNotNull( cls );

        testEntry( new KProjectTestClassImpl( "jar1",
                                              kContainer ),
                   "jar1" );
        testEntry( new KProjectTestClassImpl( "jar2",
                                              kContainer ),
                   "jar2" );
        testEntry( new KProjectTestClassImpl( "jar3",
                                              kContainer ),
                   "jar3" );
        testEntry( new KProjectTestClassImpl( "fol4",
                                              kContainer ),
                   "fol4" );

    }

    @Test
    public void createModuleAndFindResources() throws IOException,
                                                      ClassNotFoundException,
                                                      InterruptedException {
        createKieModule( "fol4", false );
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId("fol4", "art1", "1.0-SNAPSHOT");
        KieContainer kieContainer = KieServices.Factory.get().newKieContainer(releaseId);
        assertNotNull(kieContainer);

        // The resource must be reachable both as a stream and via
        // getResources(), and both paths must yield identical bytes.
        InputStream is = kieContainer.getClassLoader().getResourceAsStream("/META-INF/beans.xml");
        assertNotNull(is);
        byte[] bytesFromStream = readBytesFromInputStream(is);

        Enumeration<URL> foundResources = kieContainer.getClassLoader().getResources("/META-INF/beans.xml");
        assertNotNull(foundResources);

        List<URL> resourcesAsList = Collections.list(foundResources);
        assertNotNull(resourcesAsList);
        assertEquals(1, resourcesAsList.size());

        URL resourceUrl = resourcesAsList.get(0);
        byte[] bytesFromURL = readBytesFromInputStream(resourceUrl.openStream());
        assertTrue(Arrays.equals(bytesFromStream, bytesFromURL));

        // URLs for in-memory modules use the "mfs:" scheme.
        String url = resourceUrl.toString();
        assertNotNull(url);
        assertEquals("mfs:/META-INF/beans.xml", url);

        String file = resourceUrl.getFile();
        assertNotNull(file);
        assertEquals("/META-INF/beans.xml", file);
    }

    @Test
    public void createModuleAndFindResourcesVerifyURL() throws IOException,
                                                               ClassNotFoundException,
                                                               InterruptedException {
        createKieModule( "fol4", false );
        ReleaseId releaseId = KieServices.Factory.get().newReleaseId("fol4", "art1", "1.0-SNAPSHOT");
        KieContainer kieContainer = KieServices.Factory.get().newKieContainer(releaseId);
        assertNotNull(kieContainer);

        // Same check as above, but with a relative path (no leading slash).
        InputStream is = kieContainer.getClassLoader().getResourceAsStream("META-INF/beans.xml");
        assertNotNull(is);
        byte[] bytesFromStream = readBytesFromInputStream(is);

        Enumeration<URL> foundResources = kieContainer.getClassLoader().getResources("META-INF/beans.xml");
        assertNotNull(foundResources);

        List<URL> resourcesAsList = Collections
                .list(foundResources)
                .stream()
                /*
                 * This module depenency have beans.xml files
                 * which are found when calling `getResources` with a relative path.
                 */
                .filter(url -> !url.toString().contains("drools-cdi"))
                .collect(Collectors.toList());
        assertNotNull(resourcesAsList);
        assertEquals(1, resourcesAsList.size());

        URL resourceUrl = resourcesAsList.get(0);
        byte[] bytesFromURL = readBytesFromInputStream(resourceUrl.openStream());
        assertTrue(Arrays.equals(bytesFromStream, bytesFromURL));

        String url = resourceUrl.toString();
        assertNotNull(url);
        assertEquals("mfs:/META-INF/beans.xml", url);

        String file = resourceUrl.getFile();
        assertNotNull(file);
        assertEquals("/META-INF/beans.xml", file);
    }
}
| apache-2.0 |
hupda-edpe/c | engine/src/test/java/org/camunda/bpm/engine/test/authorization/service/MyTaskService.java | 926 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.authorization.service;
import org.camunda.bpm.engine.delegate.DelegateExecution;
/**
 * Test delegate that records the current authentication and instance counts
 * (via {@link MyDelegationService}) and resolves a task assignee.
 *
 * @author Roman Smirnov
 */
public class MyTaskService extends MyDelegationService {

  public String assignTask(DelegateExecution execution) {
    // Capture state first so the authorization tests can assert on it
    // after the process has run.
    logAuthentication(execution);
    logInstancesCount(execution);
    // Assignee user id; presumably resolved by a task-assignment
    // expression in the BPMN model — confirm against the test deployment.
    return "demo";
  }

}
| apache-2.0 |
terryturner/VRPinGMapFx | GMapsFX/src/main/java/com/lynden/gmapsfx/service/elevation/LocationElevationRequest.java | 1071 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.lynden.gmapsfx.service.elevation;
import com.lynden.gmapsfx.javascript.JavascriptObject;
import com.lynden.gmapsfx.javascript.object.GMapObjectType;
import com.lynden.gmapsfx.javascript.object.LatLong;
/** Request object for the {@link ElevationService}: wraps an array of
 * locations whose elevations should be looked up.
 *
 * @author Geoff Capper
 */
public class LocationElevationRequest extends JavascriptObject {

    public LocationElevationRequest() {
        super(GMapObjectType.OBJECT);
    }

    public LocationElevationRequest(LatLong[] locations) {
        super(GMapObjectType.OBJECT);
        // Start with an empty JS array, then push each location's underlying
        // JS variable onto it.
        getJSObject().setMember("locations", getJSObject().eval("[]"));
        for (LatLong location : locations) {
            getJSObject().eval(getVariableName() + ".locations.push(" + location.getVariableName() + ")");
        }
    }

}
| apache-2.0 |
class Hash
  # Returns a copy of the hash (same class) with every key replaced by the
  # block's result; values are carried over untouched.
  #
  #   { name: 'Rob', age: '28' }.transform_keys { |key| key.to_s.upcase }
  #   # => {"NAME"=>"Rob", "AGE"=>"28"}
  #
  # Without a block, a sized Enumerator is returned for chaining:
  #
  #   hash.transform_keys.with_index { |k, i| [k, i].join }
  def transform_keys
    return enum_for(:transform_keys) { size } unless block_given?
    copy = self.class.new
    each_pair { |key, value| copy[yield(key)] = value }
    copy
  end

  # In-place version of +transform_keys+; returns self (or an Enumerator
  # when called without a block).
  def transform_keys!
    return enum_for(:transform_keys!) { size } unless block_given?
    # Snapshot the key list first: the hash is mutated while walking it.
    keys.each { |old_key| self[yield(old_key)] = delete(old_key) }
    self
  end

  # Returns a new hash with every key converted to a String.
  #
  #   { name: 'Rob', age: '28' }.stringify_keys # => {"name"=>"Rob", "age"=>"28"}
  def stringify_keys
    transform_keys { |key| key.to_s }
  end

  # In-place variant of +stringify_keys+.
  def stringify_keys!
    transform_keys! { |key| key.to_s }
  end

  # Returns a new hash with every key converted to a Symbol where the key
  # supports it; keys that cannot be symbolized are kept unchanged.
  #
  #   { 'name' => 'Rob' }.symbolize_keys # => {:name=>"Rob"}
  def symbolize_keys
    transform_keys do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end
  alias_method :to_options, :symbolize_keys

  # In-place variant of +symbolize_keys+.
  def symbolize_keys!
    transform_keys! do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end
  alias_method :to_options!, :symbolize_keys!

  # Raises ArgumentError unless every key of the hash appears in
  # +valid_keys+. Matching is exact — a String key never matches a Symbol
  # in +valid_keys+ and vice versa.
  def assert_valid_keys(*valid_keys)
    valid_keys.flatten!
    each_key do |key|
      next if valid_keys.include?(key)
      raise ArgumentError.new("Unknown key: #{key.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}")
    end
  end

  # Like +transform_keys+, but also recurses into nested hashes and into
  # hashes contained in arrays.
  #
  #   { person: { name: 'Rob' } }.deep_transform_keys { |k| k.to_s.upcase }
  #   # => {"PERSON"=>{"NAME"=>"Rob"}}
  def deep_transform_keys(&block)
    _deep_transform_keys_in_object(self, &block)
  end

  # Destructive version of +deep_transform_keys+.
  def deep_transform_keys!(&block)
    _deep_transform_keys_in_object!(self, &block)
  end

  # Recursively converts all keys (including nested ones) to Strings.
  def deep_stringify_keys
    deep_transform_keys { |key| key.to_s }
  end

  # Destructive version of +deep_stringify_keys+.
  def deep_stringify_keys!
    deep_transform_keys! { |key| key.to_s }
  end

  # Recursively converts all keys (including nested ones) to Symbols,
  # where the keys support it.
  def deep_symbolize_keys
    deep_transform_keys do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end

  # Destructive version of +deep_symbolize_keys+.
  def deep_symbolize_keys!
    deep_transform_keys! do |key|
      begin
        key.to_sym
      rescue StandardError
        key
      end
    end
  end

  private

  # Walks +object+ recursively and returns a transformed copy; nested
  # hashes come back as plain Hash instances.
  def _deep_transform_keys_in_object(object, &block)
    case object
    when Hash
      transformed = {}
      object.each_pair do |key, value|
        transformed[yield(key)] = _deep_transform_keys_in_object(value, &block)
      end
      transformed
    when Array
      object.map { |element| _deep_transform_keys_in_object(element, &block) }
    else
      object
    end
  end

  # Destructive counterpart of the helper above: rewrites keys inside
  # +object+ itself and returns it.
  def _deep_transform_keys_in_object!(object, &block)
    case object
    when Hash
      object.keys.each do |key|
        value = object.delete(key)
        object[yield(key)] = _deep_transform_keys_in_object!(value, &block)
      end
      object
    when Array
      object.map! { |element| _deep_transform_keys_in_object!(element, &block) }
    else
      object
    end
  end
end
| apache-2.0 |
koscejev/camel | camel-core/src/main/java/org/apache/camel/processor/RecipientList.java | 10987 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.util.Iterator;
import java.util.concurrent.ExecutorService;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.Processor;
import org.apache.camel.impl.EmptyProducerCache;
import org.apache.camel.impl.ProducerCache;
import org.apache.camel.processor.aggregate.AggregationStrategy;
import org.apache.camel.processor.aggregate.UseLatestAggregationStrategy;
import org.apache.camel.spi.IdAware;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.AsyncProcessorHelper;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.util.ObjectHelper.notNull;
/**
* Implements a dynamic <a
* href="http://camel.apache.org/recipient-list.html">Recipient List</a>
* pattern where the list of actual endpoints to send a message exchange to are
* dependent on some dynamic expression.
*
* @version
*/
public class RecipientList extends ServiceSupport implements AsyncProcessor, IdAware {
private static final Logger LOG = LoggerFactory.getLogger(RecipientList.class);
private static final String IGNORE_DELIMITER_MARKER = "false";
private final CamelContext camelContext;
private String id;
private ProducerCache producerCache;
private Expression expression;
private final String delimiter;
private boolean parallelProcessing;
private boolean parallelAggregate;
private boolean stopOnException;
private boolean ignoreInvalidEndpoints;
private boolean streaming;
private long timeout;
private int cacheSize;
private Processor onPrepare;
private boolean shareUnitOfWork;
private ExecutorService executorService;
private boolean shutdownExecutorService;
private ExecutorService aggregateExecutorService;
private AggregationStrategy aggregationStrategy = new UseLatestAggregationStrategy();
    /**
     * Creates a recipient list that splits the evaluated recipients on the
     * default comma delimiter.
     */
    public RecipientList(CamelContext camelContext) {
        // use comma by default as delimiter
        this(camelContext, ",");
    }
    /**
     * Creates a recipient list with a custom delimiter and no fixed
     * expression (the recipients are taken from the exchange at runtime).
     */
    public RecipientList(CamelContext camelContext, String delimiter) {
        notNull(camelContext, "camelContext");
        ObjectHelper.notEmpty(delimiter, "delimiter");
        this.camelContext = camelContext;
        this.delimiter = delimiter;
    }
    /**
     * Creates a recipient list that evaluates the given expression per
     * exchange, splitting on the default comma delimiter.
     */
    public RecipientList(CamelContext camelContext, Expression expression) {
        // use comma by default as delimiter
        this(camelContext, expression, ",");
    }
    /**
     * Creates a recipient list that evaluates the given expression per
     * exchange and splits its result on the given delimiter.
     */
    public RecipientList(CamelContext camelContext, Expression expression, String delimiter) {
        notNull(camelContext, "camelContext");
        ObjectHelper.notNull(expression, "expression");
        ObjectHelper.notEmpty(delimiter, "delimiter");
        this.camelContext = camelContext;
        this.expression = expression;
        this.delimiter = delimiter;
    }
@Override
public String toString() {
return "RecipientList[" + (expression != null ? expression : "") + "]";
}
    // Part of the IdAware contract: the processor id assigned by Camel.
    public String getId() {
        return id;
    }
    // Part of the IdAware contract.
    public void setId(String id) {
        this.id = id;
    }
    /**
     * Synchronous facade: delegates to the asynchronous variant and waits
     * for it to complete.
     */
    public void process(Exchange exchange) throws Exception {
        AsyncProcessorHelper.process(this, exchange);
    }
    /**
     * Resolves the recipients for the exchange and dispatches it to them.
     * Returns the AsyncProcessor-contract boolean (presumably true when
     * processing completed synchronously — confirm against the Camel
     * AsyncProcessor javadoc).
     */
    public boolean process(Exchange exchange, AsyncCallback callback) {
        if (!isStarted()) {
            throw new IllegalStateException("RecipientList has not been started: " + this);
        }

        // use the evaluate expression result if exists
        Object recipientList = exchange.removeProperty(Exchange.EVALUATE_EXPRESSION_RESULT);
        if (recipientList == null && expression != null) {
            // fallback and evaluate the expression
            recipientList = expression.evaluate(exchange, Object.class);
        }

        return sendToRecipientList(exchange, recipientList, callback);
    }
/**
 * Sends the given exchange to the recipient list.
 * <p>
 * Builds a {@link RecipientListProcessor} over an iterator of the recipients,
 * starts it, optionally wraps it for shared unit-of-work semantics, and then
 * delegates processing to it.
 *
 * @param exchange      the exchange to send
 * @param recipientList the recipients (any object an iterator can be created from)
 * @param callback      the async callback
 * @return whether processing completed synchronously
 */
public boolean sendToRecipientList(Exchange exchange, Object recipientList, AsyncCallback callback) {
Iterator<Object> iter;
// the "false" delimiter marker means: do not split the recipient value at all
if (delimiter != null && delimiter.equalsIgnoreCase(IGNORE_DELIMITER_MARKER)) {
iter = ObjectHelper.createIterator(recipientList, null);
} else {
iter = ObjectHelper.createIterator(recipientList, delimiter);
}
// anonymous subclass so all recipient lists from this processor share one
// lazily-created aggregate executor instead of creating a pool per send
RecipientListProcessor rlp = new RecipientListProcessor(exchange.getContext(), producerCache, iter, getAggregationStrategy(),
isParallelProcessing(), getExecutorService(), isShutdownExecutorService(),
isStreaming(), isStopOnException(), getTimeout(), getOnPrepare(), isShareUnitOfWork(), isParallelAggregate()) {
@Override
protected synchronized ExecutorService createAggregateExecutorService(String name) {
// use a shared executor service to avoid creating new thread pools
if (aggregateExecutorService == null) {
aggregateExecutorService = super.createAggregateExecutorService("RecipientList-AggregateTask");
}
return aggregateExecutorService;
}
};
rlp.setIgnoreInvalidEndpoints(isIgnoreInvalidEndpoints());
// start the service; on failure report the exception through the exchange
// and signal the callback as done synchronously
try {
ServiceHelper.startService(rlp);
} catch (Exception e) {
exchange.setException(e);
callback.done(true);
return true;
}
AsyncProcessor target = rlp;
if (isShareUnitOfWork()) {
// wrap answer in a sub unit of work, since we share the unit of work
CamelInternalProcessor internalProcessor = new CamelInternalProcessor(rlp);
internalProcessor.addAdvice(new CamelInternalProcessor.SubUnitOfWorkProcessorAdvice());
target = internalProcessor;
}
// now let the multicast process the exchange
return target.process(exchange, callback);
}
protected Endpoint resolveEndpoint(Exchange exchange, Object recipient) {
    // End users may have padded the recipient URIs with spaces around the
    // delimiter, so normalize string recipients before resolving.
    Object target = recipient;
    if (target instanceof String) {
        target = ((String) target).trim();
    }
    return ExchangeHelper.resolveEndpoint(exchange, target);
}
/**
 * Starts this processor: creates the producer cache (sized by {@code cacheSize})
 * if it does not exist yet, then starts the aggregation strategy and cache.
 */
protected void doStart() throws Exception {
if (producerCache == null) {
// cacheSize < 0 disables caching, 0 uses the default size, > 0 a custom size
if (cacheSize < 0) {
producerCache = new EmptyProducerCache(this, camelContext);
LOG.debug("RecipientList {} is not using ProducerCache", this);
} else if (cacheSize == 0) {
producerCache = new ProducerCache(this, camelContext);
LOG.debug("RecipientList {} using ProducerCache with default cache size", this);
} else {
producerCache = new ProducerCache(this, camelContext, cacheSize);
LOG.debug("RecipientList {} using ProducerCache with cacheSize={}", this, cacheSize);
}
}
ServiceHelper.startServices(aggregationStrategy, producerCache);
}
/** Stops the producer cache and aggregation strategy (reverse of start order). */
protected void doStop() throws Exception {
ServiceHelper.stopServices(producerCache, aggregationStrategy);
}
/**
 * Shuts down the cache and strategy, and shuts down the configured executor
 * service when this processor was asked to manage its lifecycle.
 */
protected void doShutdown() throws Exception {
ServiceHelper.stopAndShutdownServices(producerCache, aggregationStrategy);
if (shutdownExecutorService && executorService != null) {
camelContext.getExecutorServiceManager().shutdownNow(executorService);
}
}
// -------------------------------------------------------------------------
// Plain bean accessors for the configuration options. Setters take effect on
// the next exchange processed; they do not restart the processor.
// -------------------------------------------------------------------------
public boolean isStreaming() {
return streaming;
}
public void setStreaming(boolean streaming) {
this.streaming = streaming;
}
public boolean isIgnoreInvalidEndpoints() {
return ignoreInvalidEndpoints;
}
public void setIgnoreInvalidEndpoints(boolean ignoreInvalidEndpoints) {
this.ignoreInvalidEndpoints = ignoreInvalidEndpoints;
}
public boolean isParallelProcessing() {
return parallelProcessing;
}
public void setParallelProcessing(boolean parallelProcessing) {
this.parallelProcessing = parallelProcessing;
}
public boolean isParallelAggregate() {
return parallelAggregate;
}
public void setParallelAggregate(boolean parallelAggregate) {
this.parallelAggregate = parallelAggregate;
}
public boolean isStopOnException() {
return stopOnException;
}
public void setStopOnException(boolean stopOnException) {
this.stopOnException = stopOnException;
}
public ExecutorService getExecutorService() {
return executorService;
}
public void setExecutorService(ExecutorService executorService) {
this.executorService = executorService;
}
public boolean isShutdownExecutorService() {
return shutdownExecutorService;
}
public void setShutdownExecutorService(boolean shutdownExecutorService) {
this.shutdownExecutorService = shutdownExecutorService;
}
public AggregationStrategy getAggregationStrategy() {
return aggregationStrategy;
}
public void setAggregationStrategy(AggregationStrategy aggregationStrategy) {
this.aggregationStrategy = aggregationStrategy;
}
public long getTimeout() {
return timeout;
}
public void setTimeout(long timeout) {
this.timeout = timeout;
}
public Processor getOnPrepare() {
return onPrepare;
}
public void setOnPrepare(Processor onPrepare) {
this.onPrepare = onPrepare;
}
public boolean isShareUnitOfWork() {
return shareUnitOfWork;
}
public void setShareUnitOfWork(boolean shareUnitOfWork) {
this.shareUnitOfWork = shareUnitOfWork;
}
public int getCacheSize() {
return cacheSize;
}
public void setCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
}
}
| apache-2.0 |
amckee23/drools | drools-workbench-models/drools-workbench-models-guided-template/src/test/java/org/drools/workbench/models/guided/template/backend/TemplateIntegrationTest.java | 5626 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.drools.workbench.models.guided.template.backend;
import org.junit.Test;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.internal.utils.KieHelper;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Integration test that builds a KIE session from a guided-template XML rule
 * and verifies the rule only fires for matching facts: the template constrains
 * {@code Company.companyId} against a column of candidate ids, and the RHS
 * marks matched companies as found.
 */
public class TemplateIntegrationTest {
    /** Guided-template rule XML with a 4-row data table of candidate company ids. */
    private static final String template =
            "<rule>\n" +
            "  <name>RecTemplate</name>\n" +
            "  <modelVersion>1.0</modelVersion>\n" +
            "  <attributes/>\n" +
            "  <metadataList/>\n" +
            "  <lhs>\n" +
            "    <fact>\n" +
            "      <constraintList>\n" +
            "        <constraints>\n" +
            "          <fieldConstraint>\n" +
            "            <value>$companyId</value>\n" +
            "            <operator>==</operator>\n" +
            "            <constraintValueType>7</constraintValueType>\n" +
            "            <expression>\n" +
            "              <parts/>\n" +
            "              <index>2147483647</index>\n" +
            "            </expression>\n" +
            "            <parameters/>\n" +
            "            <factType>Company</factType>\n" +
            "            <fieldName>companyId</fieldName>\n" +
            "            <fieldType>Long</fieldType>\n" +
            "          </fieldConstraint>\n" +
            "        </constraints>\n" +
            "      </constraintList>\n" +
            "      <factType>Company</factType>\n" +
            "      <boundName>$c</boundName>\n" +
            "      <isNegated>false</isNegated>\n" +
            "      <window>\n" +
            "        <parameters/>\n" +
            "      </window>\n" +
            "    </fact>\n" +
            "  </lhs>\n" +
            "  <rhs>\n" +
            "    <freeForm>\n" +
            "      <text>System.out.println(&quot;Found ----> &quot; + $c);\n$c.setFound(true);</text>\n" +
            "    </freeForm>\n" +
            "  </rhs>\n" +
            "  <imports>\n" +
            "    <imports>\n" +
            "      <org.drools.workbench.models.datamodel.imports.Import>\n" +
            "        <type>" + Company.class.getCanonicalName() + "</type>\n" +
            "      </org.drools.workbench.models.datamodel.imports.Import>\n" +
            "    </imports>\n" +
            "  </imports>\n" +
            "  <packageName>com.sample</packageName>\n" +
            "  <isNegated>false</isNegated>\n" +
            "  <table>\n" +
            "    <entry>\n" +
            "      <string>__ID_KOL_NAME__</string>\n" +
            "      <list>\n" +
            "        <string>1</string>\n" +
            "        <string>0</string>\n" +
            "        <string>0</string>\n" +
            "        <string>0</string>\n" +
            "      </list>\n" +
            "    </entry>\n" +
            "    <entry>\n" +
            "      <string>$companyId</string>\n" +
            "      <list>\n" +
            "        <string>321</string>\n" +
            "        <string>123</string>\n" +
            "        <string>12345</string>\n" +
            "        <string>54321</string>\n" +
            "      </list>\n" +
            "    </entry>\n" +
            "  </table>\n" +
            "  <idCol>1</idCol>\n" +
            "  <rowsCount>4</rowsCount>\n" +
            "</rule>\n";

    /**
     * Inserts one company whose id (123) appears in the template's data table
     * and one whose id (456) does not; only the first must be marked as found.
     */
    @Test
    public void test() {
        // NOTE: an unused DRL string previously built here has been removed
        // (dead code) -- the session is created solely from the template.
        final KieSession ksession = new KieHelper().addContent(template, ResourceType.TEMPLATE)
                .build()
                .newKieSession();

        Company myCompany = new Company( 123, "myCompany" );
        Company yourCompany = new Company( 456, "yourCompany" );

        ksession.insert( myCompany );
        ksession.insert( yourCompany );
        ksession.fireAllRules();

        assertTrue(myCompany.isFound());
        assertFalse(yourCompany.isFound());
    }

    /** Simple fact type: immutable id/name plus a mutable "found" flag set by the rule RHS. */
    public static class Company {
        private final int companyId;
        private final String companyName;
        private boolean found;

        public Company( int companyId, String companyName ) {
            this.companyId = companyId;
            this.companyName = companyName;
        }

        public int getCompanyId() {
            return companyId;
        }

        public String getCompanyName() {
            return companyName;
        }

        /** @return true once the rule RHS has matched this company */
        public boolean isFound() {
            return found;
        }

        public void setFound( boolean found ) {
            this.found = found;
        }

        @Override
        public String toString() {
            return "Company id: " + getCompanyId() + "; Name: " + getCompanyName();
        }
    }
}
| apache-2.0 |
SHASHANKB/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala | 8794 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.sql.{Column, Dataset, Row}
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions.{Add, Literal, Stack}
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.execution.aggregate.HashAggregateExec
import org.apache.spark.sql.execution.joins.BroadcastHashJoinExec
import org.apache.spark.sql.execution.joins.SortMergeJoinExec
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.functions.{avg, broadcast, col, max}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
/**
 * Verifies which physical operators are fused into a WholeStageCodegenExec
 * pipeline (range/filter, aggregates, broadcast join, sort, typed operations),
 * that codegen is disabled for SortMergeJoin with CodegenFallback expressions
 * (SPARK-21441), and that the generated-function length threshold is honoured
 * (SPARK-21603).
 */
class WholeStageCodegenSuite extends SparkPlanTest with SharedSQLContext {
test("range/filter should be combined") {
val df = spark.range(10).filter("id = 1").selectExpr("id + 1")
val plan = df.queryExecution.executedPlan
assert(plan.find(_.isInstanceOf[WholeStageCodegenExec]).isDefined)
assert(df.collect() === Array(Row(2)))
}
test("Aggregate should be included in WholeStageCodegen") {
val df = spark.range(10).groupBy().agg(max(col("id")), avg(col("id")))
val plan = df.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[HashAggregateExec]).isDefined)
assert(df.collect() === Array(Row(9, 4.5)))
}
test("Aggregate with grouping keys should be included in WholeStageCodegen") {
val df = spark.range(3).groupBy("id").count().orderBy("id")
val plan = df.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[HashAggregateExec]).isDefined)
assert(df.collect() === Array(Row(0, 1), Row(1, 1), Row(2, 1)))
}
test("BroadcastHashJoin should be included in WholeStageCodegen") {
val rdd = spark.sparkContext.makeRDD(Seq(Row(1, "1"), Row(1, "1"), Row(2, "2")))
val schema = new StructType().add("k", IntegerType).add("v", StringType)
val smallDF = spark.createDataFrame(rdd, schema)
val df = spark.range(10).join(broadcast(smallDF), col("k") === col("id"))
assert(df.queryExecution.executedPlan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[BroadcastHashJoinExec]).isDefined)
assert(df.collect() === Array(Row(1, 1, "1"), Row(1, 1, "1"), Row(2, 2, "2")))
}
test("Sort should be included in WholeStageCodegen") {
val df = spark.range(3, 0, -1).toDF().sort(col("id"))
val plan = df.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[SortExec]).isDefined)
assert(df.collect() === Array(Row(1), Row(2), Row(3)))
}
test("MapElements should be included in WholeStageCodegen") {
import testImplicits._
val ds = spark.range(10).map(_.toString)
val plan = ds.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[SerializeFromObjectExec]).isDefined)
assert(ds.collect() === 0.until(10).map(_.toString).toArray)
}
test("typed filter should be included in WholeStageCodegen") {
val ds = spark.range(10).filter(_ % 2 == 0)
val plan = ds.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[FilterExec]).isDefined)
assert(ds.collect() === Array(0, 2, 4, 6, 8))
}
test("back-to-back typed filter should be included in WholeStageCodegen") {
val ds = spark.range(10).filter(_ % 2 == 0).filter(_ % 3 == 0)
val plan = ds.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[FilterExec]).isDefined)
assert(ds.collect() === Array(0, 6))
}
test("simple typed UDAF should be included in WholeStageCodegen") {
import testImplicits._
val ds = Seq(("a", 10), ("b", 1), ("b", 2), ("c", 1)).toDS()
.groupByKey(_._1).agg(typed.sum(_._2))
val plan = ds.queryExecution.executedPlan
assert(plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.isInstanceOf[HashAggregateExec]).isDefined)
assert(ds.collect() === Array(("a", 10.0), ("b", 3.0), ("c", 1.0)))
}
test("SPARK-19512 codegen for comparing structs is incorrect") {
// regression checks: the fix is verified by the queries completing without
// throwing, so only count() is invoked and no results are asserted
// this would raise CompileException before the fix
spark.range(10)
.selectExpr("named_struct('a', id) as col1", "named_struct('a', id+2) as col2")
.filter("col1 = col2").count()
// this would raise java.lang.IndexOutOfBoundsException before the fix
spark.range(10)
.selectExpr("named_struct('a', id, 'b', id) as col1",
"named_struct('a',id+2, 'b',id+2) as col2")
.filter("col1 = col2").count()
}
test("SPARK-21441 SortMergeJoin codegen with CodegenFallback expressions should be disabled") {
// broadcast threshold of 1 forces a sort-merge join; the reflect() call is a
// CodegenFallback expression, so no WholeStageCodegenExec must wrap the join
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "1") {
import testImplicits._
val df1 = Seq((1, 1), (2, 2), (3, 3)).toDF("key", "int")
val df2 = Seq((1, "1"), (2, "2"), (3, "3")).toDF("key", "str")
val df = df1.join(df2, df1("key") === df2("key"))
.filter("int = 2 or reflect('java.lang.Integer', 'valueOf', str) = 1")
.select("int")
val plan = df.queryExecution.executedPlan
assert(!plan.find(p =>
p.isInstanceOf[WholeStageCodegenExec] &&
p.asInstanceOf[WholeStageCodegenExec].child.children(0)
.isInstanceOf[SortMergeJoinExec]).isDefined)
assert(df.collect() === Array(Row(1), Row(2)))
}
}
/**
 * Builds a grouped aggregation over `caseNum` CASE-WHEN projections and
 * returns the CodegenContext produced by code-generating its whole-stage
 * pipeline; more CASE columns yield a longer generated function.
 */
def genGroupByCodeGenContext(caseNum: Int): CodegenContext = {
val caseExp = (1 to caseNum).map { i =>
s"case when id > $i and id <= ${i + 1} then 1 else 0 end as v$i"
}.toList
val keyExp = List(
"id",
"(id & 1023) as k1",
"cast(id & 1023 as double) as k2",
"cast(id & 1023 as int) as k3")
val ds = spark.range(10)
.selectExpr(keyExp:::caseExp: _*)
.groupBy("k1", "k2", "k3")
.sum()
val plan = ds.queryExecution.executedPlan
// locate the codegen stage whose HashAggregateExec sits over a ProjectExec
val wholeStageCodeGenExec = plan.find(p => p match {
case wp: WholeStageCodegenExec => wp.child match {
case hp: HashAggregateExec if (hp.child.isInstanceOf[ProjectExec]) => true
case _ => false
}
case _ => false
})
assert(wholeStageCodeGenExec.isDefined)
wholeStageCodeGenExec.get.asInstanceOf[WholeStageCodegenExec].doCodeGen()._1
}
test("SPARK-21603 check there is a too long generated function") {
withSQLConf(SQLConf.WHOLESTAGE_MAX_LINES_PER_FUNCTION.key -> "1500") {
val ctx = genGroupByCodeGenContext(30)
assert(ctx.isTooLongGeneratedFunction === true)
}
}
test("SPARK-21603 check there is not a too long generated function") {
withSQLConf(SQLConf.WHOLESTAGE_MAX_LINES_PER_FUNCTION.key -> "1500") {
val ctx = genGroupByCodeGenContext(1)
assert(ctx.isTooLongGeneratedFunction === false)
}
}
test("SPARK-21603 check there is not a too long generated function when threshold is Int.Max") {
withSQLConf(SQLConf.WHOLESTAGE_MAX_LINES_PER_FUNCTION.key -> Int.MaxValue.toString) {
val ctx = genGroupByCodeGenContext(30)
assert(ctx.isTooLongGeneratedFunction === false)
}
}
test("SPARK-21603 check there is a too long generated function when threshold is 0") {
withSQLConf(SQLConf.WHOLESTAGE_MAX_LINES_PER_FUNCTION.key -> "0") {
val ctx = genGroupByCodeGenContext(1)
assert(ctx.isTooLongGeneratedFunction === true)
}
}
}
| apache-2.0 |
anoordover/camel | components/camel-micrometer/src/main/java/org/apache/camel/component/micrometer/routepolicy/MicrometerRoutePolicyService.java | 1428 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.micrometer.routepolicy;
import io.micrometer.core.instrument.MeterRegistry;
import org.apache.camel.CamelContextAware;
import org.apache.camel.StaticService;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.component.micrometer.json.AbstractMicrometerService;
/**
 * Service holding the {@link MeterRegistry} which registers all metrics.
 * <p>
 * All behaviour is inherited from {@link AbstractMicrometerService}; this
 * subclass only contributes the JMX management annotation and the marker
 * interfaces ({@link CamelContextAware}, {@link StaticService},
 * {@link MicrometerRoutePolicyMBean}).
 */
@ManagedResource(description = "MicrometerRoutePolicy")
public final class MicrometerRoutePolicyService extends AbstractMicrometerService implements CamelContextAware, StaticService, MicrometerRoutePolicyMBean {
}
| apache-2.0 |
edolganov/live-chat-engine | components/db-main/src/och/comp/db/main/table/remtoken/UpdateRemTokenDate.java | 1130 | /*
* Copyright 2015 Evgeny Dolganov (evgenij.dolganov@gmail.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package och.comp.db.main.table.remtoken;
import static och.comp.db.main.table.MainTables.*;
import java.util.Date;
import och.comp.db.base.universal.UpdateRows;
import och.comp.db.base.universal.query.AndCondition;
import och.comp.db.main.table._f.LastVisited;
import och.comp.db.main.table._f.Uid;
/**
 * Update statement for the {@code rem_tokens} table: sets the last-visited
 * timestamp to "now" for the single row whose uid matches the given value.
 */
public class UpdateRemTokenDate extends UpdateRows {
/** @param uid identifier of the remember-me token row to touch */
public UpdateRemTokenDate(String uid) {
super(rem_tokens,
new AndCondition(
new Uid(uid)),
new LastVisited(new Date()));
}
}
| apache-2.0 |
sushrutikhar/grill | lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java | 12134 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.server.api.driver;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lens.api.query.QueryHandle;
import org.apache.lens.api.query.QueryPrepareHandle;
import org.apache.lens.api.query.ResultRow;
import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.events.LensEventListener;
import org.apache.lens.server.api.query.AbstractQueryContext;
import org.apache.lens.server.api.query.PreparedQueryContext;
import org.apache.lens.server.api.query.QueryContext;
import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy;
import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
import org.apache.lens.server.api.query.cost.FactPartitionBasedQueryCost;
import org.apache.lens.server.api.query.cost.QueryCost;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.service.cli.ColumnDescriptor;
import com.beust.jcommander.internal.Sets;
import com.google.common.collect.ImmutableSet;
import lombok.Getter;
/**
 * A mock {@link LensDriver} for tests. Query execution is stubbed out: plans
 * report zero cost, result sets return nulls, and {@link #updateStatus}
 * simulates retries, failures and cancellation based on magic strings in the
 * user query ("simulate status retries", "simulate status failure",
 * "autocancel").
 */
public class MockDriver extends AbstractLensDriver {
// monotonically increasing id so each driver instance is distinguishable in toString()
private static AtomicInteger mockDriverId = new AtomicInteger();
/**
 * The conf.
 */
protected Configuration conf;
/**
 * The query. Holds the last driver query passed to execute/executeAsync.
 */
String query;
/**
 * The io test val. Round-tripped through readExternal/writeExternal to test serialization.
 */
private int ioTestVal = -1;
private final int driverId;
/**
 * Instantiates a new mock driver.
 */
public MockDriver() {
driverId = mockDriverId.incrementAndGet();
}
@Override
public String toString() {
return getFullyQualifiedName()+":"+driverId;
}
@Override
public Configuration getConf() {
return conf;
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration)
 */
// Stores the conf, reads "mock.driver.test.val" into ioTestVal, adds the
// failing-query driver site file as a resource and loads the query hook.
@Override
public void configure(Configuration conf, String driverType, String driverName) throws LensException {
this.conf = conf;
ioTestVal = conf.getInt("mock.driver.test.val", -1);
this.conf.addResource(getDriverResourcePath("failing-query-driver-site.xml"));
loadQueryHook();
}
@Override
public String getFullyQualifiedName() {
return "mock/fail1";
}
/**
 * The Class MockQueryPlan. Trivial plan that echoes the query text, reports
 * zero cost and carries fixed table weights.
 */
public static class MockQueryPlan extends DriverQueryPlan {
/**
 * The query.
 */
String query;
/**
 * Instantiates a new mock query plan.
 *
 * @param query the query
 */
MockQueryPlan(String query) {
this.query = query;
setPrepareHandle(new QueryPrepareHandle(UUID.randomUUID()));
tableWeights.put("table1", 1.0);
tableWeights.put("table2", 2.0);
tableWeights.put("table3", 3.0);
}
@Override
public String getPlan() {
return query;
}
@Override
public QueryCost getCost() {
return new FactPartitionBasedQueryCost(0);
}
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#explain(java.lang.String,
 * org.apache.hadoop.conf.Configuration)
 */
@Override
public DriverQueryPlan explain(AbstractQueryContext explainCtx) throws LensException {
return new MockQueryPlan(explainCtx.getUserQuery());
}
// Number of updateStatus calls; drives the simulated retry behaviour below.
@Getter
private int updateCount = 0;
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#updateStatus(org.apache.lens.server.api.query.QueryContext)
 */
// Simulation rules keyed on the user query text:
// - "simulate status retries": throws timeout exceptions for the first 2
//   calls, socket exceptions for calls 3-5, then succeeds.
// - "simulate status failure": always throws a timeout exception.
// - queries containing "autocancel": RUNNING until cancelQuery() is called,
//   then CANCELED.
// - anything else: immediately SUCCESSFUL with progress 1.0.
@Override
public void updateStatus(QueryContext context) throws LensException {
updateCount++;
if ("simulate status retries".equals(context.getUserQuery())) {
try {
if (updateCount < 3) {
throw new SocketTimeoutException("simulated timeout exception");
} else if (updateCount <= 5) {
throw new SocketException("simulated socket exception");
}
} catch (Exception e) {
throw new LensException(e);
}
}
if ("simulate status failure".equals(context.getUserQuery())) {
try {
throw new SocketTimeoutException("simulated timeout exception");
} catch (Exception e) {
throw new LensException(e);
}
}
if (context.getUserQuery().contains("autocancel")) {
if (!cancel) {
context.getDriverStatus().setState(DriverQueryState.RUNNING);
} else {
context.getDriverStatus().setState(DriverQueryState.CANCELED);
context.getDriverStatus().setDriverFinishTime(System.currentTimeMillis());
}
return;
}
context.getDriverStatus().setProgress(1.0);
context.getDriverStatus().setStatusMessage("Done");
context.getDriverStatus().setState(DriverQueryState.SUCCESSFUL);
context.getDriverStatus().setDriverFinishTime(System.currentTimeMillis());
}
// Flag flipped by cancelQuery(); reset in executeAsync(); read by updateStatus().
boolean cancel = false;
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#cancelQuery(org.apache.lens.api.query.QueryHandle)
 */
@Override
public boolean cancelQuery(QueryHandle handle) throws LensException {
cancel = true;
return true;
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#closeQuery(org.apache.lens.api.query.QueryHandle)
 */
// No-op in the mock.
@Override
public void closeQuery(QueryHandle handle) throws LensException {
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#close()
 */
// No-op in the mock.
@Override
public void close() throws LensException {
}
/**
 * Add a listener for driver events. No-op in the mock.
 *
 * @param driverEventListener the driver event listener
 */
@Override
public void registerDriverEventListener(LensEventListener<DriverEvent> driverEventListener) {
}
// No launching constraints: an immutable copy of an empty set.
@Override
public ImmutableSet<QueryLaunchingConstraint> getQueryConstraints() {
return ImmutableSet.copyOf(Sets.<QueryLaunchingConstraint>newHashSet());
}
// No waiting-query selection policies: an immutable copy of an empty set.
@Override
public ImmutableSet<WaitingQueriesSelectionPolicy> getWaitingQuerySelectionPolicies() {
return ImmutableSet.copyOf(Sets.<WaitingQueriesSelectionPolicy>newHashSet());
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#prepare(org.apache.lens.server.api.query.PreparedQueryContext)
 */
// No-op in the mock.
@Override
public void prepare(PreparedQueryContext pContext) throws LensException {
// TODO Auto-generated method stub
}
/*
 * (non-Javadoc)
 *
 * @see
 * org.apache.lens.server.api.driver.LensDriver
 * #explainAndPrepare(org.apache.lens.server.api.query.PreparedQueryContext)
 */
// Returns a mock plan carrying the prepare handle from the context.
@Override
public DriverQueryPlan explainAndPrepare(PreparedQueryContext pContext) throws LensException {
DriverQueryPlan p = new MockQueryPlan(pContext.getSelectedDriverQuery());
p.setPrepareHandle(pContext.getPrepareHandle());
return p;
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#closePreparedQuery(org.apache.lens.api.query.QueryPrepareHandle)
 */
// No-op in the mock.
@Override
public void closePreparedQuery(QueryPrepareHandle handle) throws LensException {
// TODO Auto-generated method stub
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#execute(org.apache.lens.server.api.query.QueryContext)
 */
// Records the driver query and returns a stub persistent result set whose
// metadata, size and output path are all null.
@Override
public LensResultSet execute(QueryContext context) throws LensException {
this.query = context.getSelectedDriverQuery();
return new PersistentResultSet() {
@Override
public Integer size() throws LensException {
// TODO Auto-generated method stub
return null;
}
@Override
public Long getFileSize() throws LensException {
// TODO Auto-generated method stub
return null;
}
@Override
public LensResultSetMetadata getMetadata() throws LensException {
// TODO Auto-generated method stub
return new LensResultSetMetadata() {
@Override
public List<ColumnDescriptor> getColumns() {
// TODO Auto-generated method stub
return null;
}
};
}
@Override
public String getOutputPath() throws LensException {
// TODO Auto-generated method stub
return null;
}
};
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#executeAsync(org.apache.lens.server.api.query.QueryContext)
 */
// Resets the cancel flag and records the driver query; no work is started.
@Override
public void executeAsync(QueryContext context) throws LensException {
cancel = false;
this.query = context.getSelectedDriverQuery();
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#fetchResultSet(org.apache.lens.server.api.query.QueryContext)
 */
// Returns a stub in-memory result set that reports no rows and null metadata.
@Override
public LensResultSet fetchResultSet(final QueryContext context) throws LensException {
return new InMemoryResultSet() {
@Override
public Integer size() throws LensException {
// TODO Auto-generated method stub
return null;
}
@Override
public LensResultSetMetadata getMetadata() throws LensException {
return new LensResultSetMetadata() {
@Override
public List<ColumnDescriptor> getColumns() {
// TODO Auto-generated method stub
return null;
}
};
}
@Override
public void setFetchSize(int size) throws LensException {
// TODO Auto-generated method stub
}
@Override
public ResultRow next() throws LensException {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean hasNext() throws LensException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean canBePurged() {
return true;
}
};
}
/*
 * (non-Javadoc)
 *
 * @see org.apache.lens.server.api.driver.LensDriver#closeResultSet(org.apache.lens.api.query.QueryHandle)
 */
// No-op in the mock.
@Override
public void closeResultSet(QueryHandle handle) throws LensException {
// TODO Auto-generated method stub
}
/*
 * (non-Javadoc)
 *
 * @see
 * org.apache.lens.server.api.driver.LensDriver#registerForCompletionNotification
 * (org.apache.lens.api.query.QueryHandle, long, org.apache.lens.server.api.driver.QueryDriverStatusUpdateListener)
 */
// No-op in the mock.
@Override
public void registerForCompletionNotification(QueryContext context,
long timeoutMillis, QueryCompletionListener listener) {
// TODO Auto-generated method stub
}
/*
 * (non-Javadoc)
 *
 * @see java.io.Externalizable#readExternal(java.io.ObjectInput)
 */
// Only ioTestVal takes part in externalization; see writeExternal.
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
ioTestVal = in.readInt();
}
/*
 * (non-Javadoc)
 *
 * @see java.io.Externalizable#writeExternal(java.io.ObjectOutput)
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
out.writeInt(ioTestVal);
}
public int getTestIOVal() {
return ioTestVal;
}
// Mock driver always estimates zero cost.
@Override
public QueryCost estimate(AbstractQueryContext qctx) throws LensException {
return new FactPartitionBasedQueryCost(0);
}
}
| apache-2.0 |
basoundr/roslyn | src/Compilers/Core/Portable/PEWriter/PeWriter.cs | 70173 | // Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection.Metadata;
using System.Reflection.PortableExecutable;
using System.Text;
using System.Threading;
using Microsoft.CodeAnalysis;
using Roslyn.Utilities;
using EmitContext = Microsoft.CodeAnalysis.Emit.EmitContext;
using Microsoft.CodeAnalysis.CodeGen;
namespace Microsoft.Cci
{
/// <summary>
/// Wraps an exception thrown while writing a PE image. The inner exception's
/// message is reused as this exception's message, and the original exception
/// is preserved as <see cref="Exception.InnerException"/>.
/// </summary>
internal sealed class PeWritingException : Exception
{
public PeWritingException(Exception inner)
: base(inner.Message, inner)
{ }
}
internal sealed class PeWriter
{
// Names of the optional PE sections emitted after .text.
private const string ResourceSectionName = ".rsrc";
private const string RelocationSectionName = ".reloc";
/// <summary>
/// Minimal size of PDB path in Debug Directory. We pad the path to this minimal size to
/// allow some tools to patch the path without the need to rewrite the entire image.
/// This is a workaround put in place until these tools are retired.
/// </summary>
private readonly int _minPdbPath;
/// <summary>
/// True if we should attempt to generate a deterministic output (no timestamps or random data).
/// </summary>
private readonly bool _deterministic;
// COFF header Time/Date stamp; zero when deterministic (see ctor comment).
private readonly int _timeStamp;
// Path recorded in the debug directory; null when no PDB is being emitted.
private readonly string _pdbPathOpt;
// True when emitting a PE32 (32-bit) image; false for PE32+ (64-bit).
private readonly bool _is32bit;
private readonly ModulePropertiesForSerialization _properties;
// Win32 resources arrive in exactly one of two forms: parsed resource objects
// (from a .RES file) or a raw section blob with relocations (from a .OBJ file).
private readonly IEnumerable<IWin32Resource> _nativeResourcesOpt;
private readonly ResourceSection _nativeResourceSectionOpt;
// Accumulates the serialized payload of the .rsrc section.
private readonly BlobBuilder _win32ResourceWriter = new BlobBuilder(1024);
/// <summary>
/// Captures the module properties and Win32 resource inputs for a single PE emission.
/// </summary>
/// <param name="properties">Serialization properties of the module being written.</param>
/// <param name="nativeResourcesOpt">Parsed Win32 resources (from .RES), or null/empty.</param>
/// <param name="nativeResourceSectionOpt">Raw resource section (from .OBJ), or null.</param>
/// <param name="pdbPathOpt">PDB path for the debug directory; null when no PDB is emitted.</param>
/// <param name="deterministic">True to produce bit-for-bit reproducible output.</param>
private PeWriter(
    ModulePropertiesForSerialization properties,
    IEnumerable<IWin32Resource> nativeResourcesOpt,
    ResourceSection nativeResourceSectionOpt,
    string pdbPathOpt,
    bool deterministic)
{
    _properties = properties;
    _pdbPathOpt = pdbPathOpt;
    _deterministic = deterministic;
    // The PDB padding workaround is only needed for legacy tools that don't use deterministic build.
    _minPdbPath = deterministic ? 0 : 260;
    _nativeResourcesOpt = nativeResourcesOpt;
    _nativeResourceSectionOpt = nativeResourceSectionOpt;
    _is32bit = !_properties.Requires64bits;
    // In the PE File Header this is a "Time/Date Stamp" whose description is "Time and date
    // the file was created in seconds since January 1st 1970 00:00:00 or 0"
    // However, when we want to make it deterministic we fill it in (later) with bits from the hash of the full PE file.
    _timeStamp = _deterministic ? 0 : (int)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
}
private bool EmitPdb => _pdbPathOpt != null;
/// <summary>
/// Entry point: serializes the module in <paramref name="context"/> to a PE stream,
/// optionally emitting a native and/or portable PDB alongside it.
/// </summary>
/// <param name="context">Emit context holding the module to write.</param>
/// <param name="messageProvider">Used by the metadata writer for diagnostics.</param>
/// <param name="getPeStream">Deferred factory for the output PE stream; may return null to abort.</param>
/// <param name="getPortablePdbStreamOpt">Deferred factory for the portable PDB stream, or null.</param>
/// <param name="nativePdbWriterOpt">Native PDB writer, or null. If given, <paramref name="pdbPathOpt"/> must be non-null.</param>
/// <param name="pdbPathOpt">PDB path recorded in the debug directory, or null.</param>
/// <param name="allowMissingMethodBodies">Passed through to the metadata writer.</param>
/// <param name="deterministic">True to produce reproducible output.</param>
/// <param name="cancellationToken">Cancellation for metadata serialization.</param>
/// <returns>False when <paramref name="getPeStream"/> produced no stream; true on success.</returns>
/// <exception cref="PeWritingException">Wraps any unexpected failure during writing.</exception>
public static bool WritePeToStream(
    EmitContext context,
    CommonMessageProvider messageProvider,
    Func<Stream> getPeStream,
    Func<Stream> getPortablePdbStreamOpt,
    PdbWriter nativePdbWriterOpt,
    string pdbPathOpt,
    bool allowMissingMethodBodies,
    bool deterministic,
    CancellationToken cancellationToken)
{
    // If PDB writer is given, we have to have PDB path.
    Debug.Assert(nativePdbWriterOpt == null || pdbPathOpt != null);
    try
    {
        var peWriter = new PeWriter(context.Module.Properties, context.Module.Win32Resources, context.Module.Win32ResourceSection, pdbPathOpt, deterministic);
        var mdWriter = FullMetadataWriter.Create(context, messageProvider, allowMissingMethodBodies, deterministic, getPortablePdbStreamOpt != null, cancellationToken);
        return peWriter.WritePeToStream(mdWriter, getPeStream, getPortablePdbStreamOpt, nativePdbWriterOpt);
    }
    catch (Exception ex) when (!(ex is PdbWritingException || ex is ResourceException || ex is PermissionSetFileReadException || ex is OperationCanceledException))
    {
        // Known, caller-handled exception types pass through; anything else is wrapped
        // so the caller can report "PE writing failed" with the original as inner.
        throw new PeWritingException(ex);
    }
}
/// <summary>
/// Core write sequence: serializes metadata/IL, finalizes the native and/or portable PDB,
/// lays out the section headers, and writes the headers, .text, .rsrc and .reloc sections
/// to the PE stream. When deterministic, patches the MVID and timestamp from the content hash.
/// </summary>
/// <returns>False when the PE stream factory returned null; true on success.</returns>
private bool WritePeToStream(MetadataWriter mdWriter, Func<Stream> getPeStream, Func<Stream> getPortablePdbStreamOpt, PdbWriter nativePdbWriterOpt)
{
    // TODO: we can precalculate the exact size of IL stream
    var ilWriter = new BlobBuilder(32 * 1024);
    var metadataWriter = new BlobBuilder(16 * 1024);
    var mappedFieldDataWriter = new BlobBuilder();
    var managedResourceWriter = new BlobBuilder(1024);
    var debugMetadataWriterOpt = (getPortablePdbStreamOpt != null) ? new BlobBuilder(16 * 1024) : null;
    nativePdbWriterOpt?.SetMetadataEmitter(mdWriter);
    // Since we are producing a full assembly, we should not have a module version ID
    // imposed ahead-of time. Instead we will compute a deterministic module version ID
    // based on the contents of the generated stream.
    Debug.Assert(_properties.PersistentIdentifier == default(Guid));
    // Section count determines the header size, which in turn fixes the .text RVA.
    int sectionCount = 1;
    if (_properties.RequiresStartupStub) sectionCount++; //.reloc
    if (!IteratorHelper.EnumerableIsEmpty(_nativeResourcesOpt) || _nativeResourceSectionOpt != null) sectionCount++; //.rsrc;
    int sizeOfPeHeaders = ComputeSizeOfPeHeaders(sectionCount);
    int textSectionRva = BitArithmeticUtilities.Align(sizeOfPeHeaders, _properties.SectionAlignment);
    int moduleVersionIdOffsetInMetadataStream;
    int methodBodyStreamRva = textSectionRva + OffsetToILStream;
    int pdbIdOffsetInPortablePdbStream;
    int entryPointToken;
    MetadataSizes metadataSizes;
    // Serialize all metadata tables and IL bodies; also reports the offsets of the
    // placeholder MVID and PDB-id blobs that may be patched below.
    mdWriter.SerializeMetadataAndIL(
        metadataWriter,
        debugMetadataWriterOpt,
        nativePdbWriterOpt,
        ilWriter,
        mappedFieldDataWriter,
        managedResourceWriter,
        methodBodyStreamRva,
        mdSizes => CalculateMappedFieldDataStreamRva(textSectionRva, mdSizes),
        out moduleVersionIdOffsetInMetadataStream,
        out pdbIdOffsetInPortablePdbStream,
        out entryPointToken,
        out metadataSizes);
    ContentId nativePdbContentId;
    if (nativePdbWriterOpt != null)
    {
        var assembly = mdWriter.Module.AsAssembly;
        if (assembly != null && assembly.Kind == OutputKind.WindowsRuntimeMetadata)
        {
            // Dev12: If compiling to winmdobj, we need to add to PDB source spans of
            // all types and members for better error reporting by WinMDExp.
            nativePdbWriterOpt.WriteDefinitionLocations(mdWriter.Module.GetSymbolToLocationMap());
        }
        else
        {
#if DEBUG
            // validate that all definitions are writable
            // if same scenario would happen in an winmdobj project
            nativePdbWriterOpt.AssertAllDefinitionsHaveTokens(mdWriter.Module.GetSymbolToLocationMap());
#endif
        }
        nativePdbContentId = nativePdbWriterOpt.GetContentId();
        // the writer shall not be used after this point for writing:
        nativePdbWriterOpt = null;
    }
    else
    {
        nativePdbContentId = default(ContentId);
    }
    // write to Portable PDB stream:
    ContentId portablePdbContentId;
    Stream portablePdbStream = getPortablePdbStreamOpt?.Invoke();
    if (portablePdbStream != null)
    {
        debugMetadataWriterOpt.WriteContentTo(portablePdbStream);
        if (_deterministic)
        {
            portablePdbContentId = ContentId.FromHash(CryptographicHashProvider.ComputeSha1(portablePdbStream));
        }
        else
        {
            portablePdbContentId = new ContentId(Guid.NewGuid().ToByteArray(), BitConverter.GetBytes(_timeStamp));
        }
        // fill in the PDB id:
        long previousPosition = portablePdbStream.Position;
        CheckZeroDataInStream(portablePdbStream, pdbIdOffsetInPortablePdbStream, ContentId.Size);
        portablePdbStream.Position = pdbIdOffsetInPortablePdbStream;
        portablePdbStream.Write(portablePdbContentId.Guid, 0, portablePdbContentId.Guid.Length);
        portablePdbStream.Write(portablePdbContentId.Stamp, 0, portablePdbContentId.Stamp.Length);
        portablePdbStream.Position = previousPosition;
    }
    else
    {
        portablePdbContentId = default(ContentId);
    }
    // Only the size of the fixed part of the debug table goes here.
    DirectoryEntry debugDirectory = default(DirectoryEntry);
    DirectoryEntry importTable = default(DirectoryEntry);
    DirectoryEntry importAddressTable = default(DirectoryEntry);
    int entryPointAddress = 0;
    if (EmitPdb || _deterministic)
    {
        debugDirectory = new DirectoryEntry(textSectionRva + ComputeOffsetToDebugTable(metadataSizes), ImageDebugDirectoryBaseSize);
    }
    if (_properties.RequiresStartupStub)
    {
        importAddressTable = new DirectoryEntry(textSectionRva, SizeOfImportAddressTable);
        entryPointAddress = CalculateMappedFieldDataStreamRva(textSectionRva, metadataSizes) - (_is32bit ? 6 : 10); // TODO: constants
        importTable = new DirectoryEntry(textSectionRva + ComputeOffsetToImportTable(metadataSizes), (_is32bit ? 66 : 70) + 13); // TODO: constants
    }
    var corHeaderDirectory = new DirectoryEntry(textSectionRva + SizeOfImportAddressTable, size: CorHeaderSize);
    long ntHeaderTimestampPosition;
    long metadataPosition;
    List<SectionHeader> sectionHeaders = CreateSectionHeaders(metadataSizes, sectionCount);
    CoffHeader coffHeader;
    NtHeader ntHeader;
    FillInNtHeader(sectionHeaders, entryPointAddress, corHeaderDirectory, importTable, importAddressTable, debugDirectory, out coffHeader, out ntHeader);
    Stream peStream = getPeStream();
    if (peStream == null)
    {
        return false;
    }
    // Physical write, in file order: headers, .text, then optional .rsrc and .reloc.
    WriteHeaders(peStream, ntHeader, coffHeader, sectionHeaders, out ntHeaderTimestampPosition);
    WriteTextSection(
        peStream,
        sectionHeaders[0],
        importTable.RelativeVirtualAddress,
        importAddressTable.RelativeVirtualAddress,
        entryPointToken,
        metadataWriter,
        ilWriter,
        mappedFieldDataWriter,
        managedResourceWriter,
        metadataSizes,
        nativePdbContentId,
        portablePdbContentId,
        out metadataPosition);
    var resourceSection = sectionHeaders.FirstOrDefault(s => s.Name == ResourceSectionName);
    if (resourceSection != null)
    {
        WriteResourceSection(peStream, resourceSection);
    }
    var relocSection = sectionHeaders.FirstOrDefault(s => s.Name == RelocationSectionName);
    if (relocSection != null)
    {
        WriteRelocSection(peStream, relocSection, entryPointAddress);
    }
    if (_deterministic)
    {
        // Replace the zeroed MVID and NT-header timestamp with values derived from
        // a hash of the completed image contents.
        var mvidPosition = metadataPosition + moduleVersionIdOffsetInMetadataStream;
        WriteDeterministicGuidAndTimestamps(peStream, mvidPosition, ntHeaderTimestampPosition);
    }
    return true;
}
/// <summary>
/// Builds the section headers in file order: .text always; .rsrc when Win32 resources
/// exist; .reloc when a startup stub is required. Each section's RVA is aligned to the
/// section alignment and its raw pointer/size to the file alignment, chained off the
/// previous section.
/// </summary>
private List<SectionHeader> CreateSectionHeaders(MetadataSizes metadataSizes, int sectionCount)
{
    var sectionHeaders = new List<SectionHeader>();
    SectionHeader lastSection;
    int sizeOfPeHeaders = ComputeSizeOfPeHeaders(sectionCount);
    int sizeOfTextSection = ComputeSizeOfTextSection(metadataSizes);
    sectionHeaders.Add(lastSection = new SectionHeader(
        characteristics: SectionCharacteristics.MemRead |
                            SectionCharacteristics.MemExecute |
                            SectionCharacteristics.ContainsCode,
        name: ".text",
        numberOfLinenumbers: 0,
        numberOfRelocations: 0,
        pointerToLinenumbers: 0,
        pointerToRawData: BitArithmeticUtilities.Align(sizeOfPeHeaders, _properties.FileAlignment),
        pointerToRelocations: 0,
        relativeVirtualAddress: BitArithmeticUtilities.Align(sizeOfPeHeaders, _properties.SectionAlignment),
        sizeOfRawData: BitArithmeticUtilities.Align(sizeOfTextSection, _properties.FileAlignment),
        virtualSize: sizeOfTextSection
    ));
    int resourcesRva = BitArithmeticUtilities.Align(lastSection.RelativeVirtualAddress + lastSection.VirtualSize, _properties.SectionAlignment);
    // Note: this call also serializes the resources into _win32ResourceWriter as a side effect.
    int sizeOfWin32Resources = this.ComputeSizeOfWin32Resources(resourcesRva);
    if (sizeOfWin32Resources > 0)
    {
        sectionHeaders.Add(lastSection = new SectionHeader(
            characteristics: SectionCharacteristics.MemRead |
                                SectionCharacteristics.ContainsInitializedData,
            name: ResourceSectionName,
            numberOfLinenumbers: 0,
            numberOfRelocations: 0,
            pointerToLinenumbers: 0,
            pointerToRawData: lastSection.PointerToRawData + lastSection.SizeOfRawData,
            pointerToRelocations: 0,
            relativeVirtualAddress: resourcesRva,
            sizeOfRawData: BitArithmeticUtilities.Align(sizeOfWin32Resources, _properties.FileAlignment),
            virtualSize: sizeOfWin32Resources
        ));
    }
    if (_properties.RequiresStartupStub)
    {
        // A single base relocation for the startup stub's absolute address.
        var size = (_properties.Requires64bits && !_properties.RequiresAmdInstructionSet) ? 14 : 12; // TODO: constants
        sectionHeaders.Add(lastSection = new SectionHeader(
            characteristics: SectionCharacteristics.MemRead |
                                SectionCharacteristics.MemDiscardable |
                                SectionCharacteristics.ContainsInitializedData,
            name: RelocationSectionName,
            numberOfLinenumbers: 0,
            numberOfRelocations: 0,
            pointerToLinenumbers: 0,
            pointerToRawData: lastSection.PointerToRawData + lastSection.SizeOfRawData,
            pointerToRelocations: 0,
            relativeVirtualAddress: BitArithmeticUtilities.Align(lastSection.RelativeVirtualAddress + lastSection.VirtualSize, _properties.SectionAlignment),
            sizeOfRawData: BitArithmeticUtilities.Align(size, _properties.FileAlignment),
            virtualSize: size));
    }
    Debug.Assert(sectionHeaders.Count == sectionCount);
    return sectionHeaders;
}
// The CLR loader DLL that the startup stub's import table points at.
private const string CorEntryPointDll = "mscoree.dll";
// Entry point imported from mscoree.dll: _CorDllMain for DLLs, _CorExeMain for EXEs.
private string CorEntryPointName => (_properties.ImageCharacteristics & Characteristics.Dll) != 0 ? "_CorDllMain" : "_CorExeMain";
// Two slots (the import entry and its null terminator), pointer-sized; absent without a startup stub.
private int SizeOfImportAddressTable => _properties.RequiresStartupStub ? (_is32bit ? 2 * sizeof(uint) : 2 * sizeof(ulong)) : 0;
// (_is32bit ? 66 : 70);
private int SizeOfImportTable =>
    sizeof(uint) + // RVA
    sizeof(uint) + // 0
    sizeof(uint) + // 0
    sizeof(uint) + // name RVA
    sizeof(uint) + // import address table RVA
    20 + // ?
    (_is32bit ? 3 * sizeof(uint) : 2 * sizeof(ulong)) + // import lookup table
    sizeof(ushort) + // hint
    CorEntryPointName.Length +
    1; // NUL
// "mscoree.dll" + NUL, padded by a trailing ushort.
private static int SizeOfNameTable =>
    CorEntryPointDll.Length + 1 + sizeof(ushort);
// Machine-code stub that jumps through the IAT into the CLR loader.
private int SizeOfRuntimeStartupStub => _is32bit ? 8 : 16;
/// <summary>
/// Offset from the start of the .text section to the mapped field data stream:
/// everything up to and including the import table area, plus — only when a startup
/// stub is required — the import and name tables and the aligned runtime startup stub.
/// </summary>
private int CalculateOffsetToMappedFieldDataStream(MetadataSizes metadataSizes)
{
    int offset = ComputeOffsetToImportTable(metadataSizes);
    if (!_properties.RequiresStartupStub)
    {
        return offset;
    }

    offset += SizeOfImportTable + SizeOfNameTable;
    // Optional padding so the startup stub's target address lands on a word (PE32)
    // or double-word (PE32+) boundary.
    offset = BitArithmeticUtilities.Align(offset, _is32bit ? 4 : 8);
    return offset + SizeOfRuntimeStartupStub;
}
/// <summary>RVA of the mapped field data stream: the .text RVA plus its in-section offset.</summary>
private int CalculateMappedFieldDataStreamRva(int textSectionRva, MetadataSizes metadataSizes) =>
    textSectionRva + CalculateOffsetToMappedFieldDataStream(metadataSizes);
/// <summary>
/// Compute a deterministic Guid and timestamp based on the contents of the stream, and replace
/// the 16 zero bytes at the given position and one or two 4-byte values with that computed Guid and timestamp.
/// </summary>
/// <param name="peStream">PE stream.</param>
/// <param name="mvidPosition">Position in the stream of 16 zero bytes to be replaced by a Guid</param>
/// <param name="ntHeaderTimestampPosition">Position in the stream of four zero bytes to be replaced by a timestamp</param>
private static void WriteDeterministicGuidAndTimestamps(
    Stream peStream,
    long mvidPosition,
    long ntHeaderTimestampPosition)
{
    Debug.Assert(mvidPosition != 0);
    Debug.Assert(ntHeaderTimestampPosition != 0);
    var previousPosition = peStream.Position;
    // Compute and write deterministic guid data over the relevant portion of the stream
    peStream.Position = 0;
    var contentId = ContentId.FromHash(CryptographicHashProvider.ComputeSha1(peStream));
    // The existing Guid should be zero.
    CheckZeroDataInStream(peStream, mvidPosition, contentId.Guid.Length);
    peStream.Position = mvidPosition;
    peStream.Write(contentId.Guid, 0, contentId.Guid.Length);
    // The existing timestamp should be zero.
    CheckZeroDataInStream(peStream, ntHeaderTimestampPosition, contentId.Stamp.Length);
    peStream.Position = ntHeaderTimestampPosition;
    peStream.Write(contentId.Stamp, 0, contentId.Stamp.Length);
    // Restore the caller's stream position.
    peStream.Position = previousPosition;
}
/// <summary>
/// Debug-only assertion that the <paramref name="bytes"/> bytes starting at
/// <paramref name="position"/> are all zero, i.e. the placeholder has not been written yet.
/// Leaves the stream positioned just past the checked range; callers reposition afterwards.
/// Calls to this method are compiled out entirely outside DEBUG builds.
/// </summary>
[Conditional("DEBUG")]
private static void CheckZeroDataInStream(Stream stream, long position, int bytes)
{
    stream.Position = position;
    for (int i = 0; i < bytes; i++)
    {
        int value = stream.ReadByte();
        Debug.Assert(value == 0);
    }
}
/// <summary>
/// Offset within the .text section of the debug table: it follows the metadata blob,
/// managed resources, and the strong-name signature.
/// </summary>
private int ComputeOffsetToDebugTable(MetadataSizes metadataSizes)
{
    // Both preceding blobs must be 4-byte aligned for the layout arithmetic to hold.
    Debug.Assert(metadataSizes.MetadataSize % 4 == 0);
    Debug.Assert(metadataSizes.ResourceDataSize % 4 == 0);
    return
        ComputeOffsetToMetadata(metadataSizes.ILStreamSize) +
        metadataSizes.MetadataSize +
        metadataSizes.ResourceDataSize +
        metadataSizes.StrongNameSignatureSize;
}
/// <summary>
/// Offset within the .text section of the import table: immediately after the
/// debug table (fixed entries plus its variable-length data).
/// </summary>
private int ComputeOffsetToImportTable(MetadataSizes metadataSizes) =>
    ComputeOffsetToDebugTable(metadataSizes) + ComputeSizeOfDebugDirectory();
// Fixed size of the CLI (COR) header as laid out by ECMA-335 II.25.3.3.
private const int CorHeaderSize =
    sizeof(int) + // header size
    sizeof(short) + // major runtime version
    sizeof(short) + // minor runtime version
    sizeof(long) + // metadata directory
    sizeof(int) + // COR flags
    sizeof(int) + // entry point
    sizeof(long) + // resources directory
    sizeof(long) + // strong name signature directory
    sizeof(long) + // code manager table directory
    sizeof(long) + // vtable fixups directory
    sizeof(long) + // export address table jumps directory
    sizeof(long); // managed-native header directory
// The IL stream starts right after the import address table and the COR header.
private int OffsetToILStream => SizeOfImportAddressTable + CorHeaderSize;
/// <summary>
/// Offset within the .text section of the metadata blob: the IL stream,
/// rounded up to a 4-byte boundary, follows <see cref="OffsetToILStream"/>.
/// </summary>
private int ComputeOffsetToMetadata(int ilStreamLength)
{
    int alignedIlStreamSize = BitArithmeticUtilities.Align(ilStreamLength, 4);
    return OffsetToILStream + alignedIlStreamSize;
}
/// <summary>
/// The size of a single entry in the "Debug Directory (Image Only)"
/// </summary>
private const int ImageDebugDirectoryEntrySize =
    sizeof(uint) + // Characteristics
    sizeof(uint) + // TimeDataStamp
    sizeof(uint) + // Version
    sizeof(uint) + // Type
    sizeof(uint) + // SizeOfData
    sizeof(uint) + // AddressOfRawData
    sizeof(uint); // PointerToRawData
/// <summary>
/// The size of our debug directory: one entry for debug information, and an optional second one indicating
/// that the timestamp is deterministic (i.e. not really a timestamp)
/// </summary>
private int ImageDebugDirectoryBaseSize =>
    (_deterministic ? ImageDebugDirectoryEntrySize : 0) +
    (EmitPdb ? ImageDebugDirectoryEntrySize : 0);
/// <summary>
/// Size of the variable-length RSDS record referenced by the debug directory:
/// signature + GUID + age + the NUL-terminated UTF-8 PDB path (padded up to
/// <see cref="_minPdbPath"/> for legacy path-patching tools). Zero when no PDB is emitted.
/// </summary>
private int ComputeSizeOfDebugDirectoryData()
{
    if (!EmitPdb)
    {
        // The debug directory data is only needed when a PDB is being produced.
        return 0;
    }

    int pathSize = Math.Max(BlobUtilities.GetUTF8ByteCount(_pdbPathOpt) + 1, _minPdbPath);
    return 4 /* "RSDS" signature */ +
           16 /* GUID */ +
           sizeof(uint) /* Age */ +
           pathSize;
}
/// <summary>Total debug directory size: the fixed entries plus the RSDS data record.</summary>
private int ComputeSizeOfDebugDirectory() =>
    ImageDebugDirectoryBaseSize + ComputeSizeOfDebugDirectoryData();
/// <summary>
/// Byte size of everything preceding the first section: MS-DOS stub (128),
/// PE signature (4), COFF header (20), optional header (224 for PE32; 16 bytes
/// more for PE32+), and one 40-byte section header per section.
/// </summary>
private int ComputeSizeOfPeHeaders(int sectionCount)
{
    // PE32+ optional header carries 8-byte fields where PE32 has 4-byte ones.
    int optionalHeaderExtra = _is32bit ? 0 : 16;
    return 128 + 4 + 20 + 224 + optionalHeaderExtra + 40 * sectionCount; // TODO: constants
}
/// <summary>
/// Virtual size of the .text section: everything up to the mapped field data stream
/// plus the mapped field data itself (which is already stream-aligned).
/// </summary>
private int ComputeSizeOfTextSection(MetadataSizes metadataSizes)
{
    Debug.Assert(metadataSizes.MappedFieldDataSize % MetadataWriter.MappedFieldDataAlignment == 0);
    return CalculateOffsetToMappedFieldDataStream(metadataSizes) + metadataSizes.MappedFieldDataSize;
}
/// <summary>
/// Serializes the Win32 resources into <see cref="_win32ResourceWriter"/> (side effect!)
/// and returns the 4-byte-aligned size of the resulting .rsrc payload, or 0 when there
/// are no resources.
/// </summary>
/// <param name="resourcesRva">RVA the .rsrc section will be placed at; baked into the serialized offsets.</param>
private int ComputeSizeOfWin32Resources(int resourcesRva)
{
    this.SerializeWin32Resources(resourcesRva);
    int result = 0;
    if (_win32ResourceWriter.Count > 0)
    {
        result += BitArithmeticUtilities.Align(_win32ResourceWriter.Count, 4);
    } // result += Align(this.win32ResourceWriter.Length+1, 8);
    return result;
}
/// <summary>
/// Builds the CLI (COR) header. The metadata, managed resources, and strong-name
/// signature directories are laid out back-to-back starting at the metadata offset
/// within the .text section.
/// </summary>
private CorHeader CreateCorHeader(MetadataSizes metadataSizes, int textSectionRva, int entryPointToken)
{
    int metadataRva = textSectionRva + ComputeOffsetToMetadata(metadataSizes.ILStreamSize);
    int resourcesRva = metadataRva + metadataSizes.MetadataSize;
    int signatureRva = resourcesRva + metadataSizes.ResourceDataSize;
    return new CorHeader(
        entryPointTokenOrRelativeVirtualAddress: entryPointToken,
        flags: _properties.GetCorHeaderFlags(),
        metadataDirectory: new DirectoryEntry(metadataRva, metadataSizes.MetadataSize),
        resourcesDirectory: new DirectoryEntry(resourcesRva, metadataSizes.ResourceDataSize),
        strongNameSignatureDirectory: new DirectoryEntry(signatureRva, metadataSizes.StrongNameSignatureSize));
}
/// <summary>
/// Populates the COFF file header and NT optional header from the module properties,
/// the computed section layout, and the directory entries produced earlier. The
/// base-relocation and resource directories are filled in only when the corresponding
/// sections exist.
/// </summary>
private void FillInNtHeader(
    List<SectionHeader> sectionHeaders,
    int entryPointAddress,
    DirectoryEntry corHeader,
    DirectoryEntry importTable,
    DirectoryEntry importAddressTable,
    DirectoryEntry debugTable,
    out CoffHeader coffHeader,
    out NtHeader ntHeader)
{
    short sectionCount = (short)sectionHeaders.Count;
    coffHeader = new CoffHeader(
        machine: (_properties.Machine == 0) ? Machine.I386 : _properties.Machine,
        numberOfSections: sectionCount,
        timeDateStamp: _timeStamp,
        pointerToSymbolTable: 0,
        numberOfSymbols: 0,
        sizeOfOptionalHeader: (short)(_is32bit ? 224 : 240), // TODO: constants
        characteristics: _properties.ImageCharacteristics);
    // BaseOfCode/BaseOfData point at the first code and first initialized-data sections.
    SectionHeader codeSection = sectionHeaders.FirstOrDefault(sh => (sh.Characteristics & SectionCharacteristics.ContainsCode) != 0);
    SectionHeader dataSection = sectionHeaders.FirstOrDefault(sh => (sh.Characteristics & SectionCharacteristics.ContainsInitializedData) != 0);
    ntHeader = new NtHeader();
    ntHeader.Magic = _is32bit ? PEMagic.PE32 : PEMagic.PE32Plus;
    ntHeader.MajorLinkerVersion = _properties.LinkerMajorVersion;
    ntHeader.MinorLinkerVersion = _properties.LinkerMinorVersion;
    ntHeader.AddressOfEntryPoint = entryPointAddress;
    ntHeader.BaseOfCode = codeSection?.RelativeVirtualAddress ?? 0;
    ntHeader.BaseOfData = dataSection?.RelativeVirtualAddress ?? 0;
    ntHeader.ImageBase = _properties.BaseAddress;
    ntHeader.FileAlignment = _properties.FileAlignment;
    ntHeader.MajorSubsystemVersion = _properties.MajorSubsystemVersion;
    ntHeader.MinorSubsystemVersion = _properties.MinorSubsystemVersion;
    ntHeader.Subsystem = _properties.Subsystem;
    ntHeader.DllCharacteristics = _properties.DllCharacteristics;
    ntHeader.SizeOfStackReserve = _properties.SizeOfStackReserve;
    ntHeader.SizeOfStackCommit = _properties.SizeOfStackCommit;
    ntHeader.SizeOfHeapReserve = _properties.SizeOfHeapReserve;
    ntHeader.SizeOfHeapCommit = _properties.SizeOfHeapCommit;
    ntHeader.SizeOfCode = codeSection?.SizeOfRawData ?? 0;
    ntHeader.SizeOfInitializedData = sectionHeaders.Sum(
        sectionHeader => (sectionHeader.Characteristics & SectionCharacteristics.ContainsInitializedData) != 0 ? sectionHeader.SizeOfRawData : 0);
    ntHeader.SizeOfHeaders = BitArithmeticUtilities.Align(ComputeSizeOfPeHeaders(sectionCount), _properties.FileAlignment);
    // SizeOfImage covers everything through the end of the last section, section-aligned.
    var lastSection = sectionHeaders.Last();
    ntHeader.SizeOfImage = BitArithmeticUtilities.Align(lastSection.RelativeVirtualAddress + lastSection.VirtualSize, _properties.SectionAlignment);
    ntHeader.SizeOfUninitializedData = 0;
    ntHeader.ImportAddressTable = importAddressTable;
    ntHeader.CliHeaderTable = corHeader;
    ntHeader.ImportTable = importTable;
    var relocSection = sectionHeaders.FirstOrDefault(sectionHeader => sectionHeader.Name == RelocationSectionName);
    if (relocSection != null)
    {
        ntHeader.BaseRelocationTable = new DirectoryEntry(relocSection.RelativeVirtualAddress, relocSection.VirtualSize);
    }
    ntHeader.DebugTable = debugTable;
    var resourceSection = sectionHeaders.FirstOrDefault(sectionHeader => sectionHeader.Name == ResourceSectionName);
    if (resourceSection != null)
    {
        ntHeader.ResourceTable = new DirectoryEntry(resourceSection.RelativeVirtualAddress, resourceSection.VirtualSize);
    }
}
////
//// Resource Format.
////
////
//// Resource directory consists of two counts, followed by a variable length
//// array of directory entries. The first count is the number of entries at
//// beginning of the array that have actual names associated with each entry.
//// The entries are in ascending order, case insensitive strings. The second
//// count is the number of entries that immediately follow the named entries.
//// This second count identifies the number of entries that have 16-bit integer
//// Ids as their name. These entries are also sorted in ascending order.
////
//// This structure allows fast lookup by either name or number, but for any
//// given resource entry only one form of lookup is supported, not both.
//// This is consistent with the syntax of the .RC file and the .RES file.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY {
// DWORD Characteristics;
// DWORD TimeDateStamp;
// WORD MajorVersion;
// WORD MinorVersion;
// WORD NumberOfNamedEntries;
// WORD NumberOfIdEntries;
//// IMAGE_RESOURCE_DIRECTORY_ENTRY DirectoryEntries[];
//} IMAGE_RESOURCE_DIRECTORY, *PIMAGE_RESOURCE_DIRECTORY;
//#define IMAGE_RESOURCE_NAME_IS_STRING 0x80000000
//#define IMAGE_RESOURCE_DATA_IS_DIRECTORY 0x80000000
////
//// Each directory contains the 32-bit Name of the entry and an offset,
//// relative to the beginning of the resource directory of the data associated
//// with this directory entry. If the name of the entry is an actual text
//// string instead of an integer Id, then the high order bit of the name field
//// is set to one and the low order 31-bits are an offset, relative to the
//// beginning of the resource directory of the string, which is of type
//// IMAGE_RESOURCE_DIRECTORY_STRING. Otherwise the high bit is clear and the
//// low-order 16-bits are the integer Id that identify this resource directory
//// entry. If the directory entry is yet another resource directory (i.e. a
//// subdirectory), then the high order bit of the offset field will be
//// set to indicate this. Otherwise the high bit is clear and the offset
//// field points to a resource data entry.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY_ENTRY {
// union {
// struct {
// DWORD NameOffset:31;
// DWORD NameIsString:1;
// } DUMMYSTRUCTNAME;
// DWORD Name;
// WORD Id;
// } DUMMYUNIONNAME;
// union {
// DWORD OffsetToData;
// struct {
// DWORD OffsetToDirectory:31;
// DWORD DataIsDirectory:1;
// } DUMMYSTRUCTNAME2;
// } DUMMYUNIONNAME2;
//} IMAGE_RESOURCE_DIRECTORY_ENTRY, *PIMAGE_RESOURCE_DIRECTORY_ENTRY;
////
//// For resource directory entries that have actual string names, the Name
//// field of the directory entry points to an object of the following type.
//// All of these string objects are stored together after the last resource
//// directory entry and before the first resource data object. This minimizes
//// the impact of these variable length objects on the alignment of the fixed
//// size directory entry objects.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY_STRING {
// WORD Length;
// CHAR NameString[ 1 ];
//} IMAGE_RESOURCE_DIRECTORY_STRING, *PIMAGE_RESOURCE_DIRECTORY_STRING;
//typedef struct _IMAGE_RESOURCE_DIR_STRING_U {
// WORD Length;
// WCHAR NameString[ 1 ];
//} IMAGE_RESOURCE_DIR_STRING_U, *PIMAGE_RESOURCE_DIR_STRING_U;
////
//// Each resource data entry describes a leaf node in the resource directory
//// tree. It contains an offset, relative to the beginning of the resource
//// directory of the data for the resource, a size field that gives the number
//// of bytes of data at that offset, a CodePage that should be used when
//// decoding code point values within the resource data. Typically for new
//// applications the code page would be the unicode code page.
////
//typedef struct _IMAGE_RESOURCE_DATA_ENTRY {
// DWORD OffsetToData;
// DWORD Size;
// DWORD CodePage;
// DWORD Reserved;
//} IMAGE_RESOURCE_DATA_ENTRY, *PIMAGE_RESOURCE_DATA_ENTRY;
/// <summary>
/// In-memory node of the Win32 resource directory tree being serialized. A node is
/// identified by <see cref="Name"/> (when named) or <see cref="ID"/>; <see cref="Entries"/>
/// holds either child <see cref="Directory"/> nodes or, at the leaf level,
/// <see cref="IWin32Resource"/> items.
/// </summary>
private class Directory
{
    internal readonly string Name;
    internal readonly int ID;
    // Counts of named vs. integer-id children, as required by IMAGE_RESOURCE_DIRECTORY.
    internal ushort NumberOfNamedEntries;
    internal ushort NumberOfIdEntries;
    internal readonly List<object> Entries;
    internal Directory(string name, int id)
    {
        this.Name = name;
        this.ID = id;
        this.Entries = new List<object>();
    }
}
/// <summary>
/// Orders two resources first by type identifier (id or name), and only when the
/// types compare equal, by the resource's own id or name.
/// </summary>
private static int CompareResources(IWin32Resource left, IWin32Resource right)
{
    int byType = CompareResourceIdentifiers(left.TypeId, left.TypeName, right.TypeId, right.TypeName);
    if (byType != 0)
    {
        return byType;
    }

    return CompareResourceIdentifiers(left.Id, left.Name, right.Id, right.Name);
}
//when comparing a string vs ordinal, the string should always be less than the ordinal. Per the spec,
//entries identified by string must precede those identified by ordinal.
/// <summary>
/// Compares two resource identifiers, each given as an ordinal plus an optional string
/// name (a null string means the identifier is the ordinal). Named identifiers sort
/// before ordinal ones; names compare case-insensitively, ordinals numerically.
/// </summary>
private static int CompareResourceIdentifiers(int xOrdinal, string xString, int yOrdinal, string yString)
{
    if (xString != null && yString != null)
    {
        return String.Compare(xString, yString, StringComparison.OrdinalIgnoreCase);
    }

    if (xString == null && yString == null)
    {
        return xOrdinal - yOrdinal;
    }

    // Exactly one side is named; the named one sorts first.
    return xString == null ? 1 : -1;
}
//sort the resources by ID least to greatest then by NAME.
//Where strings and ordinals are compared, strings are less than ordinals.
// NOTE(review): OrderBy here is passed the comparison delegate directly — presumably a
// project-local OrderBy extension over Comparison<T> rather than LINQ's key-selector
// overload; confirm before changing.
internal static IEnumerable<IWin32Resource> SortResources(IEnumerable<IWin32Resource> resources)
{
    return resources.OrderBy(CompareResources);
}
//Win32 resources are supplied to the compiler in one of two forms, .RES (the output of the resource compiler),
//or .OBJ (the output of running cvtres.exe on a .RES file). A .RES file is parsed and processed into
//a set of objects implementing IWin32Resources. These are then ordered and the final image form is constructed
//and written to the resource section. Resources in .OBJ form are already very close to their final output
//form. Rather than reading them and parsing them into a set of objects similar to those produced by
//processing a .RES file, we process them like the native linker would, copy the relevant sections from
//the .OBJ into our output and apply some fixups.
/// <summary>
/// Dispatches to the appropriate serializer: the raw-section path (.OBJ input) takes
/// precedence; otherwise the parsed-resource path (.RES input) runs unless there are
/// no resources at all.
/// </summary>
private void SerializeWin32Resources(int resourcesRva)
{
    if (_nativeResourceSectionOpt != null)
    {
        SerializeWin32Resources(_nativeResourceSectionOpt, resourcesRva);
        return;
    }
    if (IteratorHelper.EnumerableIsEmpty(_nativeResourcesOpt))
    {
        return;
    }
    SerializeWin32Resources(_nativeResourcesOpt, resourcesRva);
}
/// <summary>
/// Builds the three-level resource directory tree (type → name/id → language) from the
/// sorted resources, computing the total directory-tree size as it goes, then writes the
/// tree followed by the string/data area into <see cref="_win32ResourceWriter"/>,
/// padded to a 4-byte boundary.
/// </summary>
private void SerializeWin32Resources(IEnumerable<IWin32Resource> theResources, int resourcesRva)
{
    theResources = SortResources(theResources);
    Directory typeDirectory = new Directory(string.Empty, 0);
    Directory nameDirectory = null;
    Directory languageDirectory = null;
    int lastTypeID = int.MinValue;
    string lastTypeName = null;
    int lastID = int.MinValue;
    string lastName = null;
    // 16 bytes = size of the root IMAGE_RESOURCE_DIRECTORY header.
    uint sizeOfDirectoryTree = 16;
    //EDMAURER note that this list is assumed to be sorted lowest to highest
    //first by typeId, then by Id.
    foreach (IWin32Resource r in theResources)
    {
        // Negative TypeId means the type is identified by TypeName (a string).
        bool typeDifferent = (r.TypeId < 0 && r.TypeName != lastTypeName) || r.TypeId > lastTypeID;
        if (typeDifferent)
        {
            lastTypeID = r.TypeId;
            lastTypeName = r.TypeName;
            if (lastTypeID < 0)
            {
                Debug.Assert(typeDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with types encoded as strings precede those encoded as ints");
                typeDirectory.NumberOfNamedEntries++;
            }
            else
            {
                typeDirectory.NumberOfIdEntries++;
            }
            // 24 = 8-byte directory entry + 16-byte header of the new subdirectory.
            sizeOfDirectoryTree += 24;
            typeDirectory.Entries.Add(nameDirectory = new Directory(lastTypeName, lastTypeID));
        }
        if (typeDifferent || (r.Id < 0 && r.Name != lastName) || r.Id > lastID)
        {
            lastID = r.Id;
            lastName = r.Name;
            if (lastID < 0)
            {
                Debug.Assert(nameDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with names encoded as strings precede those encoded as ints");
                nameDirectory.NumberOfNamedEntries++;
            }
            else
            {
                nameDirectory.NumberOfIdEntries++;
            }
            sizeOfDirectoryTree += 24;
            nameDirectory.Entries.Add(languageDirectory = new Directory(lastName, lastID));
        }
        // Leaf entry (one per language) contributes an 8-byte directory entry.
        languageDirectory.NumberOfIdEntries++;
        sizeOfDirectoryTree += 8;
        languageDirectory.Entries.Add(r);
    }
    var dataWriter = new BlobBuilder();
    //'dataWriter' is where opaque resource data goes as well as strings that are used as type or name identifiers
    this.WriteDirectory(typeDirectory, _win32ResourceWriter, 0, 0, sizeOfDirectoryTree, resourcesRva, dataWriter);
    _win32ResourceWriter.LinkSuffix(dataWriter);
    _win32ResourceWriter.WriteByte(0);
    // Pad the section payload to a 4-byte boundary.
    while ((_win32ResourceWriter.Count % 4) != 0)
    {
        _win32ResourceWriter.WriteByte(0);
    }
}
/// <summary>
/// Recursively serializes one IMAGE_RESOURCE_DIRECTORY and its entries. The method makes
/// two passes over the entries: the first writes this directory's header and entry records
/// (and, for leaves, the IMAGE_RESOURCE_DATA_ENTRY plus data into <paramref name="dataWriter"/>);
/// the second recurses into subdirectories at the offsets computed in the first pass.
/// High bit set on an entry's name field means "name is a string offset"; on its offset
/// field it means "offset points to a subdirectory".
/// </summary>
/// <param name="directory">Node to serialize.</param>
/// <param name="writer">Receives the directory tables.</param>
/// <param name="offset">Offset of this directory within the directory tree.</param>
/// <param name="level">0 = type, 1 = name, 2 = language.</param>
/// <param name="sizeOfDirectoryTree">Total size of all directory tables; data/strings follow it.</param>
/// <param name="virtualAddressBase">RVA of the .rsrc section.</param>
/// <param name="dataWriter">Receives strings, IMAGE_RESOURCE_DATA_ENTRYs and raw data.</param>
private void WriteDirectory(Directory directory, BlobBuilder writer, uint offset, uint level, uint sizeOfDirectoryTree, int virtualAddressBase, BlobBuilder dataWriter)
{
    writer.WriteUInt32(0); // Characteristics
    writer.WriteUInt32(0); // Timestamp
    writer.WriteUInt32(0); // Version
    writer.WriteUInt16(directory.NumberOfNamedEntries);
    writer.WriteUInt16(directory.NumberOfIdEntries);
    uint n = (uint)directory.Entries.Count;
    // k tracks where each subdirectory's table will start (16-byte header + 8 bytes per entry).
    uint k = offset + 16 + n * 8;
    for (int i = 0; i < n; i++)
    {
        int id;
        string name;
        uint nameOffset = (uint)dataWriter.Position + sizeOfDirectoryTree;
        uint directoryOffset = k;
        Directory subDir = directory.Entries[i] as Directory;
        if (subDir != null)
        {
            id = subDir.ID;
            name = subDir.Name;
            if (level == 0)
            {
                k += SizeOfDirectory(subDir);
            }
            else
            {
                k += 16 + 8 * (uint)subDir.Entries.Count;
            }
        }
        else
        {
            //EDMAURER write out an IMAGE_RESOURCE_DATA_ENTRY followed
            //immediately by the data that it refers to. This results
            //in a layout different than that produced by pulling the resources
            //from an OBJ. In that case all of the data bits of a resource are
            //contiguous in .rsrc$02. After processing these will end up at
            //the end of .rsrc following all of the directory
            //info and IMAGE_RESOURCE_DATA_ENTRYs
            IWin32Resource r = (IWin32Resource)directory.Entries[i];
            id = level == 0 ? r.TypeId : level == 1 ? r.Id : (int)r.LanguageId;
            name = level == 0 ? r.TypeName : level == 1 ? r.Name : null;
            // Data entry: RVA of the raw bytes (which follow the 16-byte entry), size, code page, reserved.
            dataWriter.WriteUInt32((uint)(virtualAddressBase + sizeOfDirectoryTree + 16 + dataWriter.Position));
            byte[] data = new List<byte>(r.Data).ToArray();
            dataWriter.WriteUInt32((uint)data.Length);
            dataWriter.WriteUInt32(r.CodePage);
            dataWriter.WriteUInt32(0);
            dataWriter.WriteBytes(data);
            while ((dataWriter.Count % 4) != 0)
            {
                dataWriter.WriteByte(0);
            }
        }
        if (id >= 0)
        {
            writer.WriteInt32(id);
        }
        else
        {
            if (name == null)
            {
                name = string.Empty;
            }
            // High bit marks the name as a string offset; the length-prefixed UTF-16
            // string itself goes into the data area.
            writer.WriteUInt32(nameOffset | 0x80000000);
            dataWriter.WriteUInt16((ushort)name.Length);
            dataWriter.WriteUTF16(name);
        }
        if (subDir != null)
        {
            // High bit marks the offset as pointing to a subdirectory table.
            writer.WriteUInt32(directoryOffset | 0x80000000);
        }
        else
        {
            writer.WriteUInt32(nameOffset);
        }
    }
    // Second pass: emit the subdirectory tables at the offsets computed above.
    k = offset + 16 + n * 8;
    for (int i = 0; i < n; i++)
    {
        Directory subDir = directory.Entries[i] as Directory;
        if (subDir != null)
        {
            this.WriteDirectory(subDir, writer, k, level + 1, sizeOfDirectoryTree, virtualAddressBase, dataWriter);
            if (level == 0)
            {
                k += SizeOfDirectory(subDir);
            }
            else
            {
                k += 16 + 8 * (uint)subDir.Entries.Count;
            }
        }
    }
}
private static uint SizeOfDirectory(Directory/*!*/ directory)
{
    // A resource directory node occupies a 16-byte header plus an 8-byte
    // entry per child; each immediate subdirectory additionally contributes
    // its own header and entry table (one level deep only, mirroring the
    // layout produced by WriteDirectory).
    uint childCount = (uint)directory.Entries.Count;
    uint total = 16 + 8 * childCount;
    for (int i = 0; i < childCount; i++)
    {
        var child = directory.Entries[i] as Directory;
        if (child != null)
        {
            total += 16 + 8 * (uint)child.Entries.Count;
        }
    }
    return total;
}
/// <summary>
/// Copies a pre-cooked .rsrc section blob into the win32 resource writer and
/// applies base fixups: every address listed in
/// <paramref name="resourceSections"/>.Relocations has the final resource RVA
/// added to the 32-bit value stored at that offset.
/// </summary>
private void SerializeWin32Resources(ResourceSection resourceSections, int resourcesRva)
{
    var sectionWriter = _win32ResourceWriter.ReserveBytes(resourceSections.SectionBytes.Length);
    sectionWriter.WriteBytes(resourceSections.SectionBytes);

    var readStream = new MemoryStream(resourceSections.SectionBytes);
    var reader = new BinaryReader(readStream);

    foreach (int addressToFixup in resourceSections.Relocations)
    {
        // Read the original little-endian 32-bit value at the fixup location
        // and overwrite it with the value rebased onto resourcesRva.
        sectionWriter.Offset = addressToFixup;
        reader.BaseStream.Position = addressToFixup;
        sectionWriter.WriteUInt32(reader.ReadUInt32() + (uint)resourcesRva);
    }
}
//#define IMAGE_FILE_RELOCS_STRIPPED 0x0001 // Relocation info stripped from file.
//#define IMAGE_FILE_EXECUTABLE_IMAGE 0x0002 // File is executable (i.e. no unresolved external references).
//#define IMAGE_FILE_LINE_NUMS_STRIPPED 0x0004 // Line numbers stripped from file.
//#define IMAGE_FILE_LOCAL_SYMS_STRIPPED 0x0008 // Local symbols stripped from file.
//#define IMAGE_FILE_AGGRESIVE_WS_TRIM 0x0010 // Aggressively trim working set
//#define IMAGE_FILE_LARGE_ADDRESS_AWARE 0x0020 // App can handle >2gb addresses
//#define IMAGE_FILE_BYTES_REVERSED_LO 0x0080 // Bytes of machine word are reversed.
//#define IMAGE_FILE_32BIT_MACHINE 0x0100 // 32 bit word machine.
//#define IMAGE_FILE_DEBUG_STRIPPED 0x0200 // Debugging info stripped from file in .DBG file
//#define IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP 0x0400 // If Image is on removable media, copy and run from the swap file.
//#define IMAGE_FILE_NET_RUN_FROM_SWAP 0x0800 // If Image is on Net, copy and run from the swap file.
//#define IMAGE_FILE_SYSTEM 0x1000 // System File.
//#define IMAGE_FILE_DLL 0x2000 // File is a DLL.
//#define IMAGE_FILE_UP_SYSTEM_ONLY 0x4000 // File should only be run on a UP machine
//#define IMAGE_FILE_BYTES_REVERSED_HI 0x8000 // Bytes of machine word are reversed.
// Standard 128-byte MS-DOS stub for a PE image: an "MZ" header whose 16-bit
// code prints "This program cannot be run in DOS mode." and exits. The dword
// at offset 0x3C (value 0x80 below) points to the PE signature that
// immediately follows this stub.
private static readonly byte[] s_dosHeader = new byte[]
{
    0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00,
    0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00,
    0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, // 0x3C: offset of PE signature
    0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd,
    0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68,
    0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72,
    0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
    0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e,
    0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20,
    0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a,
    0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
};
/// <summary>
/// Writes the MS-DOS stub, PE signature, COFF header, the PE optional header
/// (32-bit or 64-bit layout depending on <c>_is32bit</c>), all 16 data
/// directory entries and the section headers to <paramref name="peStream"/>.
/// The numeric comments on each line are the running byte offsets within the
/// optional header (PE32|PE32+ where they differ).
/// </summary>
/// <param name="ntHeaderTimestampPosition">
/// Receives the absolute stream position of the COFF TimeDateStamp field so
/// that a deterministic build can patch it after the image content is known.
/// </param>
private void WriteHeaders(Stream peStream, NtHeader ntHeader, CoffHeader coffHeader, List<SectionHeader> sectionHeaders, out long ntHeaderTimestampPosition)
{
    var writer = PooledBlobBuilder.GetInstance();

    // MS-DOS stub (128 bytes)
    writer.WriteBytes(s_dosHeader);

    // PE Signature "PE\0\0"
    writer.WriteUInt32(0x00004550);

    // COFF Header (20 bytes)
    writer.WriteUInt16((ushort)coffHeader.Machine);
    writer.WriteUInt16((ushort)coffHeader.NumberOfSections);
    // Record where the timestamp lands in the final stream: current writer
    // offset plus whatever was already written to peStream before the headers.
    ntHeaderTimestampPosition = writer.Position + peStream.Position;
    writer.WriteUInt32((uint)coffHeader.TimeDateStamp);
    writer.WriteUInt32((uint)coffHeader.PointerToSymbolTable);
    writer.WriteUInt32((uint)coffHeader.NumberOfSymbols);
    writer.WriteUInt16((ushort)(_is32bit ? 224 : 240)); // SizeOfOptionalHeader
    writer.WriteUInt16((ushort)coffHeader.Characteristics);

    // PE Headers:
    writer.WriteUInt16((ushort)(_is32bit ? PEMagic.PE32 : PEMagic.PE32Plus)); // 2
    writer.WriteByte(ntHeader.MajorLinkerVersion); // 3
    writer.WriteByte(ntHeader.MinorLinkerVersion); // 4
    writer.WriteUInt32((uint)ntHeader.SizeOfCode); // 8
    writer.WriteUInt32((uint)ntHeader.SizeOfInitializedData); // 12
    writer.WriteUInt32((uint)ntHeader.SizeOfUninitializedData); // 16
    writer.WriteUInt32((uint)ntHeader.AddressOfEntryPoint); // 20
    writer.WriteUInt32((uint)ntHeader.BaseOfCode); // 24

    if (_is32bit)
    {
        // PE32 carries BaseOfData and a 4-byte image base.
        writer.WriteUInt32((uint)ntHeader.BaseOfData); // 28
        writer.WriteUInt32((uint)ntHeader.ImageBase); // 32
    }
    else
    {
        // PE32+ has no BaseOfData; ImageBase widens to 8 bytes.
        writer.WriteUInt64(ntHeader.ImageBase); // 32
    }

    // NT additional fields:
    writer.WriteUInt32((uint)ntHeader.SectionAlignment); // 36
    writer.WriteUInt32((uint)ntHeader.FileAlignment); // 40
    writer.WriteUInt16(ntHeader.MajorOperatingSystemVersion); // 42
    writer.WriteUInt16(ntHeader.MinorOperatingSystemVersion); // 44
    writer.WriteUInt16(ntHeader.MajorImageVersion); // 46
    writer.WriteUInt16(ntHeader.MinorImageVersion); // 48
    writer.WriteUInt16(ntHeader.MajorSubsystemVersion); // MajorSubsystemVersion 50
    writer.WriteUInt16(ntHeader.MinorSubsystemVersion); // MinorSubsystemVersion 52

    // Win32VersionValue (reserved, should be 0)
    writer.WriteUInt32(0); // 56

    writer.WriteUInt32((uint)ntHeader.SizeOfImage); // 60
    writer.WriteUInt32((uint)ntHeader.SizeOfHeaders); // 64
    writer.WriteUInt32(ntHeader.Checksum); // 68
    writer.WriteUInt16((ushort)ntHeader.Subsystem); // 70
    writer.WriteUInt16((ushort)ntHeader.DllCharacteristics);

    if (_is32bit)
    {
        writer.WriteUInt32((uint)ntHeader.SizeOfStackReserve); // 76
        writer.WriteUInt32((uint)ntHeader.SizeOfStackCommit); // 80
        writer.WriteUInt32((uint)ntHeader.SizeOfHeapReserve); // 84
        writer.WriteUInt32((uint)ntHeader.SizeOfHeapCommit); // 88
    }
    else
    {
        writer.WriteUInt64(ntHeader.SizeOfStackReserve); // 80
        writer.WriteUInt64(ntHeader.SizeOfStackCommit); // 88
        writer.WriteUInt64(ntHeader.SizeOfHeapReserve); // 96
        writer.WriteUInt64(ntHeader.SizeOfHeapCommit); // 104
    }

    // LoaderFlags
    writer.WriteUInt32(0); // 92|108

    // The number of data-directory entries in the remainder of the header.
    writer.WriteUInt32(16); // 96|112

    // directory entries:
    writer.WriteUInt32((uint)ntHeader.ExportTable.RelativeVirtualAddress); // 100|116
    writer.WriteUInt32((uint)ntHeader.ExportTable.Size); // 104|120
    writer.WriteUInt32((uint)ntHeader.ImportTable.RelativeVirtualAddress); // 108|124
    writer.WriteUInt32((uint)ntHeader.ImportTable.Size); // 112|128
    writer.WriteUInt32((uint)ntHeader.ResourceTable.RelativeVirtualAddress); // 116|132
    writer.WriteUInt32((uint)ntHeader.ResourceTable.Size); // 120|136
    writer.WriteUInt32((uint)ntHeader.ExceptionTable.RelativeVirtualAddress); // 124|140
    writer.WriteUInt32((uint)ntHeader.ExceptionTable.Size); // 128|144
    writer.WriteUInt32((uint)ntHeader.CertificateTable.RelativeVirtualAddress); // 132|148
    writer.WriteUInt32((uint)ntHeader.CertificateTable.Size); // 136|152
    writer.WriteUInt32((uint)ntHeader.BaseRelocationTable.RelativeVirtualAddress); // 140|156
    writer.WriteUInt32((uint)ntHeader.BaseRelocationTable.Size); // 144|160
    writer.WriteUInt32((uint)ntHeader.DebugTable.RelativeVirtualAddress); // 148|164
    writer.WriteUInt32((uint)ntHeader.DebugTable.Size); // 152|168
    writer.WriteUInt32((uint)ntHeader.CopyrightTable.RelativeVirtualAddress); // 156|172
    writer.WriteUInt32((uint)ntHeader.CopyrightTable.Size); // 160|176
    writer.WriteUInt32((uint)ntHeader.GlobalPointerTable.RelativeVirtualAddress); // 164|180
    writer.WriteUInt32((uint)ntHeader.GlobalPointerTable.Size); // 168|184
    writer.WriteUInt32((uint)ntHeader.ThreadLocalStorageTable.RelativeVirtualAddress); // 172|188
    writer.WriteUInt32((uint)ntHeader.ThreadLocalStorageTable.Size); // 176|192
    writer.WriteUInt32((uint)ntHeader.LoadConfigTable.RelativeVirtualAddress); // 180|196
    writer.WriteUInt32((uint)ntHeader.LoadConfigTable.Size); // 184|200
    writer.WriteUInt32((uint)ntHeader.BoundImportTable.RelativeVirtualAddress); // 188|204
    writer.WriteUInt32((uint)ntHeader.BoundImportTable.Size); // 192|208
    writer.WriteUInt32((uint)ntHeader.ImportAddressTable.RelativeVirtualAddress); // 196|212
    writer.WriteUInt32((uint)ntHeader.ImportAddressTable.Size); // 200|216
    writer.WriteUInt32((uint)ntHeader.DelayImportTable.RelativeVirtualAddress); // 204|220
    writer.WriteUInt32((uint)ntHeader.DelayImportTable.Size); // 208|224
    writer.WriteUInt32((uint)ntHeader.CliHeaderTable.RelativeVirtualAddress); // 212|228
    writer.WriteUInt32((uint)ntHeader.CliHeaderTable.Size); // 216|232
    // 16th (reserved) directory entry is always zero.
    writer.WriteUInt64(0); // 224|240

    // Section Headers
    foreach (var sectionHeader in sectionHeaders)
    {
        WriteSectionHeader(sectionHeader, writer);
    }

    writer.WriteContentTo(peStream);
    writer.Free();
}
/// <summary>
/// Writes one 40-byte section header. Sections whose virtual size is zero
/// are skipped entirely. The section name is emitted as exactly 8 bytes:
/// truncated if longer, zero-padded if shorter.
/// </summary>
private static void WriteSectionHeader(SectionHeader sectionHeader, BlobBuilder writer)
{
    if (sectionHeader.VirtualSize == 0)
    {
        return;
    }

    for (int j = 0, m = sectionHeader.Name.Length; j < 8; j++)
    {
        if (j < m)
        {
            writer.WriteByte((byte)sectionHeader.Name[j]);
        }
        else
        {
            // Pad the 8-byte name field with zeros.
            writer.WriteByte(0);
        }
    }

    writer.WriteUInt32((uint)sectionHeader.VirtualSize);
    writer.WriteUInt32((uint)sectionHeader.RelativeVirtualAddress);
    writer.WriteUInt32((uint)sectionHeader.SizeOfRawData);
    writer.WriteUInt32((uint)sectionHeader.PointerToRawData);
    writer.WriteUInt32((uint)sectionHeader.PointerToRelocations);
    writer.WriteUInt32((uint)sectionHeader.PointerToLinenumbers);
    writer.WriteUInt16(sectionHeader.NumberOfRelocations);
    writer.WriteUInt16(sectionHeader.NumberOfLinenumbers);
    writer.WriteUInt32((uint)sectionHeader.Characteristics);
}
/// <summary>
/// Writes the entire .text section payload in order: optional import address
/// table, COR (CLI) header, IL stream, metadata, managed resources, zeroed
/// space for the strong name signature, optional debug table, then (when a
/// startup stub is required) the import table, name table and native startup
/// stub, and finally mapped field data. Extends the stream to the section's
/// SizeOfRawData.
/// </summary>
/// <param name="metadataPosition">
/// Receives the absolute stream position where the metadata starts.
/// </param>
private void WriteTextSection(
    Stream peStream,
    SectionHeader textSection,
    int importTableRva,
    int importAddressTableRva,
    int entryPointToken,
    BlobBuilder metadataWriter,
    BlobBuilder ilWriter,
    BlobBuilder mappedFieldDataWriter,
    BlobBuilder managedResourceWriter,
    MetadataSizes metadataSizes,
    ContentId nativePdbContentId,
    ContentId portablePdbContentId,
    out long metadataPosition)
{
    // TODO: zero out all bytes:
    peStream.Position = textSection.PointerToRawData;

    if (_properties.RequiresStartupStub)
    {
        WriteImportAddressTable(peStream, importTableRva);
    }

    var corHeader = CreateCorHeader(metadataSizes, textSection.RelativeVirtualAddress, entryPointToken);
    WriteCorHeader(peStream, corHeader);

    // IL:
    ilWriter.Align(4);
    ilWriter.WriteContentTo(peStream);

    // metadata:
    metadataPosition = peStream.Position;
    Debug.Assert(metadataWriter.Count % 4 == 0);
    metadataWriter.WriteContentTo(peStream);

    // managed resources:
    Debug.Assert(managedResourceWriter.Count % 4 == 0);
    managedResourceWriter.WriteContentTo(peStream);

    // strong name signature:
    WriteSpaceForHash(peStream, metadataSizes.StrongNameSignatureSize);

    if (EmitPdb || _deterministic)
    {
        WriteDebugTable(peStream, textSection, nativePdbContentId, portablePdbContentId, metadataSizes);
    }

    if (_properties.RequiresStartupStub)
    {
        WriteImportTable(peStream, importTableRva, importAddressTableRva);
        WriteNameTable(peStream);
        WriteRuntimeStartupStub(peStream, importAddressTableRva);
    }

    // mapped field data:
    mappedFieldDataWriter.WriteContentTo(peStream);

    // TODO: zero out all bytes:
    // If we fell short of the aligned section end, write the last pad byte so
    // the stream length reaches PointerToRawData + SizeOfRawData exactly.
    int alignedPosition = textSection.PointerToRawData + textSection.SizeOfRawData;
    if (peStream.Position != alignedPosition)
    {
        peStream.Position = alignedPosition - 1;
        peStream.WriteByte(0);
    }
}
/// <summary>
/// Writes the Import Address Table: one slot pointing at the hint/name entry
/// of the runtime entry point, followed by a null terminator slot. Slots are
/// 4 bytes for PE32 and 8 bytes for PE32+.
/// </summary>
private void WriteImportAddressTable(Stream peStream, int importTableRva)
{
    var writer = new BlobBuilder(SizeOfImportAddressTable);
    int ilRVA = importTableRva + 40;
    int hintRva = ilRVA + (_is32bit ? 12 : 16);

    // Import Address Table
    if (_is32bit)
    {
        writer.WriteUInt32((uint)hintRva); // 4
        writer.WriteUInt32(0); // 8
    }
    else
    {
        // (uint)hintRva is zero-extended into the 8-byte PE32+ slot.
        writer.WriteUInt64((uint)hintRva); // 8
        writer.WriteUInt64(0); // 16
    }

    Debug.Assert(writer.Count == SizeOfImportAddressTable);
    writer.WriteContentTo(peStream);
}
/// <summary>
/// Writes the import directory: a single import descriptor for the runtime
/// dll, a null terminating descriptor, the import lookup table, and the
/// hint/name entry for the entry point named by CorEntryPointName.
/// The numeric comments are cumulative byte counts (PE32|PE32+ where they
/// differ).
/// </summary>
private void WriteImportTable(Stream peStream, int importTableRva, int importAddressTableRva)
{
    var writer = new BlobBuilder(SizeOfImportTable);
    int ilRVA = importTableRva + 40;
    int hintRva = ilRVA + (_is32bit ? 12 : 16);
    int nameRva = hintRva + 12 + 2;

    // Import table
    writer.WriteUInt32((uint)ilRVA); // 4
    writer.WriteUInt32(0); // 8
    writer.WriteUInt32(0); // 12
    writer.WriteUInt32((uint)nameRva); // 16
    writer.WriteUInt32((uint)importAddressTableRva); // 20
    writer.WriteBytes(0, 20); // 40 (null terminating import descriptor)

    // Import Lookup table
    if (_is32bit)
    {
        writer.WriteUInt32((uint)hintRva); // 44
        writer.WriteUInt32(0); // 48
        writer.WriteUInt32(0); // 52
    }
    else
    {
        writer.WriteUInt64((uint)hintRva); // 48
        writer.WriteUInt64(0); // 56
    }

    // Hint table
    writer.WriteUInt16(0); // Hint 54|58

    foreach (char ch in CorEntryPointName)
    {
        writer.WriteByte((byte)ch); // 65|69
    }

    writer.WriteByte(0); // 66|70 (name terminator)
    Debug.Assert(writer.Count == SizeOfImportTable);
    writer.WriteContentTo(peStream);
}
private static void WriteNameTable(Stream peStream)
{
    // Emits the zero-terminated name of the runtime dll (CorEntryPointDll)
    // referenced by the import descriptor, followed by a two-byte pad.
    var writer = new BlobBuilder(SizeOfNameTable);
    for (int i = 0; i < CorEntryPointDll.Length; i++)
    {
        writer.WriteByte((byte)CorEntryPointDll[i]);
    }
    writer.WriteByte(0);
    writer.WriteUInt16(0);
    Debug.Assert(writer.Count == SizeOfNameTable);
    writer.WriteContentTo(peStream);
}
/// <summary>
/// Writes the COR (CLI) header (CorHeaderSize bytes). Directory entries with
/// zero size are written with a zero RVA so consumers treat them as absent.
/// </summary>
private static void WriteCorHeader(Stream peStream, CorHeader corHeader)
{
    var writer = new BlobBuilder(CorHeaderSize);
    writer.WriteUInt32(CorHeaderSize);
    writer.WriteUInt16(corHeader.MajorRuntimeVersion);
    writer.WriteUInt16(corHeader.MinorRuntimeVersion);
    writer.WriteUInt32((uint)corHeader.MetadataDirectory.RelativeVirtualAddress);
    writer.WriteUInt32((uint)corHeader.MetadataDirectory.Size);
    writer.WriteUInt32((uint)corHeader.Flags);
    writer.WriteUInt32((uint)corHeader.EntryPointTokenOrRelativeVirtualAddress);
    writer.WriteUInt32((uint)(corHeader.ResourcesDirectory.Size == 0 ? 0 : corHeader.ResourcesDirectory.RelativeVirtualAddress)); // 28
    writer.WriteUInt32((uint)corHeader.ResourcesDirectory.Size);
    writer.WriteUInt32((uint)(corHeader.StrongNameSignatureDirectory.Size == 0 ? 0 : corHeader.StrongNameSignatureDirectory.RelativeVirtualAddress)); // 36
    writer.WriteUInt32((uint)corHeader.StrongNameSignatureDirectory.Size);
    writer.WriteUInt32((uint)corHeader.CodeManagerTableDirectory.RelativeVirtualAddress);
    writer.WriteUInt32((uint)corHeader.CodeManagerTableDirectory.Size);
    writer.WriteUInt32((uint)corHeader.VtableFixupsDirectory.RelativeVirtualAddress);
    writer.WriteUInt32((uint)corHeader.VtableFixupsDirectory.Size);
    writer.WriteUInt32((uint)corHeader.ExportAddressTableJumpsDirectory.RelativeVirtualAddress);
    writer.WriteUInt32((uint)corHeader.ExportAddressTableJumpsDirectory.Size);
    // Final 8 bytes of the COR header are reserved and always zero.
    writer.WriteUInt64(0);
    Debug.Assert(writer.Count == CorHeaderSize);
    Debug.Assert(writer.Count % 4 == 0);
    writer.WriteContentTo(peStream);
}
private static void WriteSpaceForHash(Stream peStream, int strongNameSignatureSize)
{
    // Zero-fill the region reserved for the strong name signature; the real
    // signature is computed and patched into this space after emit.
    for (int remaining = strongNameSignatureSize; remaining > 0; remaining--)
    {
        peStream.WriteByte(0);
    }
}
/// <summary>
/// Write one entry in the "Debug Directory (Image Only)"
/// See https://msdn.microsoft.com/en-us/windows/hardware/gg463119.aspx
/// section 5.1.1 (pages 71-72).
/// </summary>
private static void WriteDebugTableEntry(
    PooledBlobBuilder writer,
    byte[] stamp,      // exactly 4 bytes (zeros for deterministic builds)
    uint version, // major and minor version, combined
    uint debugType,
    uint sizeOfData,
    uint addressOfRawData,
    uint pointerToRawData
    )
{
    writer.WriteUInt32(0); // characteristics
    Debug.Assert(stamp.Length == 4);
    writer.WriteBytes(stamp);
    writer.WriteUInt32(version);
    writer.WriteUInt32(debugType);
    writer.WriteUInt32(sizeOfData);
    writer.WriteUInt32(addressOfRawData); // RVA of the entry's payload
    writer.WriteUInt32(pointerToRawData); // file offset of the entry's payload
}
private readonly static byte[] s_zeroStamp = new byte[4]; // four bytes of zero, used as the stamp of deterministic debug entries
/// <summary>
/// Write the entire "Debug Directory (Image Only)" along with data that it points to.
/// </summary>
private void WriteDebugTable(Stream peStream, SectionHeader textSection, ContentId nativePdbContentId, ContentId portablePdbContentId, MetadataSizes metadataSizes)
{
    int tableSize = ImageDebugDirectoryBaseSize;
    Debug.Assert(tableSize != 0);
    // At most one of the PDB content ids may be set, and when a PDB is
    // emitted exactly one of them must be.
    Debug.Assert(nativePdbContentId.IsDefault || portablePdbContentId.IsDefault);
    Debug.Assert(!EmitPdb || (nativePdbContentId.IsDefault ^ portablePdbContentId.IsDefault));

    var writer = PooledBlobBuilder.GetInstance();

    int dataSize = ComputeSizeOfDebugDirectoryData();
    if (this.EmitPdb)
    {
        const int IMAGE_DEBUG_TYPE_CODEVIEW = 2; // from PE spec
        uint dataOffset = (uint)(ComputeOffsetToDebugTable(metadataSizes) + tableSize);
        WriteDebugTableEntry(writer,
            stamp: nativePdbContentId.Stamp ?? portablePdbContentId.Stamp,
            // 'P','M',1,0 marks a portable PDB; native PDBs use version 0.
            version: portablePdbContentId.IsDefault ? (uint)0 : ('P' << 24 | 'M' << 16 | 0x01 << 8 | 0x00),
            debugType: IMAGE_DEBUG_TYPE_CODEVIEW,
            sizeOfData: (uint)dataSize,
            addressOfRawData: (uint)textSection.RelativeVirtualAddress + dataOffset, // RVA of the data
            pointerToRawData: (uint)textSection.PointerToRawData + dataOffset); // position of the data in the PE stream
    }
    if (_deterministic)
    {
        const int IMAGE_DEBUG_TYPE_NO_TIMESTAMP = 16; // from PE spec
        WriteDebugTableEntry(writer,
            stamp: s_zeroStamp,
            version: 0,
            debugType: IMAGE_DEBUG_TYPE_NO_TIMESTAMP,
            sizeOfData: 0,
            addressOfRawData: 0,
            pointerToRawData: 0);
    }

    // We should now have written all and precisely the data we said we'd write for the table entries.
    Debug.Assert(writer.Count == tableSize);
    // ====================
    // The following is additional data beyond the debug directory at the offset `dataOffset`
    // pointed to by the ImageDebugTypeCodeView entry.
    if (EmitPdb)
    {
        // "RSDS" CodeView record: signature, PDB id, age, UTF-8 path.
        writer.WriteByte((byte)'R');
        writer.WriteByte((byte)'S');
        writer.WriteByte((byte)'D');
        writer.WriteByte((byte)'S');

        // PDB id:
        writer.WriteBytes(nativePdbContentId.Guid ?? portablePdbContentId.Guid);

        // age
        writer.WriteUInt32(PdbWriter.Age);

        // UTF-8 encoded zero-terminated path to PDB
        int pathStart = writer.Position;
        writer.WriteUTF8(_pdbPathOpt, allowUnpairedSurrogates: true);
        writer.WriteByte(0);

        // padding:
        writer.WriteBytes(0, Math.Max(0, _minPdbPath - (writer.Position - pathStart)));
    }

    // We should now have written all and precisely the data we said we'd write for the table and its data.
    Debug.Assert(writer.Count == tableSize + dataSize);
    writer.WriteContentTo(peStream);
    writer.Free();
}
/// <summary>
/// Writes the native entry point stub: an indirect jump (0xFF 0x25) through
/// the Import Address Table slot that the OS loader fills with the address
/// of the _CorXXXMain runtime entry point. Zero padding first aligns the jump
/// target field on a 4-byte (PE32) or 8-byte (PE32+) boundary.
/// </summary>
private void WriteRuntimeStartupStub(Stream peStream, int importAddressTableRva)
{
    var writer = new BlobBuilder(16);
    // entry point code, consisting of a jump indirect to _CorXXXMain
    if (_is32bit)
    {
        //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 4 byte boundary.
        for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 4) - peStream.Position); i < n; i++)
        {
            writer.WriteByte(0);
        }
        writer.WriteUInt16(0);
        writer.WriteByte(0xff);
        writer.WriteByte(0x25); //4
        // Absolute VA of the IAT slot = image base + IAT RVA.
        writer.WriteUInt32((uint)importAddressTableRva + (uint)_properties.BaseAddress); //8
    }
    else
    {
        //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 8 byte boundary.
        for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 8) - peStream.Position); i < n; i++)
        {
            writer.WriteByte(0);
        }
        writer.WriteUInt32(0);
        writer.WriteUInt16(0);
        writer.WriteByte(0xff);
        writer.WriteByte(0x25); //8
        writer.WriteUInt64((ulong)importAddressTableRva + _properties.BaseAddress); //16
    }

    writer.WriteContentTo(peStream);
}
/// <summary>
/// Writes the .reloc section containing a single base relocation block for
/// the operand of the startup stub's indirect jump; entryPointAddress + 2
/// skips the two 0xFF 0x25 opcode bytes to land on the address operand.
/// </summary>
private void WriteRelocSection(Stream peStream, SectionHeader relocSection, int entryPointAddress)
{
    peStream.Position = relocSection.PointerToRawData;
    var writer = new BlobBuilder(relocSection.SizeOfRawData);
    // RVA of the 4K page containing the fixup.
    writer.WriteUInt32((((uint)entryPointAddress + 2) / 0x1000) * 0x1000);
    // Block size: 8-byte header + entries (an extra 4-byte entry is emitted
    // for 64-bit non-AMD targets below).
    writer.WriteUInt32(_properties.Requires64bits && !_properties.RequiresAmdInstructionSet ? 14u : 12u);
    uint offsetWithinPage = ((uint)entryPointAddress + 2) % 0x1000;
    // 10 = IMAGE_REL_BASED_DIR64, 3 = IMAGE_REL_BASED_HIGHLOW.
    uint relocType = _properties.Requires64bits ? 10u : 3u;
    ushort s = (ushort)((relocType << 12) | offsetWithinPage);
    writer.WriteUInt16(s);
    if (_properties.Requires64bits && !_properties.RequiresAmdInstructionSet)
    {
        writer.WriteUInt32(relocType << 12);
    }
    writer.WriteUInt16(0); // next chunk's RVA
    writer.PadTo(relocSection.SizeOfRawData);
    writer.WriteContentTo(peStream);
}
/// <summary>
/// Flushes the accumulated win32 resource blob to the .rsrc section,
/// zero-padding it to the section's raw data size first.
/// </summary>
private void WriteResourceSection(Stream peStream, SectionHeader resourceSection)
{
    peStream.Position = resourceSection.PointerToRawData;
    _win32ResourceWriter.PadTo(resourceSection.SizeOfRawData);
    _win32ResourceWriter.WriteContentTo(peStream);
}
}
}
| apache-2.0 |
lukiano/finagle | finagle-core/src/test/scala/com/twitter/finagle/util/AsyncLatchTest.scala | 1397 | package com.twitter.finagle.util
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class AsyncLatchTest extends FunSuite {
  test("when count=0, AsyncLatch should execute waiters immediately") {
    val latch = new AsyncLatch(0)
    var didCall = false
    latch await {
      didCall = true
    }
    // Bug fix: the original test never asserted anything, so it passed even
    // if the waiter was not executed. A zero-count latch must run the waiter
    // synchronously.
    assert(didCall)
  }

  test("when count>0, AsyncLatch should execute waiters when count has reached 0") {
    val latch = new AsyncLatch(1)
    var didCall = false
    latch await {
      didCall = true
    }
    assert(!didCall)
    latch.decr()
    assert(didCall)
  }

  test("when count>0, AsyncLatch should not re-execute waiters when the count increases again") {
    val latch = new AsyncLatch(1)
    var count0 = 0
    var count1 = 0
    latch await {
      count0 += 1
    }
    assert(count0 == 0)
    latch.decr()
    assert(count0 == 1)
    assert(count1 == 0)
    latch.incr()
    latch await {
      count1 += 1
    }
    // Waiters registered before the latch re-opened must not run again;
    // the new waiter stays pending until the count drops back to zero.
    assert(count0 == 1)
    assert(count1 == 0)
    latch.decr()
    assert(count0 == 1)
    assert(count1 == 1)
  }

  test("when count>0, AsyncLatch should return count on increment") {
    val latch = new AsyncLatch(0)
    assert(latch.incr() == 1)
  }

  test("when count>0, AsyncLatch should return count on decrement") {
    val latch = new AsyncLatch(1)
    assert(latch.decr() == 0)
  }
}
| apache-2.0 |
alexksikes/elasticsearch | src/main/java/org/elasticsearch/common/lucene/docset/MatchDocIdSet.java | 4481 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.Nullable;
import java.io.IOException;
/**
* A {@link DocIdSet} that works on a "doc" level by checking if it matches or not.
*/
public abstract class MatchDocIdSet extends DocIdSet implements Bits {

    // Number of documents in the segment; valid doc ids are [0, maxDoc).
    private final int maxDoc;
    // Live-docs filter (deleted documents); null when nothing is deleted.
    private final Bits acceptDocs;

    protected MatchDocIdSet(int maxDoc, @Nullable Bits acceptDocs) {
        this.maxDoc = maxDoc;
        this.acceptDocs = acceptDocs;
    }

    /**
     * Does this document match?
     */
    protected abstract boolean matchDoc(int doc);

    @Override
    public DocIdSetIterator iterator() throws IOException {
        // Pick the cheapest iteration strategy for the live-docs shape.
        if (acceptDocs == null) {
            // No deletions to honor: linear scan with matchDoc only.
            return new NoAcceptDocsIterator(maxDoc);
        } else if (acceptDocs instanceof FixedBitSet) {
            // A FixedBitSet is itself a DocIdSet: iterate only its set bits
            // and post-filter each candidate with matchDoc.
            return new FixedBitSetIterator(((DocIdSet) acceptDocs).iterator());
        } else {
            // Generic Bits: test matchDoc and acceptDocs per document.
            return new BothIterator(maxDoc, acceptDocs);
        }
    }

    @Override
    public Bits bits() throws IOException {
        return this;
    }

    @Override
    public boolean get(int index) {
        // NOTE(review): random access checks matchDoc only and ignores
        // acceptDocs, unlike the iterators below -- confirm callers of
        // bits() apply live docs themselves.
        return matchDoc(index);
    }

    @Override
    public int length() {
        return maxDoc;
    }

    /** Linear scan over [0, maxDoc) filtered by matchDoc only. */
    class NoAcceptDocsIterator extends DocIdSetIterator {

        private final int maxDoc;
        private int doc = -1;

        NoAcceptDocsIterator(int maxDoc) {
            this.maxDoc = maxDoc;
        }

        @Override
        public int docID() {
            return doc;
        }

        @Override
        public int nextDoc() {
            do {
                doc++;
                if (doc >= maxDoc) {
                    return doc = NO_MORE_DOCS;
                }
            } while (!matchDoc(doc));
            return doc;
        }

        @Override
        public int advance(int target) {
            for (doc = target; doc < maxDoc; doc++) {
                if (matchDoc(doc)) {
                    return doc;
                }
            }
            return doc = NO_MORE_DOCS;
        }

        @Override
        public long cost() {
            return maxDoc;
        }
    }

    /** Iterates the set bits of a FixedBitSet, post-filtered by matchDoc. */
    class FixedBitSetIterator extends FilteredDocIdSetIterator {

        FixedBitSetIterator(DocIdSetIterator innerIter) {
            super(innerIter);
        }

        @Override
        protected boolean match(int doc) {
            return matchDoc(doc);
        }
    }

    /** Linear scan filtered by both matchDoc and the acceptDocs bits. */
    class BothIterator extends DocIdSetIterator {
        private final int maxDoc;
        private final Bits acceptDocs;
        private int doc = -1;

        BothIterator(int maxDoc, Bits acceptDocs) {
            this.maxDoc = maxDoc;
            this.acceptDocs = acceptDocs;
        }

        @Override
        public int docID() {
            return doc;
        }

        @Override
        public int nextDoc() {
            do {
                doc++;
                if (doc >= maxDoc) {
                    return doc = NO_MORE_DOCS;
                }
            } while (!(matchDoc(doc) && acceptDocs.get(doc)));
            return doc;
        }

        @Override
        public int advance(int target) {
            for (doc = target; doc < maxDoc; doc++) {
                if (matchDoc(doc) && acceptDocs.get(doc)) {
                    return doc;
                }
            }
            return doc = NO_MORE_DOCS;
        }

        @Override
        public long cost() {
            return maxDoc;
        }
    }
}
| apache-2.0 |
Eric-Zhong/boxbilling | src/bb-modules/Example/Api/Admin.php | 774 | <?php
/**
* BoxBilling
*
* @copyright BoxBilling, Inc (http://www.boxbilling.com)
* @license Apache-2.0
*
* Copyright BoxBilling, Inc
* This source file is subject to the Apache-2.0 License that is bundled
* with this source code in the file LICENSE
*/
/**
* Example module Admin API
*
* API can be access only by admins
*/
namespace Box\Mod\Example\Api;
class Admin extends \Api_Abstract
{
    /**
     * Return list of example objects.
     *
     * @param array $data request parameters; when the 'microsoft' key is
     *                    present an extra entry is appended to the result
     *
     * @return string[]
     */
    public function get_something($data)
    {
        $companies = array(
            'apple',
            'google',
            'facebook',
        );

        if (isset($data['microsoft'])) {
            $companies[] = 'microsoft';
        }

        return $companies;
    }
}
mdaniel/intellij-community | java/java-tests/testData/refactoring/makeClassStatic/Simple1_after.java | 374 | public class YoYo {
int y;
    // NOTE(review): this is refactoring-test fixture data (Simple1_after.java,
    // "make class static"): the former inner class was made static and now
    // receives the outer instance explicitly via its constructor.
    static class YoYoYo {
        private final YoYo anObject;
        public YoYoYo(YoYo anObject) {
            this.anObject = anObject;
        }
        void foo (){
            YoYo yoYoy = anObject;
            int t = anObject.y;
            int t1 = yoYoy.y;
            anObject.new Other();
        }
    }
class Other {}
}
| apache-2.0 |
neurodebian/htcondor | src/condor_startd.V6/VMManager.cpp | 5212 | /***************************************************************
*
* Copyright (C) 1990-2007, Condor Team, Computer Sciences Department,
* University of Wisconsin-Madison, WI.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************/
#include "condor_common.h"
#include "startd.h"
#include "VMMachine.h"
#include "VMManager.h"
#include "vm_common.h"
#include "ipv6_addrinfo.h"
#define VM_UNREGISTER_TIMEOUT 3*vm_register_interval
extern ResMgr* resmgr;
VMManager *vmmanager = NULL;
/* Interfaces for class VMManager */
VMManager::VMManager()
	: m_vm_unrg_tid(-1),
	  host_usable(1),
	  m_vm_registered_num(0),
	  allowed_vm_list(NULL)
{
	// Fresh manager: no unregister timer, no registered guests, host usable.
}
// Tears down the manager: cancels the unregister timer, destroys every
// registered VMMachine (whose destructor detaches itself from this manager)
// and frees the allowed-VM list.
// NOTE(review): delete(i) mutates m_virtualmachines while Next() iterates
// it; this relies on the list implementation tolerating removal of the
// current element -- confirm before restructuring.
VMManager::~VMManager()
{
	cancelUnRegisterTimer();

	/* remove all registered VMs */
	m_virtualmachines.Rewind();
	VMMachine *i;
	while( m_virtualmachines.Next(i) ) {
		// Destructor will detach each virtual machine from vmmanger
		delete(i);
		if( m_vm_registered_num == 0 ) {
			// Last guest gone: mark the host usable again and re-evaluate.
			if( host_usable == 0 ) {
				host_usable = 1;
				if( resmgr ) {
					resmgr->eval_and_update_all();
				}
			}
		}
	}

	if( allowed_vm_list )
		delete(allowed_vm_list);
}
// Returns the number of currently registered (attached) virtual machines.
int
VMManager::numOfVM(void)
{
	return m_vm_registered_num;
}
bool
VMManager::isRegistered(char *addr, int update_time)
{
m_virtualmachines.Rewind();
VMMachine *i;
bool result = FALSE;
while( m_virtualmachines.Next(i) ) {
result = i->match(addr);
if( result ) {
if( update_time )
i->updateTimeStamp();
return TRUE;
}
}
return FALSE;
}
void
VMManager::attach(VMMachine *o)
{
m_virtualmachines.Append(o);
m_vm_registered_num++;
startUnRegisterTimer();
dprintf( D_ALWAYS,"Virtual machine(%s) is attached\n", o->getVMSinful());
}
void
VMManager::detach(VMMachine *o)
{
m_virtualmachines.Delete(o);
m_vm_registered_num--;
if( m_vm_registered_num == 0 )
cancelUnRegisterTimer();
dprintf(D_ALWAYS,"Virtual machine(%s) is detached\n", o->getVMSinful());
}
// Sends 'cmd' with payload 'data' to every registered virtual machine.
// When except_ip is given, the matching VM (the event originator) is skipped
// and only gets its registration timestamp refreshed.
void
VMManager::allNotify( char *except_ip, int cmd, void *data )
{
	m_virtualmachines.Rewind();
	VMMachine *i;
	while( m_virtualmachines.Next(i) ) {
		if( except_ip && i->match(except_ip) ) {
			i->updateTimeStamp();
			continue;
		}
		i->sendEventToVM(cmd, data);
	}
}
// Timer callback: destroys every VM whose last registration is older than
// VM_UNREGISTER_TIMEOUT (the VMMachine destructor detaches it from this
// manager, decrementing m_vm_registered_num).
// NOTE(review): delete(i) mutates m_virtualmachines while Next() iterates
// it; this relies on the list implementation tolerating removal of the
// current element -- confirm before restructuring.
void
VMManager::checkRegisterTimeout(void)
{
	int timeout = VM_UNREGISTER_TIMEOUT;
	m_virtualmachines.Rewind();
	VMMachine *i;
	time_t now;
	now = time(NULL);
	while( m_virtualmachines.Next(i) ) {
		if( ( now - i->getTimeStamp() ) > timeout ) {
			// Destructor will detach timeout virtual machine from vmmanger
			delete(i);
			if( m_vm_registered_num == 0 ) {
				// Last guest timed out: the host becomes usable again and
				// all resources are re-evaluated.
				if( host_usable == 0 ) {
					host_usable = 1;
					if( resmgr ) {
						resmgr->eval_and_update_all();
					}
				}
			}
		}
	}
}
void
VMManager::printAllElements(void)
{
m_virtualmachines.Rewind();
VMMachine *i;
while( m_virtualmachines.Next(i) ) {
i->print();
}
}
// Starts the periodic DaemonCore timer that sweeps stale registrations (see
// checkRegisterTimeout). Idempotent: does nothing if the timer already runs.
// Aborts the daemon via EXCEPT if the timer cannot be registered.
void
VMManager::startUnRegisterTimer(void)
{
	if( m_vm_unrg_tid >= 0 ) {
		//Unregister Timer already started
		return;
	}

	m_vm_unrg_tid = daemonCore->Register_Timer(VM_UNREGISTER_TIMEOUT,
			VM_UNREGISTER_TIMEOUT,
			(TimerHandlercpp)&VMManager::checkRegisterTimeout,
			"poll_registered_vm", this);

	if( m_vm_unrg_tid < 0 ) {
		EXCEPT("Can't register DaemonCore Timer");
	}

	dprintf( D_FULLDEBUG, "Starting vm unregister timer.\n");
}
// Cancels the stale-registration sweep timer if it is active and resets the
// timer id to -1 (the "not running" sentinel) in all cases.
void
VMManager::cancelUnRegisterTimer(void)
{
	int rval;
	if( m_vm_unrg_tid != -1 ) {
		rval = daemonCore->Cancel_Timer(m_vm_unrg_tid);
		if( rval < 0 ) {
			dprintf( D_ALWAYS, "Failed to cancel vm unregister timer (%d): daemonCore error\n", m_vm_unrg_tid);
		}else
			dprintf( D_FULLDEBUG, "Canceled vm unregister timer (%d)\n", m_vm_unrg_tid);
	}
	m_vm_unrg_tid = -1;
}
// Builds the global vmmanager from a string list of virtual machine host
// names. Entries that do not resolve to an IP address are skipped; if no
// entry resolves, no manager is created.
void
vmapi_create_vmmanager(char *list)
{
	StringList tmplist;

	if( !list )
		return;

	if( vmmanager ) {
		delete(vmmanager);
		// Bug fix: reset the global immediately after deletion. The old code
		// left vmmanager pointing at freed memory when we returned early
		// below (empty list or no resolvable names), and vmapi_is_host_machine
		// would then report TRUE against a dangling pointer.
		vmmanager = NULL;
	}

	tmplist.initializeFromString(list);
	if( tmplist.number() == 0 )
		return;

	char *vm_name;
	StringList *vm_list;
	vm_list = new StringList();

	tmplist.rewind();
	while( (vm_name = tmplist.next()) ) {
		// checking valid IP; only the first resolved address is used
		addrinfo_iterator iter;
		int ret;
		ret = ipv6_getaddrinfo(vm_name, NULL, iter);
		if (ret != 0) continue;
		addrinfo* ai = iter.next();
		if (ai) {
			condor_sockaddr addr(ai->ai_addr);
			vm_list->append(addr.to_ip_string().Value());
		}
	}

	if( vm_list->number() > 0 ) {
		vmmanager = new VMManager();
		vmmanager->allowed_vm_list = vm_list;
	}else {
		dprintf( D_ALWAYS, "There is no valid name of virtual machine\n");
		delete(vm_list);
	}
}
// Destroys the global vmmanager (if any) and clears the global pointer.
void
vmapi_destroy_vmmanager(void)
{
	if( !vmmanager ) {
		return;
	}
	delete(vmmanager);
	vmmanager = NULL;
}
// TRUE when this startd acts as a VM host, i.e. a vmmanager was created.
bool
vmapi_is_host_machine(void)
{
	return vmmanager ? TRUE : FALSE;
}
| apache-2.0 |
sreekantch/JellyFish | db/migrate/20150106035615_add_index_on_order_item_port_and_host.rb | 129 | class AddIndexOnOrderItemPortAndHost < ActiveRecord::Migration
def change
add_index :order_items, [:port, :host]
end
end
| apache-2.0 |
adessaigne/camel | components/camel-xmlsecurity/src/test/java/org/apache/camel/component/xmlsecurity/XmlSignatureTest.java | 81164 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.xmlsecurity;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.security.Key;
import java.security.KeyException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.xml.crypto.AlgorithmMethod;
import javax.xml.crypto.KeySelector;
import javax.xml.crypto.KeySelectorException;
import javax.xml.crypto.KeySelectorResult;
import javax.xml.crypto.URIDereferencer;
import javax.xml.crypto.XMLCryptoContext;
import javax.xml.crypto.XMLStructure;
import javax.xml.crypto.dsig.CanonicalizationMethod;
import javax.xml.crypto.dsig.SignatureMethod;
import javax.xml.crypto.dsig.XMLSignature;
import javax.xml.crypto.dsig.keyinfo.KeyInfo;
import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory;
import javax.xml.crypto.dsig.keyinfo.KeyValue;
import javax.xml.crypto.dsig.spec.XPathFilterParameterSpec;
import javax.xml.crypto.dsig.spec.XPathType;
import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Attr;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.xmlsecurity.api.KeyAccessor;
import org.apache.camel.component.xmlsecurity.api.ValidationFailedHandler;
import org.apache.camel.component.xmlsecurity.api.XmlSignature2Message;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureChecker;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureConstants;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureFormatException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureHelper;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureHelper.XPathAndFilter;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureInvalidContentHashException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureInvalidException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureInvalidKeyException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureInvalidValueException;
import org.apache.camel.component.xmlsecurity.api.XmlSignatureProperties;
import org.apache.camel.component.xmlsecurity.processor.XmlSignatureConfiguration;
import org.apache.camel.component.xmlsecurity.util.EnvelopingXmlSignatureChecker;
import org.apache.camel.component.xmlsecurity.util.SameDocumentUriDereferencer;
import org.apache.camel.component.xmlsecurity.util.TestKeystore;
import org.apache.camel.component.xmlsecurity.util.TimestampProperty;
import org.apache.camel.component.xmlsecurity.util.ValidationFailedHandlerIgnoreManifestFailures;
import org.apache.camel.component.xmlsecurity.util.XmlSignature2Message2MessageWithTimestampProperty;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.spi.Registry;
import org.apache.camel.support.SimpleRegistry;
import org.apache.camel.support.processor.validation.SchemaValidationException;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.apache.camel.test.junit5.TestSupport;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
public class XmlSignatureTest extends CamelTestSupport {
// Default XML document signed/verified by most tests; assembled in the
// static initializer below.
protected static String payload;
// Whether a newline follows the XML declaration in serialized output.
// Per the check below it is dropped on JDK >= 9 and on late JDK 8 builds,
// presumably because the platform XML serializer changed - see TestSupport.
private static boolean includeNewLine = true;
// Key pair used for signing (private key) and verification (public key).
private KeyPair keyPair;
static {
    if (TestSupport.getJavaMajorVersion() >= 9
            || TestSupport.isJava18_261_later()) {
        includeNewLine = false;
    }
    payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
            + (includeNewLine ? "\n" : "")
            + "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
}
/**
 * Binds every bean referenced by name (the {@code #xxx} tokens) from the
 * endpoint URIs of the test routes: key accessors/selectors, transform
 * lists, the URI dereferencer, crypto-context properties, XPath filters,
 * and signature properties.
 */
@Override
protected Registry createCamelRegistry() throws Exception {
    Registry registry = new SimpleRegistry();
    registry.bind("accessor", getKeyAccessor(keyPair.getPrivate()));
    registry.bind("canonicalizationMethod1", getCanonicalizationMethod());
    registry.bind("selector", KeySelector.singletonKeySelector(keyPair.getPublic()));
    registry.bind("selectorKeyValue", getKeyValueKeySelector());
    registry.bind("transformsXPath2", getTransformsXPath2());
    registry.bind("transformsXsltXPath", getTransformsXsltXpath());
    registry.bind("uriDereferencer", getSameDocumentUriDereferencer());
    registry.bind("baseUri", getBaseUri());
    registry.bind("cryptoContextProperties", getCrytoContextProperties());
    registry.bind("keyAccessorDefault", getDefaultKeyAccessor());
    registry.bind("keySelectorDefault", getDefaultKeySelector());
    registry.bind("envelopingSignatureChecker", getEnvelopingXmlSignatureChecker());
    registry.bind("xmlSignature2MessageWithTimestampProperty", getXmlSignature2MessageWithTimestampdProperty());
    registry.bind("validationFailedHandlerIgnoreManifestFailures", getValidationFailedHandlerIgnoreManifestFailures());
    registry.bind("signatureProperties", getSignatureProperties());
    registry.bind("nodesearchxpath", getNodeSerachXPath());
    // XPath locating the ID attribute signed by the detached-signature route.
    Map<String, String> namespaceMap = Collections.singletonMap("ns", "http://test");
    List<XPathFilterParameterSpec> xpaths = Collections
            .singletonList(XmlSignatureHelper.getXpathFilter("/ns:root/a/@ID", namespaceMap));
    registry.bind("xpathsToIdAttributes", xpaths);
    registry.bind("parentXpathBean", getParentXPathBean());
    return registry;
}
/**
 * Builds one route per tested feature. Each route typically chains a
 * signer endpoint into a verifier endpoint and ends in "mock:result";
 * routes that test failure paths route handled exceptions to
 * "mock:exception". Endpoint path names (the token after the colon) are
 * arbitrary identifiers, not shared state.
 */
@Override
protected RouteBuilder[] createRouteBuilders() throws Exception {
    return new RouteBuilder[] { new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: enveloping XML signature
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:enveloping").to(getSignerEndpointURIEnveloping()).to("mock:signed")
                    .to(getVerifierEndpointURIEnveloping())
                    .to("mock:result");
            // END SNIPPET: enveloping XML signature
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: enveloping XML signature with plain text
            // message body
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:plaintext")
                    .to("xmlsecurity-sign:plaintext?keyAccessor=#accessor&plainText=true&plainTextEncoding=UTF-8")
                    .to("xmlsecurity-verify:plaintext?keySelector=#selector").to("mock:result");
            // END SNIPPET: enveloping XML signature with plain text message
            // body
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: enveloped XML signature
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:enveloped").to(getSignerEndpointURIEnveloped()).to("mock:signed")
                    .to(getVerifierEndpointURIEnveloped())
                    .to("mock:result");
            // END SNIPPET: enveloped XML signature
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: canonicalization
            // we can set the configuration properties explicitly on the
            // endpoint instances.
            context.getEndpoint("xmlsecurity-sign:canonicalization?canonicalizationMethod=#canonicalizationMethod1",
                    XmlSignerEndpoint.class).getConfiguration().setKeyAccessor(getKeyAccessor(keyPair.getPrivate()));
            context.getEndpoint("xmlsecurity-sign:canonicalization?canonicalizationMethod=#canonicalizationMethod1",
                    XmlSignerEndpoint.class).getConfiguration()
                    .setSignatureAlgorithm("http://www.w3.org/2001/04/xmldsig-more#rsa-sha256");
            context.getEndpoint("xmlsecurity-verify:canonicalization", XmlVerifierEndpoint.class).getConfiguration()
                    .setKeySelector(
                            KeySelector.singletonKeySelector(keyPair.getPublic()));
            from("direct:canonicalization").to(
                    "xmlsecurity-sign:canonicalization?canonicalizationMethod=#canonicalizationMethod1",
                    "xmlsecurity-verify:canonicalization", "mock:result");
            // END SNIPPET: canonicalization
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: signature and digest algorithm
            from("direct:signaturedigestalgorithm")
                    .to("xmlsecurity-sign:signaturedigestalgorithm?keyAccessor=#accessor"
                        + "&signatureAlgorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha512&digestAlgorithm=http://www.w3.org/2001/04/xmlenc#sha512",
                            "xmlsecurity-verify:signaturedigestalgorithm?keySelector=#selector")
                    .to("mock:result");
            // END SNIPPET: signature and digest algorithm
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: transforms XPath2
            from("direct:transformsXPath2").to(
                    "xmlsecurity-sign:transformsXPath2?keyAccessor=#accessor&transformMethods=#transformsXPath2",
                    "xmlsecurity-verify:transformsXPath2?keySelector=#selector").to("mock:result");
            // END SNIPPET: transform XPath
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: transforms XSLT,XPath
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:transformsXsltXPath").to(
                    "xmlsecurity-sign:transformsXsltXPath?keyAccessor=#accessor&transformMethods=#transformsXsltXPath",
                    "xmlsecurity-verify:transformsXsltXPath?keySelector=#selector").to("mock:result");
            // END SNIPPET: transforms XSLT,XPath
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: transforms XSLT,XPath - secure Validation
            // disabled
            from("direct:transformsXsltXPathSecureValDisabled")
                    .to("xmlsecurity-sign:transformsXsltXPathSecureValDisabled?keyAccessor=#accessor&transformMethods=#transformsXsltXPath",
                            "xmlsecurity-verify:transformsXsltXPathSecureValDisabled?keySelector=#selector&secureValidation=false")
                    .to("mock:result");
            // END SNIPPET: transforms XSLT,XPath - secure Validation
            // disabled
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: cryptocontextprops
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:cryptocontextprops")
                    .to("xmlsecurity-verify:cryptocontextprops?keySelector=#selectorKeyValue&cryptoContextProperties=#cryptoContextProperties")
                    .to("mock:result");
            // END SNIPPET: cryptocontextprops
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: URI dereferencer
            from("direct:uridereferencer")
                    .to("xmlsecurity-sign:uriderferencer?keyAccessor=#accessor&uriDereferencer=#uriDereferencer")
                    .to("xmlsecurity-verify:uridereferencer?keySelector=#selector&uriDereferencer=#uriDereferencer")
                    .to("mock:result");
            // END SNIPPET: URI dereferencer
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: keyAccessorKeySelectorDefault
            from("direct:keyAccessorKeySelectorDefault")
                    .to("xmlsecurity-sign:keyAccessorKeySelectorDefault?keyAccessor=#keyAccessorDefault&addKeyInfoReference=true")
                    .to("xmlsecurity-verify:keyAccessorKeySelectorDefault?keySelector=#keySelectorDefault")
                    .to("mock:result");
            // END SNIPPET: keyAccessorKeySelectorDefault
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: xmlSignatureChecker
            onException(XmlSignatureInvalidException.class).handled(false).to("mock:exception");
            from("direct:xmlSignatureChecker")
                    .to("xmlsecurity-verify:xmlSignatureChecker?keySelector=#selectorKeyValue&xmlSignatureChecker=#envelopingSignatureChecker")
                    .to("mock:result");
            // END SNIPPET: xmlSignatureChecker
        }
    }, new RouteBuilder() {
        public void configure() throws Exception { //
            // START SNIPPET: properties
            from("direct:props")
                    .to("xmlsecurity-sign:properties?keyAccessor=#accessor&properties=#signatureProperties")
                    .to("xmlsecurity-verify:properties?keySelector=#selector&xmlSignature2Message=#xmlSignature2MessageWithTimestampProperty")
                    .to("mock:result");
            // END SNIPPET: properties
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: verify output node search element name
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:outputnodesearchelementname").to(
                    "xmlsecurity-verify:outputnodesearchelementname?keySelector=#selectorKeyValue"
                        + "&outputNodeSearchType=ElementName&outputNodeSearch={http://test/test}root&removeSignatureElements=true")
                    .to("mock:result");
            // END SNIPPET: verify output node search element name
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: verify output node search xpath
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:outputnodesearchxpath")
                    .to("xmlsecurity-verify:outputnodesearchxpath?keySelector=#selectorKeyValue&outputNodeSearchType=XPath&outputNodeSearch=#nodesearchxpath&removeSignatureElements=true")
                    .to("mock:result");
            // END SNIPPET: verify output node search xpath
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: validationFailedHandler
            from("direct:validationFailedHandler")
                    .to("xmlsecurity-verify:validationFailedHandler?keySelector=#selectorKeyValue&validationFailedHandler=#validationFailedHandlerIgnoreManifestFailures")
                    .to("mock:result");
            // END SNIPPET: validationFailedHandler
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: further parameters
            from("direct:furtherparams")
                    .to("xmlsecurity-sign:furtherparams?keyAccessor=#accessor&prefixForXmlSignatureNamespace=digsig&disallowDoctypeDecl=false")
                    .to("xmlsecurity-verify:bfurtherparams?keySelector=#selector&disallowDoctypeDecl=false")
                    .to("mock:result");
            // END SNIPPET: further parameters
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: signer invalid keyexception
            onException(XmlSignatureInvalidKeyException.class).handled(true).to("mock:exception");
            from("direct:signexceptioninvalidkey").to(
                    "xmlsecurity-sign:signexceptioninvalidkey?signatureAlgorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha512")
                    .to("mock:result");
            // END SNIPPET: signer invalid keyexception
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: signer exceptions
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:signexceptions")
                    .to("xmlsecurity-sign:signexceptions?keyAccessor=#accessor&signatureAlgorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha512")
                    .to("mock:result");
            // END SNIPPET: signer exceptions
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // Route for provoking a NoSuchAlgorithmException via an invalid
            // signatureAlgorithm value.
            onException(XmlSignatureException.class).handled(true).to("mock:exception");
            from("direct:noSuchAlgorithmException")
                    .to("xmlsecurity-sign:noSuchAlgorithmException?keyAccessor=#accessor&signatureAlgorithm=wrongalgorithm&digestAlgorithm=http://www.w3.org/2001/04/xmlenc#sha512")
                    .to("mock:result");
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // Verifier-only route used by the format/value exception tests.
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:verifyexceptions").to("xmlsecurity-verify:verifyexceptions?keySelector=#selector")
                    .to("mock:result");
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // Verifier-only route used by the invalid-key exception test.
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:verifyInvalidKeyException")
                    .to("xmlsecurity-verify:verifyInvalidKeyException?keySelector=#selector").to(
                            "mock:result");
        }
    }, new RouteBuilder() {
        public void configure() throws Exception {
            // Verifier route with secure validation off, used by the
            // invalid-content-hash tests.
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:invalidhash").to(
                    "xmlsecurity-verify:invalidhash?keySelector=#selectorKeyValue&baseUri=#baseUri&secureValidation=false")
                    .to(
                            "mock:result");
        }
    }, createDetachedRoute(), createRouteForEnvelopedWithParentXpath() };
}
/**
 * Route for detached XML signatures: the signed elements are located via
 * the "xpathsToIdAttributes" bean and validated against Test.xsd on both
 * the signer and verifier side. Headers are kept so the verifier sees them.
 */
RouteBuilder createDetachedRoute() {
    return new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: detached XML signature
            onException(Exception.class).handled(false).to("mock:exception");
            from("direct:detached")
                    .to("xmlsecurity-sign:detached?keyAccessor=#keyAccessorDefault&xpathsToIdAttributes=#xpathsToIdAttributes&"//
                        + "schemaResourceUri=org/apache/camel/component/xmlsecurity/Test.xsd&signatureId=&clearHeaders=false")
                    .to("mock:result")
                    .to("xmlsecurity-verify:detached?keySelector=#keySelectorDefault&schemaResourceUri=org/apache/camel/component/xmlsecurity/Test.xsd")
                    .to("mock:verified");
            // END SNIPPET: detached XML signature
        }
    };
}
/**
 * Enveloped-signature route where the parent element of the Signature is
 * selected via the XPath bean "parentXpathBean" instead of by name.
 */
private RouteBuilder createRouteForEnvelopedWithParentXpath() {
    return new RouteBuilder() {
        public void configure() throws Exception {
            // START SNIPPET: enveloped XML signature with parent XPath
            onException(XmlSignatureException.class).handled(false).to("mock:exception");
            from("direct:envelopedParentXpath")
                    .to("xmlsecurity-sign:enveloped?keyAccessor=#accessor&parentXpath=#parentXpathBean")
                    .to("mock:signed").to(getVerifierEndpointURIEnveloped()).to("mock:result");
            // END SNIPPET: enveloped XML signature with parent XPath
        }
    };
}
// Round-trip: sign then verify the default payload over the enveloping route.
@Test
public void testEnvelopingSignature() throws Exception {
    setupMock();
    sendBody("direct:enveloping", payload);
    assertMockEndpointsSatisfied();
}
// Passes the transform-method list via the message header instead of the
// endpoint URI; the enveloped round-trip must still succeed.
@Test
public void testEnvelopedSignatureWithTransformHeader() throws Exception {
    setupMock(payload);
    sendBody("direct:enveloped", payload, Collections.<String, Object> singletonMap(
            XmlSignatureConstants.HEADER_TRANSFORM_METHODS,
            "http://www.w3.org/2000/09/xmldsig#enveloped-signature,http://www.w3.org/TR/2001/REC-xml-c14n-20010315"));
    assertMockEndpointsSatisfied();
}
// Plain (non-XML) text body over the plaintext route; verifier must
// return the original text.
@Test
public void testEnvelopingSignatureWithPlainText() throws Exception {
    String text = "plain test text";
    setupMock(text);
    sendBody("direct:plaintext", text);
    assertMockEndpointsSatisfied();
}
// Same as the plain-text test, but the plain-text flag and encoding are
// supplied as message headers on the generic enveloping route.
@Test
public void testEnvelopingSignatureWithPlainTextSetByHeaders() throws Exception {
    String text = "plain test text";
    setupMock(text);
    Map<String, Object> headers = new TreeMap<>();
    headers.put(XmlSignatureConstants.HEADER_MESSAGE_IS_PLAIN_TEXT, Boolean.TRUE);
    headers.put(XmlSignatureConstants.HEADER_PLAIN_TEXT_ENCODING, "UTF-8");
    sendBody("direct:enveloping", text, headers);
    assertMockEndpointsSatisfied();
}
// An unknown plain-text encoding must surface as XmlSignatureException
// caused by UnsupportedEncodingException.
@Test
public void testExceptionSignatureForPlainTextWithWrongEncoding() throws Exception {
    String text = "plain test text";
    MockEndpoint mock = setupExceptionMock();
    Map<String, Object> headers = new TreeMap<>();
    headers.put(XmlSignatureConstants.HEADER_MESSAGE_IS_PLAIN_TEXT, Boolean.TRUE);
    headers.put(XmlSignatureConstants.HEADER_PLAIN_TEXT_ENCODING, "wrongEncoding");
    sendBody("direct:enveloping", text, headers);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, UnsupportedEncodingException.class);
}
// Round-trip over the enveloped-signature route with the default payload.
@Test
public void testEnvelopedSignature() throws Exception {
    setupMock(payload);
    sendBody("direct:enveloped", payload);
    assertMockEndpointsSatisfied();
}
// A root element whose name does not match the configured parent local
// name must cause XmlSignatureFormatException. Local payload shadows the
// class field on purpose.
@Test
public void testExceptionEnvelopedSignatureWithWrongParent() throws Exception {
    // payload root element renamed to a -> parent name in route definition
    // does not fit
    String payload
            = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><a xmlns=\"http://test/test\"><test>Test Message</test></a>";
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:enveloped", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureFormatException.class, null);
}
// A plain-text body is invalid for the enveloped case (there is no parent
// element to wrap) and must cause XmlSignatureFormatException.
@Test
public void testExceptionEnvelopedSignatureWithPlainTextPayload() throws Exception {
    // payload root element renamed to a -> parent name in route definition
    // does not fit
    String payload = "plain text Message";
    Map<String, Object> headers = new HashMap<>(1);
    headers.put(XmlSignatureConstants.HEADER_MESSAGE_IS_PLAIN_TEXT, Boolean.TRUE);
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:enveloped", payload, headers);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureFormatException.class, null);
}
/**
 * Omitting the XML declaration via the header: the verified output must
 * start directly with the root element (see {@code payloadOut}).
 * The parameter can also be configured via {@link XmlSignatureConfiguration#setOmitXmlDeclaration(Boolean)}
 */
@Test
public void testOmitXmlDeclarationViaHeader() throws Exception {
    String payloadOut = "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
    setupMock(payloadOut);
    Map<String, Object> headers = new TreeMap<>();
    headers.put(XmlSignatureConstants.HEADER_OMIT_XML_DECLARATION, Boolean.TRUE);
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload, headers);
    assertMockEndpointsSatisfied();
}
// Round-trip using the default key accessor/selector beans from the registry.
@Test
public void testkeyAccessorKeySelectorDefault() throws Exception {
    setupMock();
    sendBody("direct:keyAccessorKeySelectorDefault", payload);
    assertMockEndpointsSatisfied();
}
// Exercises the route whose canonicalization method is set programmatically
// on the endpoint in createRouteBuilders().
@Test
public void testSetCanonicalizationMethodInRouteDefinition() throws Exception {
    setupMock();
    sendBody("direct:canonicalization", payload);
    assertMockEndpointsSatisfied();
}
// Round-trip with RSA-SHA512 signature and SHA-512 digest set in the URI.
@Test
public void testSetDigestAlgorithmInRouteDefinition() throws Exception {
    setupMock();
    sendBody("direct:signaturedigestalgorithm", payload);
    assertMockEndpointsSatisfied();
}
// XPath-Filter-2 transform round-trip using the sample document from the
// W3C xmldsig-filter2 recommendation.
@Test
public void testSetTransformMethodXpath2InRouteDefinition() throws Exception {
    // example from http://www.w3.org/TR/2002/REC-xmldsig-filter2-20021108/
    String payload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
            + (includeNewLine ? "\n" : "")
            + "<Document xmlns=\"http://test/test\">                             "
            + "<ToBeSigned>                                                     "
            + "   <!-- comment -->                                              "
            + "   <Data>1</Data>                                                "
            + "   <NotToBeSigned>                                               "
            + "     <ReallyToBeSigned>                                          "
            + "       <!-- comment -->                                          "
            + "       <Data>2</Data>                                            "
            + "     </ReallyToBeSigned>                                         "
            + "   </NotToBeSigned>                                              "
            + " </ToBeSigned>                                                   "
            + " <ToBeSigned>                                                    "
            + "   <Data>3</Data>                                                "
            + "   <NotToBeSigned>                                               "
            + "     <Data>4</Data>                                              "
            + "   </NotToBeSigned>                                              "
            + " </ToBeSigned>                                                   " + "</Document>";
    setupMock(payload);
    sendBody("direct:transformsXPath2", payload);
    assertMockEndpointsSatisfied();
}
// Secure Validation is enabled and so this should fail: XSLT transforms
// are rejected under secure validation, ending in XmlSignatureException.
@Test
public void testSetTransformMethodXsltXpathInRouteDefinition() throws Exception {
    // byte[] encoded = Base64.encode("Test Message".getBytes("UTF-8"));
    // String contentBase64 = new String(encoded, "UTF-8");
    // String payload =
    // "<?xml version=\"1.0\" encoding=\"UTF-8\"?><root xmlns=\"http://test/test\"><test></test></root>";
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:transformsXsltXPath", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// With secureValidation=false the XSLT/XPath transform round-trip succeeds.
@Test
public void testSetTransformMethodXsltXpathInRouteDefinitionSecValDisabled() throws Exception {
    setupMock();
    sendBody("direct:transformsXsltXPathSecureValDisabled", payload);
    assertMockEndpointsSatisfied();
}
// Round-trip with signature properties added by the signer and mapped back
// by the custom XmlSignature2Message bean.
@Test
public void testProperties() throws Exception {
    setupMock();
    sendBody("direct:props", payload);
    assertMockEndpointsSatisfied();
}
// Verifies a pre-signed resource and extracts the output node by element name.
@Test
public void testVerifyOutputNodeSearchElementName() throws Exception {
    setupMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload);
    assertMockEndpointsSatisfied();
}
// An outputNodeSearch value with an unterminated '{namespace' must fail
// with XmlSignatureException. NOTE(review): mutates the shared endpoint
// configuration; relies on test isolation of the Camel context.
@Test
public void testVerifyExceptionOutputNodeSearchElementNameInvalidFormat1() throws Exception {
    XmlVerifierEndpoint endpoint = context
            .getEndpoint("xmlsecurity-verify:outputnodesearchelementname?keySelector=#selectorKeyValue"
                         + "&outputNodeSearchType=ElementName&outputNodeSearch={http://test/test}root&removeSignatureElements=true",
                    XmlVerifierEndpoint.class);
    endpoint.getConfiguration().setOutputNodeSearch("{wrongformat"); // closing '}' missing
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// An outputNodeSearch value of '{ns}' without a local name must fail with
// XmlSignatureException.
@Test
public void testVerifyExceptionOutputNodeSearchElementNameInvalidFormat2() throws Exception {
    context.getEndpoint(
            "xmlsecurity-verify:outputnodesearchelementname?keySelector=#selectorKeyValue"
                        + "&outputNodeSearchType=ElementName&outputNodeSearch={http://test/test}root&removeSignatureElements=true",
            XmlVerifierEndpoint.class).getConfiguration().setOutputNodeSearch("{wrongformat}");
    // local name missing
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// The configured output element name does not occur in this enveloping
// sample document -> XmlSignatureException.
@Test
public void testExceptionVerifyOutputNodeSearchWrongElementName() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// A document containing several elements matching the output search name is
// ambiguous -> XmlSignatureException.
@Test
public void testExceptionVerifyOutputNodeSearchElementNameMoreThanOneOutputElement() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream(
                    "/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSigWithSeveralElementsWithNameRoot.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchelementname", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// Verifies a pre-signed resource and extracts the output node via XPath.
@Test
public void testVerifyOutputNodeSearchXPath() throws Exception {
    setupMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchxpath", payload);
    assertMockEndpointsSatisfied();
}
// The output XPath matches nothing in this document -> XmlSignatureException.
@Test
public void testExceptionVerifyOutputNodeSearchXPathWithNoResultNode() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchxpath", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// The output XPath matches multiple nodes, which is ambiguous ->
// XmlSignatureException.
@Test
public void testExceptionVerifyOutputNodeSearchXPathMoreThanOneOutputElement() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream(
                    "/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSigWithSeveralElementsWithNameRoot.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:outputnodesearchxpath", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, null);
}
// Signing with a DSA key while the endpoint demands an RSA algorithm must
// raise XmlSignatureInvalidKeyException.
@Test
public void testInvalidKeyException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    // wrong key type
    setUpKeys("DSA", 512);
    context.getEndpoint(
            "xmlsecurity-sign:signexceptioninvalidkey?signatureAlgorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha512",
            XmlSignerEndpoint.class).getConfiguration().setKeyAccessor(getKeyAccessor(keyPair.getPrivate()));
    sendBody("direct:signexceptioninvalidkey", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureInvalidKeyException.class, null);
}
// Non-XML input to the signer -> XmlSignatureFormatException.
@Test
public void testSignatureFormatException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:signexceptions", "wrongFormatedPayload");
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureFormatException.class, null);
}
// An unknown signature algorithm URI must surface as XmlSignatureException
// caused by NoSuchAlgorithmException.
@Test
public void testNoSuchAlgorithmException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:noSuchAlgorithmException", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureException.class, NoSuchAlgorithmException.class);
}
// Non-XML input to the verifier -> XmlSignatureFormatException.
@Test
public void testVerifyFormatExceptionNoXml() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:verifyexceptions", "wrongFormatedPayload");
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureFormatException.class, null);
}
// Well-formed XML without any Signature element -> XmlSignatureFormatException.
@Test
public void testVerifyFormatExceptionNoXmlWithoutSignatureElement() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    sendBody("direct:verifyexceptions", "<?xml version=\"1.0\" encoding=\"UTF-8\"?><NoSignature></NoSignature>");
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureFormatException.class, null);
}
// Tampered detached content must fail with
// XmlSignatureInvalidContentHashException. Disabled: the external
// reference "testFile.txt" cannot be resolved in this environment.
@Test
@Disabled("Cannot resolve <Reference URI=\"testFile.txt\">")
public void testVerifyInvalidContentHashException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload
            = XmlSignatureTest.class.getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleDetached.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:invalidhash", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureInvalidContentHashException.class, null);
}
// Tampered manifest content must fail with
// XmlSignatureInvalidContentHashException (manifest validation enabled).
@Test
public void testVerifyMantifestInvalidContentHashException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ManifestTest_TamperedContent.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:invalidhash", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureInvalidContentHashException.class, null);
}
// With manifest validation switched off through the crypto-context
// properties, the tampered manifest content passes core validation.
@Test
public void testVerifySetCryptoContextProperties() throws Exception {
    // although the content referenced by the manifest was tempered, this is
    // not detected by
    // the core validation because the manifest validation is switched off
    // by the crypto context properties
    setupMock("some text tampered");
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ManifestTest_TamperedContent.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:cryptocontextprops", payload);
    assertMockEndpointsSatisfied();
}
// A signature value that does not match the (DSA) key must fail with
// XmlSignatureInvalidValueException. Disabled: requires a provider for
// this key type that not all JDKs ship.
@Test
@Disabled("Not all JDKs have provider to verify this key")
public void testVerifySignatureInvalidValueException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    setUpKeys("DSA", 512);
    context.getEndpoint("xmlsecurity-verify:verifyexceptions", XmlVerifierEndpoint.class).getConfiguration().setKeySelector(
            KeySelector.singletonKeySelector(keyPair.getPublic()));
    // payload needs DSA key
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:verifyexceptions", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureInvalidValueException.class, null);
}
// Verifying with a key that does not fit the signature must raise
// XmlSignatureInvalidKeyException.
@Test
public void testVerifyInvalidKeyException() throws Exception {
    MockEndpoint mock = setupExceptionMock();
    InputStream payload = XmlSignatureTest.class
            .getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopingDigSig.xml");
    assertNotNull(payload, "Cannot load payload");
    sendBody("direct:verifyInvalidKeyException", payload);
    assertMockEndpointsSatisfied();
    checkThrownException(mock, XmlSignatureInvalidKeyException.class, null);
}
@Test
public void testUriDereferencerAndBaseUri() throws Exception {
setupMock();
sendBody("direct:uridereferencer", payload);
assertMockEndpointsSatisfied();
}
@Test
public void testVerifyXmlSignatureChecker() throws Exception {
MockEndpoint mock = setupExceptionMock();
InputStream payload = XmlSignatureTest.class
.getResourceAsStream("/org/apache/camel/component/xmlsecurity/ExampleEnvelopedXmlSig.xml");
assertNotNull(payload, "Cannot load payload");
sendBody("direct:xmlSignatureChecker", payload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureInvalidException.class, null);
}
@Test
public void testVerifyValidationFailedHandler() throws Exception {
setupMock("some text tampered");
InputStream payload = XmlSignatureTest.class
.getResourceAsStream("/org/apache/camel/component/xmlsecurity/ManifestTest_TamperedContent.xml");
assertNotNull(payload, "Cannot load payload");
sendBody("direct:validationFailedHandler", payload);
assertMockEndpointsSatisfied();
}
@Test
public void testFurtherParameters() throws Exception {
setupMock(payload);
String payloadWithDTDoctype = "<?xml version=\'1.0\'?>" + "<!DOCTYPE Signature SYSTEM "
+ "\"src/test/resources/org/apache/camel/component/xmlsecurity/xmldsig-core-schema.dtd\" [ <!ENTITY dsig "
+ "\"http://www.w3.org/2000/09/xmldsig#\"> ]>"
+ "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
sendBody("direct:furtherparams", payloadWithDTDoctype);
assertMockEndpointsSatisfied();
}
@Test
public void testReferenceUriWithIdAttributeInTheEnvelopedCase() throws Exception {
XmlSignerEndpoint endpoint = getDetachedSignerEndpoint();
endpoint.getConfiguration().setParentLocalName("root");
endpoint.getConfiguration().setParentNamespace("http://test");
endpoint.getConfiguration().setXpathsToIdAttributes(null);
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
sendBody("direct:detached", detachedPayload,
Collections.singletonMap(XmlSignatureConstants.HEADER_CONTENT_REFERENCE_URI, (Object) "#myID"));
assertMockEndpointsSatisfied();
String expectedPartContent = "<ds:Reference URI=\"#myID\">";
checkBodyContains(mock, expectedPartContent);
}
@Test
public void testDetachedSignature() throws Exception {
testDetachedSignatureInternal();
}
@Test
public void testDetachedSignatureWitTransformHeader() throws Exception {
testDetachedSignatureInternal(Collections.singletonMap(XmlSignatureConstants.HEADER_TRANSFORM_METHODS,
(Object) "http://www.w3.org/2000/09/xmldsig#enveloped-signature,http://www.w3.org/TR/2001/REC-xml-c14n-20010315"));
}
@Test
public void testSignatureIdAtributeNull() throws Exception {
// the signature Id parameter must be empty, this is set in the URI
// already
Element sigEle = testDetachedSignatureInternal();
Attr attr = sigEle.getAttributeNode("Id");
assertNull(attr, "Signature element contains Id attribute");
}
@Test
public void testSignatureIdAttribute() throws Exception {
String signatureId = "sigId";
XmlSignerEndpoint endpoint = getDetachedSignerEndpoint();
endpoint.getConfiguration().setSignatureId(signatureId);
Element sigEle = testDetachedSignatureInternal();
String value = sigEle.getAttribute("Id");
assertNotNull("Signature Id is null", value);
assertEquals(signatureId, value);
}
@Test
public void testSignatureIdAttributeGenerated() throws Exception {
String signatureId = null;
XmlSignerEndpoint endpoint = getDetachedSignerEndpoint();
endpoint.getConfiguration().setSignatureId(signatureId);
Element sigEle = testDetachedSignatureInternal();
String value = sigEle.getAttribute("Id");
assertNotNull("Signature Id is null", value);
assertTrue(value.startsWith("_"), "Signature Id value does not start with '_'");
}
private Element testDetachedSignatureInternal()
throws InterruptedException, XPathExpressionException, SAXException, IOException,
ParserConfigurationException {
return testDetachedSignatureInternal(Collections.<String, Object> emptyMap());
}
    /**
     * Signs a small payload via the detached-signature route, lets the verifier
     * run, and returns the {@code ds:Signature} element for further assertions.
     *
     * @param headers additional Camel headers passed to the signer route
     * @return the signature element found under {@code ns:root/ds:Signature}
     */
    private Element testDetachedSignatureInternal(Map<String, Object> headers)
            throws InterruptedException, XPathExpressionException, SAXException, IOException,
            ParserConfigurationException {
        String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                                 + (includeNewLine ? "\n" : "")
                                 + "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        // The verifier route must re-extract exactly the original payload.
        MockEndpoint mockVerified = getMockEndpoint("mock:verified");
        mockVerified.expectedBodiesReceived(detachedPayload);
        sendBody("direct:detached", detachedPayload, headers);
        assertMockEndpointsSatisfied();
        // Locate the detached signature next to the signed element.
        Map<String, String> namespaceMap = new TreeMap<>();
        namespaceMap.put("ns", "http://test");
        namespaceMap.put("ds", XMLSignature.XMLNS);
        Object obj = checkXpath(mock, "ns:root/ds:Signature", namespaceMap);
        Element sigEle = (Element) obj;
        return sigEle;
    }
@Test
public void testDetachedSignatureComplexSchema() throws Exception {
String xpath1exp = "/ns:root/test/ns1:B/C/@ID";
String xpath2exp = "/ns:root/test/@ID";
testDetached2Xpaths(xpath1exp, xpath2exp);
}
/**
* Checks that the processor sorts the xpath expressions in such a way that elements with deeper hierarchy level are
* signed first.
*
*/
@Test
public void testDetachedSignatureWrongXPathOrder() throws Exception {
String xpath2exp = "/ns:root/test/ns1:B/C/@ID";
String xpath1exp = "/ns:root/test/@ID";
testDetached2Xpaths(xpath1exp, xpath2exp);
}
void testDetached2Xpaths(String xpath1exp, String xpath2exp)
throws InterruptedException, XPathExpressionException, SAXException,
IOException, ParserConfigurationException {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><test ID=\"myID\"><b>bValue</b><ts:B xmlns:ts=\"http://testB\"><C ID=\"cID\"><D>dvalue</D></C></ts:B></test></ns:root>";
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
MockEndpoint mockVerified = getMockEndpoint("mock:verified");
mockVerified.expectedBodiesReceived(detachedPayload);
Map<String, Object> headers = new TreeMap<>();
headers.put(XmlSignatureConstants.HEADER_SCHEMA_RESOURCE_URI, "org/apache/camel/component/xmlsecurity/TestComplex.xsd");
Map<String, String> namespaceMap = new TreeMap<>();
namespaceMap.put("ns", "http://test");
namespaceMap.put("ns1", "http://testB");
XPathFilterParameterSpec xpath1 = XmlSignatureHelper.getXpathFilter(xpath1exp, namespaceMap);
XPathFilterParameterSpec xpath2 = XmlSignatureHelper.getXpathFilter(xpath2exp, namespaceMap);
List<XPathFilterParameterSpec> xpaths = new ArrayList<>();
xpaths.add(xpath1);
xpaths.add(xpath2);
headers.put(XmlSignatureConstants.HEADER_XPATHS_TO_ID_ATTRIBUTES, xpaths);
sendBody("direct:detached", detachedPayload, headers);
assertMockEndpointsSatisfied();
Map<String, String> namespaceMap2 = new TreeMap<>();
namespaceMap2.put("ns", "http://test");
namespaceMap2.put("ds", XMLSignature.XMLNS);
namespaceMap2.put("nsB", "http://testB");
checkXpath(mock, "ns:root/test/nsB:B/ds:Signature", namespaceMap2);
checkXpath(mock, "ns:root/ds:Signature", namespaceMap2);
}
@Test
public void testExceptionEnvelopedAndDetached() throws Exception {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + //
"<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
XmlSignerEndpoint endpoint = getDetachedSignerEndpoint();
String parentLocalName = "parent";
endpoint.getConfiguration().setParentLocalName(parentLocalName);
MockEndpoint mock = setupExceptionMock();
mock.expectedMessageCount(1);
sendBody("direct:detached", detachedPayload);
assertMockEndpointsSatisfied();
checkThrownException(
mock,
XmlSignatureException.class,
"The configuration of the XML signer component is wrong. The parent local name "
+ parentLocalName
+ " for an enveloped signature and the XPATHs to ID attributes for a detached signature are specified. You must not specify both parameters.",
null);
}
@Test
public void testExceptionSchemaValidation() throws Exception {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><error>bValue</error></a></ns:root>";
MockEndpoint mock = setupExceptionMock();
mock.expectedMessageCount(1);
sendBody("direct:detached", detachedPayload);
assertMockEndpointsSatisfied();
checkThrownException(mock, SchemaValidationException.class, null);
}
@Test
public void testEceptionDetachedNoXmlSchema() throws Exception {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
XmlSignerEndpoint endpoint = getDetachedSignerEndpoint();
endpoint.getConfiguration().setSchemaResourceUri(null);
MockEndpoint mock = setupExceptionMock();
mock.expectedMessageCount(1);
sendBody("direct:detached", detachedPayload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureException.class,
"The configruation of the XML Signature component is wrong: No XML schema specified in the detached case",
null);
}
@Test
public void testExceptionDetachedXpathInvalid() throws Exception {
String wrongXPath = "n1:p/a"; // namespace prefix is not defined
MockEndpoint mock = testXpath(wrongXPath);
checkThrownException(mock, XmlSignatureException.class,
"The configured xpath expression " + wrongXPath + " is invalid.",
XPathExpressionException.class);
}
@Test
public void testExceptionDetachedXPathNoIdAttribute() throws Exception {
String value = "not id";
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\" stringAttr=\"" + value
+ "\"><b>bValue</b></a></ns:root>";
String xPath = "a/@stringAttr";
MockEndpoint mock = testXpath(xPath, detachedPayload);
checkThrownException(mock, XmlSignatureException.class,
"Wrong configured xpath expression for ID attributes: The evaluation of the xpath expression " + xPath
+ " resulted in an attribute which is not of type ID. The attribute value is "
+ value + ".",
null);
}
@Test
public void testExceptionDetachedXpathNoAttribute() throws Exception {
String xPath = "a"; // Element a
MockEndpoint mock = testXpath(xPath);
checkThrownException(mock, XmlSignatureException.class,
"Wrong configured xpath expression for ID attributes: The evaluation of the xpath expression " + xPath
+ " returned a node which was not of type Attribute.",
null);
}
@Test
public void testExceptionDetachedXPathNoResult() throws Exception {
String xPath = "a/@stringAttr"; // for this xpath there is no result
MockEndpoint mock = testXpath(xPath);
checkThrownException(
mock,
XmlSignatureException.class,
"No element to sign found in the detached case. No node found for the configured xpath expressions "
+ xPath
+ ". Either the configuration of the XML signature component is wrong or the incoming message has not the correct structure.",
null);
}
private MockEndpoint testXpath(String xPath) throws InterruptedException {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
return testXpath(xPath, detachedPayload);
}
private MockEndpoint testXpath(String xPath, String detachedPayload) throws InterruptedException {
MockEndpoint mock = setupExceptionMock();
mock.expectedMessageCount(1);
List<XPathFilterParameterSpec> list = Collections.singletonList(XmlSignatureHelper.getXpathFilter(xPath, null));
sendBody("direct:detached", detachedPayload,
Collections.singletonMap(XmlSignatureConstants.HEADER_XPATHS_TO_ID_ATTRIBUTES, (Object) list));
assertMockEndpointsSatisfied();
return mock;
}
@Test
public void testExceptionDetachedNoParent() throws Exception {
String detachedPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root ID=\"rootId\" xmlns:ns=\"http://test\"><a ID=\"myID\"><b>bValue</b></a></ns:root>";
String xPath = "//@ID";
String localName = "root";
String namespaceURI = "http://test";
String referenceUri = "#rootId";
MockEndpoint mock = testXpath(xPath, detachedPayload);
checkThrownException(mock, XmlSignatureException.class,
"Either the configuration of the XML Signature component is wrong or the incoming document has an invalid structure: The element "
+ localName + "{" + namespaceURI
+ "} which is referenced by the reference URI " + referenceUri
+ " has no parent element. The element must have a parent element in the configured detached case.",
null);
}
@Test
public void testOutputXmlEncodingEnveloping() throws Exception {
String inputEncoding = "UTF-8";
String signerEncoding = "UTF-16";
String outputEncoding = "ISO-8859-1"; // latin 1
String signerEndpointUri = getSignerEndpointURIEnveloping();
String verifierEndpointUri = getVerifierEndpointURIEnveloping();
String directStart = "direct:enveloping";
checkOutputEncoding(inputEncoding, signerEncoding, outputEncoding, signerEndpointUri, verifierEndpointUri, directStart);
}
String getVerifierEndpointURIEnveloping() {
return "xmlsecurity-verify:enveloping?keySelector=#selector";
}
String getSignerEndpointURIEnveloping() {
return "xmlsecurity-sign:enveloping?keyAccessor=#accessor&schemaResourceUri=";
}
@Test
public void testOutputXmlEncodingEnveloped() throws Exception {
String inputEncoding = "UTF-8";
String signerEncoding = "UTF-16";
String outputEncoding = "ISO-8859-1"; // latin 1
String signerEndpointUri = getSignerEndpointURIEnveloped();
String verifierEndpointUri = getVerifierEndpointURIEnveloped();
String directStart = "direct:enveloped";
checkOutputEncoding(inputEncoding, signerEncoding, outputEncoding, signerEndpointUri, verifierEndpointUri, directStart);
}
String getVerifierEndpointURIEnveloped() {
return "xmlsecurity-verify:enveloped?keySelector=#selector";
}
String getSignerEndpointURIEnveloped() {
return "xmlsecurity-sign:enveloped?keyAccessor=#accessor&parentLocalName=root&parentNamespace=http://test/test";
}
private byte[] getPayloadForEncoding(String encoding) {
String s = "<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>"
+ (includeNewLine ? "\n" : "")
+ "<root xmlns=\"http://test/test\"><test>Test Message</test></root>";
return s.getBytes(Charset.forName(encoding));
}
@Test
public void testExceptionParentLocalNameAndXPathSet() throws Exception {
XmlSignerEndpoint endpoint = getSignatureEncpointForSignException();
MockEndpoint mock = setupExceptionMock();
try {
endpoint.getConfiguration().setParentXpath(getNodeSerachXPath());
endpoint.getConfiguration().setParentLocalName("root");
sendBody("direct:signexceptions", payload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureException.class, "The configuration of the XML signer component is wrong. " + //
"The parent local name root and the parent XPath //pre:root are specified. You must not specify both parameters.",
null);
} finally {
endpoint.getConfiguration().setParentXpath(null);
endpoint.getConfiguration().setParentLocalName(null);
}
}
@Test
public void testExceptionXpathsToIdAttributesNameAndXPathSet() throws Exception {
XmlSignerEndpoint endpoint = getSignatureEncpointForSignException();
MockEndpoint mock = setupExceptionMock();
try {
endpoint.getConfiguration().setParentXpath(getNodeSerachXPath());
List<XPathFilterParameterSpec> xpaths
= Collections.singletonList(XmlSignatureHelper.getXpathFilter("/ns:root/a/@ID", null));
endpoint.getConfiguration().setXpathsToIdAttributes(xpaths);
sendBody("direct:signexceptions", payload);
assertMockEndpointsSatisfied();
checkThrownException(
mock,
XmlSignatureException.class,
"The configuration of the XML signer component is wrong. " + //
"The parent XPath //pre:root for an enveloped signature and the XPATHs to ID attributes for a detached signature are specified. You must not specify both parameters.",
null);
} finally {
endpoint.getConfiguration().setParentXpath(null);
endpoint.getConfiguration().setXpathsToIdAttributes(null);
}
}
@Test
public void testExceptionInvalidParentXpath() throws Exception {
XmlSignerEndpoint endpoint = getSignatureEncpointForSignException();
MockEndpoint mock = setupExceptionMock();
try {
endpoint.getConfiguration().setParentXpath(XmlSignatureHelper.getXpathFilter("//pre:root", null)); // invalid xpath: namespace-prefix mapping is missing
sendBody("direct:signexceptions", payload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureException.class,
"The parent XPath //pre:root is wrongly configured: The XPath //pre:root is invalid.", null);
} finally {
endpoint.getConfiguration().setParentXpath(null);
}
}
@Test
public void testExceptionParentXpathWithNoResult() throws Exception {
XmlSignerEndpoint endpoint = getSignatureEncpointForSignException();
MockEndpoint mock = setupExceptionMock();
try {
endpoint.getConfiguration().setParentXpath(XmlSignatureHelper.getXpathFilter("//root", null)); // xpath with no result
sendBody("direct:signexceptions", payload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureException.class,
"The parent XPath //root returned no result. Check the configuration of the XML signer component.", null);
} finally {
endpoint.getConfiguration().setParentXpath(null);
}
}
    // Resolves the signer endpoint used by the sign-exception tests, configured
    // with the default key accessor and an RSA-SHA512 signature algorithm.
    // NOTE(review): the method name contains a typo ("Encpoint"); it is kept
    // unchanged because other tests in this class call it by this name.
    XmlSignerEndpoint getSignatureEncpointForSignException() {
        XmlSignerEndpoint endpoint = (XmlSignerEndpoint) context()
                .getEndpoint("xmlsecurity-sign:signexceptions?keyAccessor=#accessor" + //
                             "&signatureAlgorithm=http://www.w3.org/2001/04/xmldsig-more#rsa-sha512");
        return endpoint;
    }
@Test
public void testExceptionParentXpathWithNoElementResult() throws Exception {
XmlSignerEndpoint endpoint = getSignatureEncpointForSignException();
MockEndpoint mock = setupExceptionMock();
try {
String myPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root ID=\"rootId\" xmlns:ns=\"http://test\"></ns:root>";
endpoint.getConfiguration().setParentXpath(
XmlSignatureHelper.getXpathFilter("/pre:root/@ID", Collections.singletonMap("pre", "http://test"))); // xpath with no element result
sendBody("direct:signexceptions", myPayload);
assertMockEndpointsSatisfied();
checkThrownException(mock, XmlSignatureException.class,
"The parent XPath /pre:root/@ID returned no element. Check the configuration of the XML signer component.",
null);
} finally {
endpoint.getConfiguration().setParentXpath(null);
}
}
@Test
public void testEnvelopedSignatureWithParentXpath() throws Exception {
String myPayload = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
+ (includeNewLine ? "\n" : "")
+ "<ns:root xmlns:ns=\"http://test\"><a>a1</a><a/><test>Test Message</test></ns:root>";
setupMock(myPayload);
sendBody("direct:envelopedParentXpath", myPayload);
assertMockEndpointsSatisfied();
}
XmlSignerEndpoint getDetachedSignerEndpoint() {
XmlSignerEndpoint endpoint = (XmlSignerEndpoint) context().getEndpoint(
"xmlsecurity-sign:detached?keyAccessor=#keyAccessorDefault&xpathsToIdAttributes=#xpathsToIdAttributes&"//
+ "schemaResourceUri=org/apache/camel/component/xmlsecurity/Test.xsd&signatureId=&clearHeaders=false");
return endpoint;
}
private void checkOutputEncoding(
String inputEncoding, String signerEncoding, String outputEncoding, String signerEndpointUri,
String verifierEndpointUri, String directStart)
throws InterruptedException, UnsupportedEncodingException {
byte[] inputPayload = getPayloadForEncoding(inputEncoding);
byte[] expectedPayload = getPayloadForEncoding(outputEncoding);
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived(expectedPayload);
MockEndpoint mockSigned = getMockEndpoint("mock:signed");
mock.expectedMessageCount(1);
XmlSignerEndpoint endpointSigner = (XmlSignerEndpoint) context().getEndpoint(signerEndpointUri);
XmlVerifierEndpoint endpoinVerifier = (XmlVerifierEndpoint) context().getEndpoint(verifierEndpointUri);
try {
endpointSigner.getConfiguration().setOutputXmlEncoding(signerEncoding);
endpoinVerifier.getConfiguration().setOutputXmlEncoding(outputEncoding);
sendBody(directStart, inputPayload);
assertMockEndpointsSatisfied();
Message signedMessage = mockSigned.getExchanges().get(0).getIn();
byte[] signedBytes = signedMessage.getBody(byte[].class);
String signedPayload = new String(signedBytes, signerEncoding);
assertTrue(signedPayload.contains(signerEncoding));
String charsetHeaderSigner = signedMessage.getHeader(Exchange.CHARSET_NAME, String.class);
assertEquals(signerEncoding, charsetHeaderSigner);
String charsetHeaderVerifier = mock.getExchanges().get(0).getIn().getHeader(Exchange.CHARSET_NAME, String.class);
assertEquals(outputEncoding, charsetHeaderVerifier);
} finally {
endpointSigner.getConfiguration().setOutputXmlEncoding(null);
endpoinVerifier.getConfiguration().setOutputXmlEncoding(null);
}
}
private void checkBodyContains(MockEndpoint mock, String expectedPartContent) {
Message message = getMessage(mock);
String body = message.getBody(String.class);
assertNotNull(body);
assertTrue(body.contains(expectedPartContent),
"The message body " + body + " does not contain the expected string " + expectedPartContent);
}
private Object checkXpath(MockEndpoint mock, String xpathString, final Map<String, String> prefix2Namespace)
throws XPathExpressionException, SAXException, IOException, ParserConfigurationException {
Message mess = getMessage(mock);
InputStream body = mess.getBody(InputStream.class);
assertNotNull(body);
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath xpath = xpathFactory.newXPath();
NamespaceContext nc = new NamespaceContext() {
@SuppressWarnings("rawtypes")
@Override
public Iterator getPrefixes(String namespaceURI) {
return null;
}
@Override
public String getPrefix(String namespaceURI) {
return null;
}
@Override
public String getNamespaceURI(String prefix) {
return prefix2Namespace.get(prefix);
}
};
xpath.setNamespaceContext(nc);
XPathExpression expr = xpath.compile(xpathString);
Object result = expr.evaluate(XmlSignatureHelper.newDocumentBuilder(true).parse(body), XPathConstants.NODE);
assertNotNull(result, "The xpath " + xpathString + " returned a null value");
return result;
}
Message getMessage(MockEndpoint mock) {
List<Exchange> exs = mock.getExchanges();
assertNotNull(exs);
assertEquals(1, exs.size());
Exchange ex = exs.get(0);
Message mess = ex.getIn();
assertNotNull(mess);
return mess;
}
private void checkThrownException(
MockEndpoint mock, Class<? extends Exception> cl, Class<? extends Exception> expectedCauseClass)
throws Exception {
checkThrownException(mock, cl, null, expectedCauseClass);
}
static void checkThrownException(
MockEndpoint mock, Class<? extends Exception> cl, String expectedMessage,
Class<? extends Exception> expectedCauseClass)
throws Exception {
Exception e = (Exception) mock.getExchanges().get(0).getProperty(Exchange.EXCEPTION_CAUGHT);
assertNotNull(e, "Expected excpetion " + cl.getName() + " missing");
if (e.getClass() != cl) {
String stackTrace = getStrackTrace(e);
fail("Exception " + cl.getName() + " excpected, but was " + e.getClass().getName() + ": " + stackTrace);
}
if (expectedMessage != null) {
assertEquals(expectedMessage, e.getMessage());
}
if (expectedCauseClass != null) {
Throwable cause = e.getCause();
assertNotNull(cause, "Expected cause exception" + expectedCauseClass.getName() + " missing");
if (expectedCauseClass != cause.getClass()) {
fail("Cause exception " + expectedCauseClass.getName() + " expected, but was " + cause.getClass().getName()
+ ": "
+ getStrackTrace(e));
}
}
}
private static String getStrackTrace(Exception e) throws UnsupportedEncodingException {
ByteArrayOutputStream os = new ByteArrayOutputStream();
PrintWriter w = new PrintWriter(os);
e.printStackTrace(w);
w.close();
String stackTrace = new String(os.toByteArray(), "UTF-8");
return stackTrace;
}
private MockEndpoint setupExceptionMock() {
MockEndpoint mock = getMockEndpoint("mock:exception");
mock.setExpectedMessageCount(1);
MockEndpoint mockResult = getMockEndpoint("mock:result");
mockResult.setExpectedMessageCount(0);
return mock;
}
private MockEndpoint setupMock() {
return setupMock(payload);
}
private MockEndpoint setupMock(String payload) {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived(payload);
return mock;
}
public Exchange doTestSignatureRoute(RouteBuilder builder) throws Exception {
return doSignatureRouteTest(builder, null, Collections.<String, Object> emptyMap());
}
public Exchange doSignatureRouteTest(RouteBuilder builder, Exchange e, Map<String, Object> headers) throws Exception {
CamelContext context = new DefaultCamelContext();
try {
context.addRoutes(builder);
context.start();
MockEndpoint mock = context.getEndpoint("mock:result", MockEndpoint.class);
mock.setExpectedMessageCount(1);
ProducerTemplate template = context.createProducerTemplate();
if (e != null) {
template.send("direct:in", e);
} else {
template.sendBodyAndHeaders("direct:in", payload, headers);
}
assertMockEndpointsSatisfied();
return mock.getReceivedExchanges().get(0);
} finally {
context.stop();
}
}
@Override
@BeforeEach
public void setUp() throws Exception {
setUpKeys("RSA", 1024);
disableJMX();
super.setUp();
}
public void setUpKeys(String algorithm, int keylength) throws Exception {
keyPair = getKeyPair(algorithm, keylength);
}
public static KeyPair getKeyPair(String algorithm, int keylength) {
KeyPairGenerator keyGen;
try {
keyGen = KeyPairGenerator.getInstance(algorithm);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
keyGen.initialize(keylength, new SecureRandom());
return keyGen.generateKeyPair();
}
public static KeyStore loadKeystore() throws Exception {
KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
InputStream in = XmlSignatureTest.class.getResourceAsStream("/bob.keystore");
keystore.load(in, "letmein".toCharArray());
return keystore;
}
public Certificate getCertificateFromKeyStore() throws Exception {
Certificate c = loadKeystore().getCertificate("bob");
return c;
}
public PrivateKey getKeyFromKeystore() throws Exception {
return (PrivateKey) loadKeystore().getKey("bob", "letmein".toCharArray());
}
private AlgorithmMethod getCanonicalizationMethod() {
List<String> inclusivePrefixes = new ArrayList<>(1);
inclusivePrefixes.add("ds");
return XmlSignatureHelper.getCanonicalizationMethod(CanonicalizationMethod.EXCLUSIVE, inclusivePrefixes);
}
private List<AlgorithmMethod> getTransformsXPath2() {
List<XPathAndFilter> list = new ArrayList<>(3);
XPathAndFilter xpath1 = new XPathAndFilter("//n0:ToBeSigned", XPathType.Filter.INTERSECT.toString());
list.add(xpath1);
XPathAndFilter xpath2 = new XPathAndFilter("//n0:NotToBeSigned", XPathType.Filter.SUBTRACT.toString());
list.add(xpath2);
XPathAndFilter xpath3 = new XPathAndFilter("//n0:ReallyToBeSigned", XPathType.Filter.UNION.toString());
list.add(xpath3);
List<AlgorithmMethod> result = new ArrayList<>(2);
result.add(XmlSignatureHelper.getCanonicalizationMethod(CanonicalizationMethod.INCLUSIVE));
result.add(XmlSignatureHelper.getXPath2Transform(list, getNamespaceMap()));
return result;
}
private Map<String, String> getNamespaceMap() {
Map<String, String> result = new HashMap<>(1);
result.put("n0", "http://test/test");
return result;
}
private List<AlgorithmMethod> getTransformsXsltXpath() {
try {
AlgorithmMethod transformXslt
= XmlSignatureHelper.getXslTransform("/org/apache/camel/component/xmlsecurity/xslt_test.xsl");
Map<String, String> namespaceMap = new HashMap<>(1);
namespaceMap.put("n0", "https://org.apache/camel/xmlsecurity/test");
AlgorithmMethod transformXpath = XmlSignatureHelper.getXPathTransform("//n0:XMLSecurity/n0:Content", namespaceMap);
// I removed base 64 transform because the JDK implementation does
// not correctly support this transformation
// AlgorithmMethod transformBase64 = helper.getBase64Transform();
List<AlgorithmMethod> result = new ArrayList<>(3);
result.add(XmlSignatureHelper.getCanonicalizationMethod(CanonicalizationMethod.INCLUSIVE));
result.add(transformXslt);
result.add(transformXpath);
// result.add(transformBase64);
return result;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
static KeyAccessor getKeyAccessor(final PrivateKey privateKey) {
KeyAccessor accessor = new KeyAccessor() {
@Override
public KeySelector getKeySelector(Message message) throws Exception {
return KeySelector.singletonKeySelector(privateKey);
}
@Override
public KeyInfo getKeyInfo(Message mess, Node messageBody, KeyInfoFactory keyInfoFactory) throws Exception {
return null;
}
};
return accessor;
}
public static String getBaseUri() {
String uri = "file:/" + System.getProperty("user.dir") + "/src/test/resources/org/apache/camel/component/xmlsecurity/";
return uri.replace('\\', '/');
}
public static KeySelector getKeyValueKeySelector() {
return new KeyValueKeySelector();
}
/**
* KeySelector which retrieves the public key from the KeyValue element and returns it. NOTE: If the key algorithm
* doesn't match signature algorithm, then the public key will be ignored.
*/
static class KeyValueKeySelector extends KeySelector {
@Override
public KeySelectorResult select(
KeyInfo keyInfo, KeySelector.Purpose purpose, AlgorithmMethod method, XMLCryptoContext context)
throws KeySelectorException {
if (keyInfo == null) {
throw new KeySelectorException("Null KeyInfo object!");
}
SignatureMethod sm = (SignatureMethod) method;
@SuppressWarnings("rawtypes")
List list = keyInfo.getContent();
for (int i = 0; i < list.size(); i++) {
XMLStructure xmlStructure = (XMLStructure) list.get(i);
if (xmlStructure instanceof KeyValue) {
PublicKey pk = null;
try {
pk = ((KeyValue) xmlStructure).getPublicKey();
} catch (KeyException ke) {
throw new KeySelectorException(ke);
}
// make sure algorithm is compatible with method
if (algEquals(sm.getAlgorithm(), pk.getAlgorithm())) {
return new SimpleKeySelectorResult(pk);
}
}
}
throw new KeySelectorException("No KeyValue element found!");
}
static boolean algEquals(String algURI, String algName) {
return (algName.equalsIgnoreCase("DSA") && algURI.equalsIgnoreCase(SignatureMethod.DSA_SHA1))
|| (algName.equalsIgnoreCase("RSA") && algURI.equalsIgnoreCase(SignatureMethod.RSA_SHA1));
}
}
private static class SimpleKeySelectorResult implements KeySelectorResult {
private PublicKey pk;
SimpleKeySelectorResult(PublicKey pk) {
this.pk = pk;
}
@Override
public Key getKey() {
return pk;
}
}
    // Disables manifest validation so signature checks succeed even when
    // manifest references cannot be resolved.  Note: the method name typo
    // ("Cryto") is preserved for caller compatibility.
    public static Map<String, ? extends Object> getCrytoContextProperties() {
        return Collections.singletonMap("org.jcp.xml.dsig.validateManifests", Boolean.FALSE);
    }

    // Key material for the default test identity "bob" (RSA).
    public static KeyAccessor getDefaultKeyAccessor() throws Exception {
        return TestKeystore.getKeyAccessor("bob");
    }

    public static KeySelector getDefaultKeySelector() throws Exception {
        return TestKeystore.getKeySelector("bob");
    }

    // Key material for the DSA test identity "bobdsa".
    public static KeyAccessor getDefaultKeyAccessorDsa() throws Exception {
        return TestKeystore.getKeyAccessor("bobdsa");
    }

    public static KeySelector getDefaultKeySelectorDsa() throws Exception {
        return TestKeystore.getKeySelector("bobdsa");
    }

    public static XmlSignatureChecker getEnvelopingXmlSignatureChecker() {
        return new EnvelopingXmlSignatureChecker();
    }

    public static XmlSignature2Message getXmlSignature2MessageWithTimestampdProperty() {
        return new XmlSignature2Message2MessageWithTimestampProperty();
    }

    public static ValidationFailedHandler getValidationFailedHandlerIgnoreManifestFailures() {
        return new ValidationFailedHandlerIgnoreManifestFailures();
    }

    public static XmlSignatureProperties getSignatureProperties() {
        return new TimestampProperty();
    }

    // XPath filter selecting //pre:root with the "pre" prefix bound to
    // http://test/test.  (Method name typo "Serach" kept for compatibility.)
    public static XPathFilterParameterSpec getNodeSerachXPath() {
        Map<String, String> prefix2Namespace = Collections.singletonMap("pre", "http://test/test");
        return XmlSignatureHelper.getXpathFilter("//pre:root", prefix2Namespace);
    }

    public static URIDereferencer getSameDocumentUriDereferencer() {
        return SameDocumentUriDereferencer.getInstance();
    }

    // XPath locating the last <a> child of ns:root (ns = http://test).
    public static XPathFilterParameterSpec getParentXPathBean() {
        Map<String, String> prefix2Namespace = Collections.singletonMap("ns", "http://test");
        return XmlSignatureHelper.getXpathFilter("/ns:root/a[last()]", prefix2Namespace);
    }
}
| apache-2.0 |
apache/incubator-apex-malhar | library/src/main/java/org/apache/apex/malhar/lib/testbench/CollectorTestSink.java | 2065 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.malhar.lib.testbench;
import java.util.ArrayList;
import java.util.List;
import com.datatorrent.api.Sink;
/**
* A sink implementation to collect expected test results.
* <p>
* @displayName Collector Test Sink
* @category Test Bench
* @tags sink
* @since 0.3.2
*/
public class CollectorTestSink<T> implements Sink<T>
{
  /**
   * Tuples received so far. Guarded by its own monitor; waiters block on it in
   * {@link #waitForResultCount(int, long)}.
   */
  public final List<T> collectedTuples = new ArrayList<T>();

  /**
   * Discards all tuples collected so far.
   */
  public void clear()
  {
    // Synchronize for consistency with put()/getCount(), which also guard
    // access with the list's monitor.
    synchronized (collectedTuples) {
      this.collectedTuples.clear();
    }
  }

  @Override
  public void put(T payload)
  {
    synchronized (collectedTuples) {
      collectedTuples.add(payload);
      // Wake any thread blocked in waitForResultCount().
      collectedTuples.notifyAll();
    }
  }

  /**
   * Blocks until at least {@code count} tuples have been collected or
   * {@code timeoutMillis} milliseconds have elapsed.
   *
   * @param count         number of tuples to wait for
   * @param timeoutMillis maximum time to wait, in milliseconds
   * @throws InterruptedException if the waiting thread is interrupted
   */
  public void waitForResultCount(int count, long timeoutMillis) throws InterruptedException
  {
    // Use an absolute deadline: the previous implementation subtracted a fixed
    // 20 ms per loop iteration, so early wakeups from notifyAll() silently
    // extended the effective timeout. It also read the list size outside the
    // monitor in the loop condition.
    final long deadline = System.currentTimeMillis() + timeoutMillis;
    synchronized (collectedTuples) {
      while (collectedTuples.size() < count) {
        long remaining = deadline - System.currentTimeMillis();
        if (remaining <= 0) {
          break;
        }
        collectedTuples.wait(Math.min(remaining, 20));
      }
    }
  }

  /**
   * Returns the number of collected tuples, optionally clearing the sink.
   *
   * @param reset if true, the collected tuples are discarded after counting
   */
  @Override
  public int getCount(boolean reset)
  {
    synchronized (collectedTuples) {
      try {
        return collectedTuples.size();
      } finally {
        if (reset) {
          collectedTuples.clear();
        }
      }
    }
  }
}
| apache-2.0 |
apixandru/intellij-community | platform/platform-impl/src/com/intellij/openapi/editor/impl/ImmediatePainter.java | 15068 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.ex.util.EditorUIUtil;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.ex.util.LexerEditorHighlighter;
import com.intellij.openapi.editor.impl.view.FontLayoutService;
import com.intellij.openapi.editor.impl.view.IterationState;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.HighlighterTargetArea;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.registry.RegistryValue;
import com.intellij.ui.EditorTextField;
import com.intellij.ui.Gray;
import com.intellij.ui.JBColor;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.awt.image.VolatileImage;
import java.util.ArrayList;
import java.util.List;
/**
* @author Pavel Fatin
*/
class ImmediatePainter {
  private static final int DEBUG_PAUSE_DURATION = 1000;

  static final RegistryValue ENABLED = Registry.get("editor.zero.latency.rendering");
  static final RegistryValue DOUBLE_BUFFERING = Registry.get("editor.zero.latency.rendering.double.buffering");
  private static final RegistryValue PIPELINE_FLUSH = Registry.get("editor.zero.latency.rendering.pipeline.flush");
  private static final RegistryValue DEBUG = Registry.get("editor.zero.latency.rendering.debug");

  private final EditorImpl myEditor;
  // Reusable off-screen buffer for double-buffered immediate painting.
  private Image myImage;

  ImmediatePainter(EditorImpl editor) {
    myEditor = editor;
    // Release native image resources together with the editor.
    Disposer.register(editor.getDisposable(), () -> {
      if (myImage != null) {
        myImage.flush();
      }
    });
  }

  /**
   * Paints the effect of {@code plan} directly to {@code g} when it is the
   * simple case this painter supports: a single one-character insertion that
   * shifts the caret one position right.
   */
  void paint(final Graphics g, final EditorActionPlan plan) {
    if (ENABLED.asBoolean() && canPaintImmediately(myEditor)) {
      if (plan.getCaretShift() != 1) return;

      final List<EditorActionPlan.Replacement> replacements = plan.getReplacements();
      if (replacements.size() != 1) return;

      final EditorActionPlan.Replacement replacement = replacements.get(0);
      if (replacement.getText().length() != 1) return;

      final int caretOffset = replacement.getBegin();
      final char c = replacement.getText().charAt(0);
      paintImmediately(g, caretOffset, c);
    }
  }

  // Immediate painting is only safe in the plain single-caret, no-selection,
  // no-RTL case inside a regular lexer-highlighted editor.
  private static boolean canPaintImmediately(final EditorImpl editor) {
    final CaretModel caretModel = editor.getCaretModel();
    final Caret caret = caretModel.getPrimaryCaret();
    final Document document = editor.getDocument();

    return !(editor.getComponent().getParent() instanceof EditorTextField) &&
           document instanceof DocumentImpl &&
           editor.getHighlighter() instanceof LexerEditorHighlighter &&
           !editor.getSelectionModel().hasSelection() &&
           caretModel.getCaretCount() == 1 &&
           !isInVirtualSpace(editor, caret) &&
           !isInsertion(document, caret.getOffset()) &&
           !caret.isAtRtlLocation() &&
           !caret.isAtBidiRunBoundary();
  }

  private static boolean isInVirtualSpace(final Editor editor, final Caret caret) {
    return caret.getLogicalPosition().compareTo(editor.offsetToLogicalPosition(caret.getOffset())) != 0;
  }

  private static boolean isInsertion(final Document document, final int offset) {
    return offset < document.getTextLength() && document.getCharsSequence().charAt(offset) != '\n';
  }

  /**
   * Repaints the character preceding the insertion point ({@code c1}), the
   * typed character ({@code c2}) and the caret, clipped to the affected area.
   */
  private void paintImmediately(final Graphics g, final int offset, final char c2) {
    final EditorImpl editor = myEditor;
    final Document document = editor.getDocument();
    final LexerEditorHighlighter highlighter = (LexerEditorHighlighter)myEditor.getHighlighter();

    final EditorSettings settings = editor.getSettings();
    final boolean isBlockCursor = editor.isInsertMode() == settings.isBlockCursor();
    final int lineHeight = editor.getLineHeight();
    final int ascent = editor.getAscent();
    final int topOverhang = editor.myView.getTopOverhang();
    final int bottomOverhang = editor.myView.getBottomOverhang();

    final char c1 = offset == 0 ? ' ' : document.getCharsSequence().charAt(offset - 1);

    final List<TextAttributes> attributes = highlighter.getAttributesForPreviousAndTypedChars(document, offset, c2);
    updateAttributes(editor, offset, attributes);

    final TextAttributes attributes1 = attributes.get(0);
    final TextAttributes attributes2 = attributes.get(1);

    if (!(canRender(attributes1) && canRender(attributes2))) {
      return;
    }

    FontLayoutService fontLayoutService = FontLayoutService.getInstance();
    final float width1 = fontLayoutService.charWidth2D(editor.getFontMetrics(attributes1.getFontType()), c1);
    final float width2 = fontLayoutService.charWidth2D(editor.getFontMetrics(attributes2.getFontType()), c2);

    final Font font1 = EditorUtil.fontForChar(c1, attributes1.getFontType(), editor).getFont();
    // Fixed: font2 is used to draw the typed character c2, so it must be
    // selected for c2 (previously this passed c1, which could pick a font
    // without a glyph for c2, e.g. when typing a non-Latin char after ASCII).
    final Font font2 = EditorUtil.fontForChar(c2, attributes2.getFontType(), editor).getFont();

    final Point2D p2 = editor.offsetToPoint2D(offset);
    float p2x = (float)p2.getX();
    int p2y = (int)p2.getY();
    // Compute integer widths relative to the rounded pixel grid so that the
    // repainted rectangles line up with what the regular painter produced.
    int width1i = (int)(p2x) - (int)(p2x - width1);
    int width2i = (int)(p2x + width2) - (int)p2x;

    Caret caret = editor.getCaretModel().getPrimaryCaret();
    //noinspection ConstantConditions
    final int caretWidth = isBlockCursor ? editor.getCaretLocations(false)[0].myWidth
                                         : JBUI.scale(caret.getVisualAttributes().getWidth(settings.getLineCursorWidth()));
    final float caretShift = isBlockCursor ? 0 : caretWidth == 1 ? 0 : 1 / JBUI.sysScale((Graphics2D)g);
    final Rectangle2D caretRectangle = new Rectangle2D.Float((int)(p2x + width2) - caretShift, p2y - topOverhang,
                                                             caretWidth, lineHeight + topOverhang + bottomOverhang + (isBlockCursor ? -1 : 0));

    final Rectangle rectangle1 = new Rectangle((int)(p2x - width1), p2y, width1i, lineHeight);
    final Rectangle rectangle2 = new Rectangle((int)p2x, p2y, (int)(width2i + caretWidth - caretShift), lineHeight);

    final Consumer<Graphics> painter = graphics -> {
      EditorUIUtil.setupAntialiasing(graphics);

      // Paint the new character and caret first, then re-paint the preceding
      // character on top (its glyph may overhang into the new cell).
      fillRect(graphics, rectangle2, attributes2.getBackgroundColor());
      drawChar(graphics, c2, p2x, p2y + ascent, font2, attributes2.getForegroundColor());

      fillRect(graphics, caretRectangle, getCaretColor(editor));

      fillRect(graphics, rectangle1, attributes1.getBackgroundColor());
      drawChar(graphics, c1, p2x - width1, p2y + ascent, font1, attributes1.getForegroundColor());
    };

    final Shape originalClip = g.getClip();

    g.setClip(new Rectangle2D.Float((int)p2x - caretShift, p2y, width2i + caretWidth, lineHeight));

    if (DOUBLE_BUFFERING.asBoolean()) {
      paintWithDoubleBuffering(g, painter);
    }
    else {
      painter.consume(g);
    }

    g.setClip(originalClip);

    if (PIPELINE_FLUSH.asBoolean()) {
      // Force the rendering pipeline to flush so the result hits the screen now.
      Toolkit.getDefaultToolkit().sync();
    }

    if (DEBUG.asBoolean()) {
      pause();
    }
  }

  // Boxed effects need the surrounding context to be repainted, which the
  // immediate painter cannot do.
  private static boolean canRender(final TextAttributes attributes) {
    return attributes.getEffectType() != EffectType.BOXED || attributes.getEffectColor() == null;
  }

  private void paintWithDoubleBuffering(final Graphics graphics, final Consumer<Graphics> painter) {
    final Rectangle bounds = graphics.getClipBounds();

    createOrUpdateImageBuffer(myEditor.getComponent(), bounds.getSize());

    final Graphics imageGraphics = myImage.getGraphics();
    imageGraphics.translate(-bounds.x, -bounds.y);
    painter.consume(imageGraphics);
    imageGraphics.dispose();

    graphics.drawImage(myImage, bounds.x, bounds.y, null);
  }

  // Lazily (re)creates the off-screen buffer; volatile images may become
  // incompatible after display-configuration changes and must be recreated.
  private void createOrUpdateImageBuffer(final JComponent component, final Dimension size) {
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      if (myImage == null || !isLargeEnough(myImage, size)) {
        myImage = UIUtil.createImage(size.width, size.height, BufferedImage.TYPE_INT_ARGB);
      }
    }
    else {
      if (myImage == null) {
        myImage = component.createVolatileImage(size.width, size.height);
      }
      else if (!isLargeEnough(myImage, size) ||
               ((VolatileImage)myImage).validate(component.getGraphicsConfiguration()) == VolatileImage.IMAGE_INCOMPATIBLE) {
        myImage.flush();
        myImage = component.createVolatileImage(size.width, size.height);
      }
    }
  }

  private static boolean isLargeEnough(final Image image, final Dimension size) {
    final int width = image.getWidth(null);
    final int height = image.getHeight(null);

    if (width == -1 || height == -1) {
      throw new IllegalArgumentException("Image size is undefined");
    }

    return width >= size.width && height >= size.height;
  }

  private static void fillRect(final Graphics g, final Rectangle2D r, final Color color) {
    g.setColor(color);
    ((Graphics2D)g).fill(r);
  }

  private static void drawChar(final Graphics g,
                               final char c,
                               final float x, final float y,
                               final Font font, final Color color) {
    g.setFont(font);
    g.setColor(color);
    ((Graphics2D)g).drawString(String.valueOf(c), x, y);
  }

  private static Color getCaretColor(final Editor editor) {
    Color overriddenColor = editor.getCaretModel().getPrimaryCaret().getVisualAttributes().getColor();
    if (overriddenColor != null) return overriddenColor;
    final Color caretColor = editor.getColorsScheme().getColor(EditorColors.CARET_COLOR);
    return caretColor == null ? new JBColor(Gray._0, Gray._255) : caretColor;
  }

  // Collects the highlighters affecting the char before the offset (list1)
  // and the typed char at the offset (list2), then merges their attributes.
  private static void updateAttributes(final EditorImpl editor, final int offset, final List<TextAttributes> attributes) {
    final List<RangeHighlighterEx> list1 = new ArrayList<>();
    final List<RangeHighlighterEx> list2 = new ArrayList<>();

    final Processor<RangeHighlighterEx> processor = highlighter -> {
      if (!highlighter.isValid()) return true;

      final boolean isLineHighlighter = highlighter.getTargetArea() == HighlighterTargetArea.LINES_IN_RANGE;

      if (isLineHighlighter || highlighter.getStartOffset() < offset) {
        list1.add(highlighter);
      }

      if (isLineHighlighter || highlighter.getEndOffset() > offset ||
          (highlighter.getEndOffset() == offset && (highlighter.isGreedyToRight()))) {
        list2.add(highlighter);
      }

      return true;
    };

    editor.getFilteredDocumentMarkupModel().processRangeHighlightersOverlappingWith(Math.max(0, offset - 1), offset, processor);
    editor.getMarkupModel().processRangeHighlightersOverlappingWith(Math.max(0, offset - 1), offset, processor);

    updateAttributes(editor, attributes.get(0), list1);
    updateAttributes(editor, attributes.get(1), list2);
  }

  // TODO Unify with com.intellij.openapi.editor.impl.view.IterationState.setAttributes
  private static void updateAttributes(final EditorImpl editor,
                                       final TextAttributes attributes,
                                       final List<RangeHighlighterEx> highlighters) {
    if (highlighters.size() > 1) {
      ContainerUtil.quickSort(highlighters, IterationState.BY_LAYER_THEN_ATTRIBUTES);
    }

    TextAttributes syntax = attributes;
    TextAttributes caretRow = editor.getCaretModel().getTextAttributes();

    final int size = highlighters.size();

    // An ERASE_MARKER highlighter cancels the syntax-level attributes entirely.
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < size; i++) {
      RangeHighlighterEx highlighter = highlighters.get(i);
      if (highlighter.getTextAttributes() == TextAttributes.ERASE_MARKER) {
        syntax = null;
      }
    }

    // Build the attribute stack in layer order, inserting the caret-row and
    // syntax attributes at their respective layers.
    final List<TextAttributes> cachedAttributes = new ArrayList<>();

    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < size; i++) {
      RangeHighlighterEx highlighter = highlighters.get(i);

      if (caretRow != null && highlighter.getLayer() < HighlighterLayer.CARET_ROW) {
        cachedAttributes.add(caretRow);
        caretRow = null;
      }

      if (syntax != null && highlighter.getLayer() < HighlighterLayer.SYNTAX) {
        cachedAttributes.add(syntax);
        syntax = null;
      }

      TextAttributes textAttributes = highlighter.getTextAttributes();
      if (textAttributes != null && textAttributes != TextAttributes.ERASE_MARKER) {
        cachedAttributes.add(textAttributes);
      }
    }

    if (caretRow != null) cachedAttributes.add(caretRow);
    if (syntax != null) cachedAttributes.add(syntax);

    // First non-null wins for each attribute component.
    Color foreground = null;
    Color background = null;
    Color effect = null;
    EffectType effectType = null;
    int fontType = 0;

    //noinspection ForLoopReplaceableByForEach, Duplicates
    for (int i = 0; i < cachedAttributes.size(); i++) {
      TextAttributes attrs = cachedAttributes.get(i);

      if (foreground == null) {
        foreground = attrs.getForegroundColor();
      }

      if (background == null) {
        background = attrs.getBackgroundColor();
      }

      if (fontType == Font.PLAIN) {
        fontType = attrs.getFontType();
      }

      if (effect == null) {
        effect = attrs.getEffectColor();
        effectType = attrs.getEffectType();
      }
    }

    if (foreground == null) foreground = editor.getForegroundColor();
    if (background == null) background = editor.getBackgroundColor();
    if (effectType == null) effectType = EffectType.BOXED;
    TextAttributes defaultAttributes = editor.getColorsScheme().getAttributes(HighlighterColors.TEXT);
    if (fontType == Font.PLAIN) fontType = defaultAttributes == null ? Font.PLAIN : defaultAttributes.getFontType();

    attributes.setAttributes(foreground, background, effect, null, effectType, fontType);
  }

  // Debug aid: keeps the immediately-painted frame on screen for inspection.
  private static void pause() {
    try {
      Thread.sleep(DEBUG_PAUSE_DURATION);
    }
    catch (InterruptedException e) {
      // ...
    }
  }
}
| apache-2.0 |
halfhp/j2objc | jre_emul/Classes/com/google/j2objc/security/IosRSAKeyPairGenerator.java | 4015 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.j2objc.security;
import java.math.BigInteger;
import java.security.InvalidAlgorithmParameterException;
import java.security.KeyPair;
import java.security.KeyPairGeneratorSpi;
import java.security.SecureRandom;
import java.security.spec.AlgorithmParameterSpec;
import java.security.spec.RSAKeyGenParameterSpec;
/*-[
#import "com/google/j2objc/security/IosRSAKey.h"
]-*/
public class IosRSAKeyPairGenerator extends KeyPairGeneratorSpi {

  /**
   * Default RSA key size 2048 bits.
   */
  private int keySize = 2048;

  /**
   * Generates an RSA key pair using the iOS Security framework
   * (SecKeyGeneratePair) with the current {@link #keySize}. The keys are
   * stored in the keychain under the tags defined in IosRSAKey, and are
   * returned wrapped in IosRSAKey public/private key objects.
   * <p>
   * The body between the OCNI markers is Objective-C executed by
   * J2ObjC-translated code; it must not be edited as ordinary comment text.
   */
  @Override
  public native KeyPair generateKeyPair() /*-[
    // Requested keypair attributes.
    NSMutableDictionary * privateKeyAttr = [[NSMutableDictionary alloc] init];
    [privateKeyAttr setObject:[NSNumber numberWithBool:YES] forKey:(id)kSecAttrIsPermanent];
    NSData *privateTag = [ComGoogleJ2objcSecurityIosRSAKey_PRIVATE_KEY_TAG_
                          dataUsingEncoding:NSUTF8StringEncoding];
    [privateKeyAttr setObject:privateTag forKey:(id)kSecAttrApplicationTag];
    NSMutableDictionary * publicKeyAttr = [[NSMutableDictionary alloc] init];
    [publicKeyAttr setObject:[NSNumber numberWithBool:YES] forKey:(id)kSecAttrIsPermanent];
    NSData *publicTag = [ComGoogleJ2objcSecurityIosRSAKey_PUBLIC_KEY_TAG_
                         dataUsingEncoding:NSUTF8StringEncoding];
    [publicKeyAttr setObject:publicTag forKey:(id)kSecAttrApplicationTag];
    NSMutableDictionary * keyPairAttr = [[NSMutableDictionary alloc] init];
    [keyPairAttr setObject:(id)kSecAttrKeyTypeRSA forKey:(id)kSecAttrKeyType];
    [keyPairAttr setObject:[NSNumber numberWithUnsignedInteger:keySize_]
                    forKey:(id)kSecAttrKeySizeInBits];
    [keyPairAttr setObject:privateKeyAttr forKey:@"private"];
    [keyPairAttr setObject:publicKeyAttr forKey:@"public"];
    SecKeyRef publicKeyRef = NULL;
    SecKeyRef privateKeyRef = NULL;
    SecKeyGeneratePair((CFDictionaryRef)keyPairAttr, &publicKeyRef, &privateKeyRef);
    [privateKeyAttr release];
    [publicKeyAttr release];
    [keyPairAttr release];
    ComGoogleJ2objcSecurityIosRSAKey_IosRSAPublicKey *publicKey =
        [[ComGoogleJ2objcSecurityIosRSAKey_IosRSAPublicKey alloc]
         initWithLong:(long long)publicKeyRef];
    ComGoogleJ2objcSecurityIosRSAKey_IosRSAPrivateKey *privateKey =
        [[ComGoogleJ2objcSecurityIosRSAKey_IosRSAPrivateKey alloc]
         initWithLong:(long long)privateKeyRef];
    JavaSecurityKeyPair *keyPair =
        AUTORELEASE([[JavaSecurityKeyPair alloc] initWithJavaSecurityPublicKey:publicKey
                                                    withJavaSecurityPrivateKey:privateKey]);
    [publicKey release];
    [privateKey release];
    return keyPair;
  ]-*/;

  /**
   * Sets the key size in bits for subsequently generated key pairs.
   * The {@code random} source is ignored; iOS supplies its own entropy.
   */
  @Override
  public void initialize(int keySize, SecureRandom random) {
    this.keySize = keySize;
  }

  /**
   * Sets the key size from an {@link RSAKeyGenParameterSpec}.
   * <p>
   * NOTE(review): the spec's public exponent is silently ignored — presumably
   * because SecKeyGeneratePair does not expose it; confirm before relying on
   * non-default exponents.
   *
   * @throws InvalidAlgorithmParameterException if {@code params} is not an
   *         {@code RSAKeyGenParameterSpec}
   */
  @Override
  public void initialize(AlgorithmParameterSpec params, SecureRandom random)
      throws InvalidAlgorithmParameterException {
    if (!(params instanceof RSAKeyGenParameterSpec)) {
      throw new InvalidAlgorithmParameterException("Only RSAKeyGenParameterSpec supported");
    }
    this.keySize = ((RSAKeyGenParameterSpec) params).getKeysize();
  }
}
| apache-2.0 |
lampepfl/dotty | tests/run-macros/quote-indexed-map-by-name/quoted_2.scala | 116 | object Test {
def main(args: Array[String]): Unit = {
Index.succ["bar", "foo", ("bar", ("baz", Unit))]
}
}
| apache-2.0 |
dongjiaqiang/mina | core/src/test/java/org/apache/mina/util/ByteBufferDumperTest.java | 4431 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.mina.util;
import static org.junit.Assert.assertEquals;
import java.nio.ByteBuffer;
import org.junit.Test;
/**
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class ByteBufferDumperTest {

    /** A buffer of printable ASCII is dumped as a string, without consuming it. */
    @Test
    public void stringTest() {
        String toTest = "yopYOP\n\r";
        byte[] charData = toTest.getBytes();
        assertEquals(toTest.length(), charData.length);

        ByteBuffer myBuffer = ByteBuffer.allocate(toTest.length());
        for (int i = 0; i < toTest.length(); i++) {
            myBuffer.put(charData[i]);
        }
        myBuffer.flip();

        int remaining = myBuffer.remaining();
        int pos = myBuffer.position();

        String dump = ByteBufferDumper.dump(myBuffer);
        assertEquals("ByteBuffer[len=8,str='" + toTest + "']", dump);
        // Dumping must not move the buffer's position or change remaining.
        assertEquals(remaining, myBuffer.remaining());
        assertEquals(pos, myBuffer.position());
    }

    /** A buffer containing non-printable bytes is dumped as hex. */
    @Test
    public void binaryTest() {
        ByteBuffer myBuffer = ByteBuffer.allocate(4);
        myBuffer.put((byte) 0x88);
        myBuffer.put((byte) 0x03);
        myBuffer.put((byte) 0xFF);
        myBuffer.flip();

        int remaining = myBuffer.remaining();
        int pos = myBuffer.position();

        // Removed leftover System.err.println debug output.
        String dump = ByteBufferDumper.dump(myBuffer);
        assertEquals("ByteBuffer[len=3,bytes='0x88 0x03 0xFF']", dump);
        assertEquals(remaining, myBuffer.remaining());
        assertEquals(pos, myBuffer.position());
    }

    /** The size limit truncates the dump; 0 yields an empty dump, negative means unlimited. */
    @Test
    public void testWithSizeLimit() {
        ByteBuffer bb = ByteBuffer.allocate(10);
        bb.put(new byte[] { 0x01, (byte) 0x8F, 0x04, 0x7A, (byte) 0xc2, 0x23, (byte) 0xA0, 0x08, 0x44 });
        bb.flip();

        assertEquals("ByteBuffer[len=9,bytes='0x01 0x8F 0x04 0x7A 0xC2']", ByteBufferDumper.dump(bb, 5, false));
        assertEquals("ByteBuffer[len=9,bytes='0x01 0x8F 0x04 0x7A 0xC2']", ByteBufferDumper.dump(bb, 5, true));
        assertEquals("ByteBuffer[len=9,str='']", ByteBufferDumper.dump(bb, 0, true));
        assertEquals("ByteBuffer[len=9,bytes='0x01 0x8F 0x04 0x7A 0xC2 0x23 0xA0 0x08 0x44']",
                ByteBufferDumper.dump(bb, 10, true));
        assertEquals("ByteBuffer[len=9,bytes='0x01 0x8F 0x04 0x7A 0xC2 0x23 0xA0 0x08 0x44']",
                ByteBufferDumper.dump(bb, -1, false));
    }

    /** Bytes are rendered as uppercase, zero-padded hex pairs. */
    @Test
    public void toHex() {
        ByteBuffer bb = ByteBuffer.allocate(4);
        bb.put((byte) 0);
        bb.put((byte) 1);
        bb.put((byte) 2);
        bb.put((byte) 254);
        bb.flip();
        assertEquals("000102FE", ByteBufferDumper.toHex(bb));
    }

    @Test
    public void checkFromHexStringEmptyStringReturnsEmptyByteArray() {
        ByteBuffer buffer = ByteBufferDumper.fromHexString("");
        assertEquals(0, buffer.remaining());
    }

    @Test
    public void checkFromHexStringNormalStringReturnsByteArray() {
        ByteBuffer buffer = ByteBufferDumper.fromHexString("ff");
        assertEquals(1, buffer.remaining());
        assertEquals(-1, buffer.get());
    }

    @Test
    public void checkFromHexStringNormalStringUppercaseReturnsByteArray() {
        ByteBuffer buffer = ByteBufferDumper.fromHexString("FF");
        assertEquals(1, buffer.remaining());
        assertEquals(-1, buffer.get());
    }

    /** An invalid hex string must raise NumberFormatException. */
    @Test(expected = NumberFormatException.class)
    public void checkFromHexStringInvalidStringReturnsException() {
        // Removed the unreachable assertions that followed this call: the
        // expected exception is thrown here, so they could never execute.
        ByteBufferDumper.fromHexString("non-hexastring");
    }
}
| apache-2.0 |
minishift/minishift | vendor/github.com/containers/storage/pkg/ostree/ostree.go | 5220 | // +build ostree
package ostree
import (
"fmt"
"golang.org/x/sys/unix"
"os"
"path/filepath"
"runtime"
"syscall"
"time"
"unsafe"
"github.com/containers/storage/pkg/idtools"
"github.com/containers/storage/pkg/system"
glib "github.com/ostreedev/ostree-go/pkg/glibobject"
"github.com/ostreedev/ostree-go/pkg/otbuiltin"
"github.com/pkg/errors"
)
// #cgo pkg-config: glib-2.0 gobject-2.0 ostree-1
// #include <glib.h>
// #include <glib-object.h>
// #include <gio/gio.h>
// #include <stdlib.h>
// #include <ostree.h>
// #include <gio/ginputstream.h>
import "C"
// OstreeSupport reports whether OSTree deduplication support is compiled in.
// This file is built only under the `ostree` build tag, so it always returns
// true; a sibling file provides the false case when the tag is absent.
func OstreeSupport() bool {
	return true
}
// fixFiles walks dir preparing it for an OSTree commit.
//
// In rootless mode (usermode=true) it widens permissions so OSTree can read
// everything (dirs get u+rwx, regular files u+rw). In root mode it records
// overlay whiteout devices (char device 0:0) in the returned slice so they can
// be removed before commit and recreated after checkout. If a file type OSTree
// cannot manage is found, it returns (true, nil, nil) meaning "skip
// deduplication for this layer".
func fixFiles(dir string, usermode bool) (bool, []string, error) {
	// Sentinel used to abort the walk early without treating it as a failure.
	var SkipOstree = errors.New("skip ostree deduplication")
	var whiteouts []string
	err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if info.Mode()&(os.ModeNamedPipe|os.ModeSocket|os.ModeDevice) != 0 {
			if !usermode {
				stat, ok := info.Sys().(*syscall.Stat_t)
				if !ok {
					return errors.New("not syscall.Stat_t")
				}
				// A character device with rdev 0 is an overlayfs whiteout marker.
				if stat.Rdev == 0 && (stat.Mode&unix.S_IFCHR) != 0 {
					whiteouts = append(whiteouts, path)
					return nil
				}
			}
			// Skip the ostree deduplication if we encounter a file type that
			// ostree does not manage.
			return SkipOstree
		}
		if info.IsDir() {
			if usermode {
				if err := os.Chmod(path, info.Mode()|0700); err != nil {
					return err
				}
			}
		} else if usermode && (info.Mode().IsRegular()) {
			if err := os.Chmod(path, info.Mode()|0600); err != nil {
				return err
			}
		}
		return nil
	})
	if err == SkipOstree {
		return true, nil, nil
	}
	if err != nil {
		return false, nil, err
	}
	return false, whiteouts, nil
}
// ConvertToOSTree commits the layer directory `root` (identified by `id`) into
// the OSTree repository at repoLocation, deletes the original directory, and
// checks it back out with hardlinks so identical files are deduplicated across
// layers. Whiteout device nodes are removed before the commit and recreated
// after checkout, since OSTree cannot store them.
func ConvertToOSTree(repoLocation, root, id string) error {
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()
	repo, err := otbuiltin.OpenRepo(repoLocation)
	if err != nil {
		return errors.Wrap(err, "could not open the OSTree repository")
	}

	skip, whiteouts, err := fixFiles(root, os.Getuid() != 0)
	if err != nil {
		return errors.Wrap(err, "could not prepare the OSTree directory")
	}
	// The layer contains file types OSTree cannot manage; leave it as-is.
	if skip {
		return nil
	}

	if _, err := repo.PrepareTransaction(); err != nil {
		return errors.Wrap(err, "could not prepare the OSTree transaction")
	}
	// (removed: a second `if skip { return nil }` here was dead code — skip
	// already caused a return before the transaction was prepared)

	commitOpts := otbuiltin.NewCommitOptions()
	commitOpts.Timestamp = time.Now()
	commitOpts.LinkCheckoutSpeedup = true
	commitOpts.Parent = "0000000000000000000000000000000000000000000000000000000000000000"

	branch := fmt.Sprintf("containers-storage/%s", id)

	// OSTree cannot represent whiteout device nodes; drop them for the commit.
	for _, w := range whiteouts {
		if err := os.Remove(w); err != nil {
			return errors.Wrap(err, "could not delete whiteout file")
		}
	}

	if _, err := repo.Commit(root, branch, commitOpts); err != nil {
		return errors.Wrap(err, "could not commit the layer")
	}

	if _, err := repo.CommitTransaction(); err != nil {
		return errors.Wrap(err, "could not complete the OSTree transaction")
	}

	// Replace the original directory with a hardlinked checkout from the repo.
	if err := system.EnsureRemoveAll(root); err != nil {
		return errors.Wrap(err, "could not delete layer")
	}

	checkoutOpts := otbuiltin.NewCheckoutOptions()
	checkoutOpts.RequireHardlinks = true
	checkoutOpts.Whiteouts = false
	if err := otbuiltin.Checkout(repoLocation, root, branch, checkoutOpts); err != nil {
		return errors.Wrap(err, "could not checkout from OSTree")
	}

	// Restore the whiteout markers removed before the commit.
	for _, w := range whiteouts {
		if err := unix.Mknod(w, unix.S_IFCHR, 0); err != nil {
			return errors.Wrap(err, "could not recreate whiteout file")
		}
	}

	return nil
}
// CreateOSTreeRepository initializes an OSTree repository at repoLocation,
// owned by rootUID/rootGID, if one does not already exist. An existing
// directory is left untouched; any stat error other than "not exist" is
// returned as-is.
func CreateOSTreeRepository(repoLocation string, rootUID int, rootGID int) error {
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()

	_, err := os.Stat(repoLocation)
	if err != nil && !os.IsNotExist(err) {
		return err
	} else if err != nil {
		// Directory is missing: create it with the requested ownership and
		// initialize an empty repository inside it.
		if err := idtools.MkdirAllAs(repoLocation, 0700, rootUID, rootGID); err != nil {
			return errors.Wrap(err, "could not create OSTree repository directory: %v")
		}

		if _, err := otbuiltin.Init(repoLocation, otbuiltin.NewInitOptions()); err != nil {
			return errors.Wrap(err, "could not create OSTree repository")
		}
	}
	return nil
}
// openRepo opens the OSTree repository at path via the C API and returns the
// raw OstreeRepo handle. On success the caller owns the handle and must
// release it with C.g_object_unref; on failure the handle is already released
// and the GError is converted to a Go error.
func openRepo(path string) (*C.struct_OstreeRepo, error) {
	var cerr *C.GError
	cpath := C.CString(path)
	defer C.free(unsafe.Pointer(cpath))
	pathc := C.g_file_new_for_path(cpath)
	defer C.g_object_unref(C.gpointer(pathc))
	repo := C.ostree_repo_new(pathc)
	r := glib.GoBool(glib.GBoolean(C.ostree_repo_open(repo, nil, &cerr)))
	if !r {
		C.g_object_unref(C.gpointer(repo))
		return nil, glib.ConvertGError(glib.ToGError(unsafe.Pointer(cerr)))
	}
	return repo, nil
}
// DeleteOSTree removes the ref ("containers-storage/<id>") for the given layer
// id from the repository at repoLocation. The underlying objects are not
// pruned here; only the branch ref is dropped.
func DeleteOSTree(repoLocation, id string) error {
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()

	repo, err := openRepo(repoLocation)
	if err != nil {
		return err
	}
	defer C.g_object_unref(C.gpointer(repo))

	branch := fmt.Sprintf("containers-storage/%s", id)

	cbranch := C.CString(branch)
	defer C.free(unsafe.Pointer(cbranch))

	var cerr *C.GError

	// Passing a nil checksum deletes the ref immediately.
	r := glib.GoBool(glib.GBoolean(C.ostree_repo_set_ref_immediate(repo, nil, cbranch, nil, nil, &cerr)))
	if !r {
		return glib.ConvertGError(glib.ToGError(unsafe.Pointer(cerr)))
	}
	return nil
}
| apache-2.0 |
alphaBenj/zipline | zipline/utils/final.py | 3879 | from abc import ABCMeta, abstractmethod
from six import with_metaclass, iteritems
# Consistent error to be thrown in various cases regarding overriding
# `final` attributes.
# NOTE: a single shared exception instance is raised everywhere, so callers
# always see the same message and identity.
_type_error = TypeError('Cannot override final attribute')
def bases_mro(bases):
    """Yield every class in each base's MRO, in attribute-lookup order.

    Classes that appear in multiple MROs are yielded once per appearance;
    callers only care about membership, so duplicates are harmless.
    """
    for parent in bases:
        for klass in parent.__mro__:
            yield klass
def is_final(name, mro):
    """Return True if ``name`` is declared as a ``final`` object anywhere
    in ``mro``.

    The class ``__dict__``s are inspected directly because ``final`` objects
    are descriptors: a normal attribute lookup would invoke ``__get__`` and
    return the wrapped value instead of the ``final`` wrapper itself.
    """
    for klass in bases_mro(mro):
        candidate = getattr(klass, '__dict__', {}).get(name)
        if isinstance(candidate, final):
            return True
    return False
class FinalMeta(type):
    """A metaclass for classes that want to prevent subclasses from
    overriding some methods or attributes marked ``final``.
    """
    def __new__(mcls, name, bases, dict_):
        # Reject the class at construction time if it tries to redefine any
        # attribute that a base class declared `final`.
        for k, v in iteritems(dict_):
            if is_final(k, bases):
                raise _type_error
        setattr_ = dict_.get('__setattr__')
        if setattr_ is None:
            # No `__setattr__` was explicitly defined, look up the super
            # class's. `bases[0]` will have a `__setattr__` because
            # `object` does so we don't need to worry about the mro.
            setattr_ = bases[0].__setattr__
        if not is_final('__setattr__', bases) \
           and not isinstance(setattr_, final):
            # implicitly make the `__setattr__` a `final` object so that
            # users cannot just avoid the descriptor protocol.
            dict_['__setattr__'] = final(setattr_)
        return super(FinalMeta, mcls).__new__(mcls, name, bases, dict_)

    def __setattr__(self, name, value):
        """This stops the `final` attributes from being reassigned on the
        class object itself (e.g. ``C.f = something``).
        """
        if is_final(name, self.__mro__):
            raise _type_error
        super(FinalMeta, self).__setattr__(name, value)
class final(with_metaclass(ABCMeta)):
    """
    An attribute that cannot be overridden.
    This is like the final modifier in Java.

    Example usage:
    >>> from six import with_metaclass
    >>> class C(with_metaclass(FinalMeta, object)):
    ...     @final
    ...     def f(self):
    ...         return 'value'
    ...

    This constructs a class with final method `f`. This cannot be overridden
    on the class object or on any instance. You cannot override this by
    subclassing `C`; attempting to do so will raise a `TypeError` at class
    construction time.
    """
    def __new__(cls, attr):
        # Decide if this is a method wrapper or an attribute wrapper.
        # We are going to cache the `callable` check by creating a
        # method or attribute wrapper: descriptors get `finaldescriptor`,
        # plain values get `finalvalue`.
        if hasattr(attr, '__get__'):
            return object.__new__(finaldescriptor)
        else:
            return object.__new__(finalvalue)

    def __init__(self, attr):
        # The wrapped attribute (a descriptor or a plain value).
        self._attr = attr

    def __set__(self, instance, value):
        """
        `final` objects cannot be reassigned. This is the most important
        concept about `final`s.

        Unlike a `property` object, this will raise a `TypeError` when you
        attempt to reassign it.
        """
        raise _type_error

    @abstractmethod
    def __get__(self, instance, owner):
        raise NotImplementedError('__get__')
class finalvalue(final):
    """
    A wrapper for a non-descriptor attribute.

    `__get__` hands back the wrapped object unchanged, ignoring the instance
    and owner, so it behaves like a plain class attribute.
    """
    def __get__(self, instance, owner):
        return self._attr
class finaldescriptor(final):
    """
    A final wrapper around a descriptor.

    Access is delegated to the wrapped descriptor's `__get__`, so methods,
    properties, etc. keep their normal binding behavior.
    """
    def __get__(self, instance, owner):
        return self._attr.__get__(instance, owner)
| apache-2.0 |
mihn/sputnik | src/main/java/pl/touk/sputnik/review/Paths.java | 333 | package pl.touk.sputnik.review;
/**
 * Shared path fragments used when classifying Java source file locations.
 * Interface fields are implicitly {@code public static final}, so the
 * modifiers are omitted.
 */
public interface Paths {
    /** Regex matching everything up to and including a Maven-style source root. */
    String ENTRY_REGEX = ".*src/(main|test)/java/";
    String JAVA = "java/";
    String SRC_MAIN = "src/main/";
    String SRC_TEST = "src/test/";
    String DOT = ".";
}
| apache-2.0 |
medicayun/medicayundicom | dcm4che14/tags/DCM4JBOSS_2_4_5/src/java/org/dcm4che/srom/PNameContent.java | 2688 | /*$Id: PNameContent.java 3493 2002-07-14 16:03:36Z gunterze $*/
/*****************************************************************************
* *
* Copyright (c) 2001,2002 by TIANI MEDGRAPH AG <gunter.zeilinger@tiani.com>*
* *
* This file is part of dcm4che. *
* *
* This library is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published *
* by the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA *
* *
*****************************************************************************/
package org.dcm4che.srom;
/**
* The <code>PNameContent</code> interface represents a
* <i>DICOM SR Person Name Content</i> of value type <code>PNAME</code>.
* <br>
*
* Person name of the person whose role is described by the
* <i>Concept Name</i>.
*
* @author gunter.zeilinger@tiani.com
* @version 1.0
*
* @see "DICOM Part 3: Information Object Definitions,
* Annex C.17.3 SR Document Content Module"
*/
public interface PNameContent extends Content {

    // Constants -----------------------------------------------------

    // Public --------------------------------------------------------

    /**
     * Returns the value of the <i>Person Name</i> field.
     * <br>DICOM Tag: <code>(0040,A123)</code>
     * <br>Tag Name: <code>Person Name</code>
     *
     * @return the value of the <i>Person Name</i> field.
     */
    public String getPName();

    /**
     * Sets the value of the <i>Person Name</i> field.
     * <br>DICOM Tag: <code>(0040,A123)</code>
     *
     * @param pname the new value of the <i>Person Name</i> field.
     */
    public void setPName(String pname);

}//end interface PNameContent
| apache-2.0 |
nstopkimsk/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/sender/UdpDataSenderType.java | 133 | package com.navercorp.pinpoint.profiler.sender;
/**
 * Transport flavor used by the UDP data sender.
 *
 * @author Taejin Koo
 */
public enum UdpDataSenderType {
    OIO,
    NIO;
}
| apache-2.0 |
Huawei-OSG/kubernetes | cmd/libs/go2idl/generator/execute.go | 8507 | /*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generator
import (
"bytes"
"fmt"
"go/format"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"k8s.io/kubernetes/cmd/libs/go2idl/namer"
"k8s.io/kubernetes/cmd/libs/go2idl/types"
)
func errs2strings(errors []error) []string {
strs := make([]string, len(errors))
for i := range errors {
strs[i] = errors[i].Error()
}
return strs
}
// ExecutePackages runs the generators for every package in 'packages'. 'outDir'
// is the base directory in which to place all the generated packages; it
// should be a physical path on disk, not an import path. e.g.:
// /path/to/home/path/to/gopath/src/
// Each package has its import path already, this will be appended to 'outDir'.
// All packages are attempted; errors are collected and reported together.
func (c *Context) ExecutePackages(outDir string, packages Packages) error {
	var errs []error
	for _, p := range packages {
		if err := c.ExecutePackage(outDir, p); err != nil {
			errs = append(errs, err)
		}
	}
	if len(errs) == 0 {
		return nil
	}
	return fmt.Errorf("some packages had errors:\n%v\n", strings.Join(errs2strings(errs), "\n"))
}
// DefaultFileType implements assembly and verification of generated files
// in terms of two pluggable callbacks:
//   - Format: post-processes the assembled bytes (e.g. gofmt for Go files).
//   - Assemble: writes the logical File structure to the given writer.
type DefaultFileType struct {
	Format   func([]byte) ([]byte, error)
	Assemble func(io.Writer, *File)
}
// AssembleFile assembles f, formats the result, and writes it to pathname.
// If formatting fails, the unformatted bytes are still written (so the
// broken output can be inspected) and the formatting error is returned.
func (ft DefaultFileType) AssembleFile(f *File, pathname string) error {
	log.Printf("Assembling file %q", pathname)
	destFile, err := os.Create(pathname)
	if err != nil {
		return err
	}
	defer destFile.Close()
	b := &bytes.Buffer{}
	et := NewErrorTracker(b)
	ft.Assemble(et, f)
	if et.Error() != nil {
		return et.Error()
	}
	if formatted, err := ft.Format(b.Bytes()); err != nil {
		err = fmt.Errorf("unable to format file %q (%v).", pathname, err)
		// Write the file anyway, so they can see what's going wrong and fix the generator.
		if _, err2 := destFile.Write(b.Bytes()); err2 != nil {
			return err2
		}
		return err
	} else {
		_, err = destFile.Write(formatted)
		return err
	}
}
// VerifyFile checks that the file at pathname matches, byte for byte, what
// assembling and formatting f would produce. Nothing is written; on a
// mismatch the returned error shows the first point of divergence
// (truncated to 100 bytes per side).
func (ft DefaultFileType) VerifyFile(f *File, pathname string) error {
	log.Printf("Verifying file %q", pathname)
	friendlyName := filepath.Join(f.PackageName, f.Name)
	b := &bytes.Buffer{}
	et := NewErrorTracker(b)
	ft.Assemble(et, f)
	if et.Error() != nil {
		return et.Error()
	}
	formatted, err := ft.Format(b.Bytes())
	if err != nil {
		return fmt.Errorf("unable to format the output for %q: %v", friendlyName, err)
	}
	existing, err := ioutil.ReadFile(pathname)
	if err != nil {
		return fmt.Errorf("unable to read file %q for comparison: %v", friendlyName, err)
	}
	// bytes.Equal is the idiomatic equality test; bytes.Compare is only for
	// ordering (this was bytes.Compare(...) == 0, which go vet flags).
	if bytes.Equal(formatted, existing) {
		return nil
	}
	// Be nice and find the first place where they differ
	i := 0
	for i < len(formatted) && i < len(existing) && formatted[i] == existing[i] {
		i++
	}
	eDiff, fDiff := existing[i:], formatted[i:]
	if len(eDiff) > 100 {
		eDiff = eDiff[:100]
	}
	if len(fDiff) > 100 {
		fDiff = fDiff[:100]
	}
	return fmt.Errorf("output for %q differs; first existing/expected diff: \n %q\n %q", friendlyName, string(eDiff), string(fDiff))
}
// assembleGolangFile writes f as a Go source file: header comment, package
// clause, import block, var block, const block, then the body. Empty
// sections are omitted entirely.
func assembleGolangFile(w io.Writer, f *File) {
	w.Write(f.Header)
	fmt.Fprintf(w, "package %v\n\n", f.PackageName)
	if len(f.Imports) > 0 {
		fmt.Fprint(w, "import (\n")
		// TODO: sort imports like goimports does.
		for i := range f.Imports {
			if strings.Contains(i, "\"") {
				// they included quotes, or are using the
				// `name "path/to/pkg"` format.
				fmt.Fprintf(w, "\t%s\n", i)
			} else {
				fmt.Fprintf(w, "\t%q\n", i)
			}
		}
		fmt.Fprint(w, ")\n\n")
	}
	if f.Vars.Len() > 0 {
		fmt.Fprint(w, "var (\n")
		w.Write(f.Vars.Bytes())
		fmt.Fprint(w, ")\n\n")
	}
	if f.Consts.Len() > 0 {
		fmt.Fprint(w, "const (\n")
		w.Write(f.Consts.Bytes())
		fmt.Fprint(w, ")\n\n")
	}
	w.Write(f.Body.Bytes())
}
// NewGolangFile returns a DefaultFileType that assembles Go source files and
// formats them with go/format (gofmt).
func NewGolangFile() *DefaultFileType {
	return &DefaultFileType{
		Format:   format.Source,
		Assemble: assembleGolangFile,
	}
}
// format should be one line only, and not end with \n.
func addIndentHeaderComment(b *bytes.Buffer, format string, args ...interface{}) {
if b.Len() > 0 {
fmt.Fprintf(b, "\n// "+format+"\n", args...)
} else {
fmt.Fprintf(b, "// "+format+"\n", args...)
}
}
// filteredBy returns a shallow copy of c whose Order contains only the types
// accepted by predicate f. c itself is not modified.
func (c *Context) filteredBy(f func(*Context, *types.Type) bool) *Context {
	filtered := *c
	filtered.Order = []*types.Type{}
	for _, t := range c.Order {
		if !f(c, t) {
			continue
		}
		filtered.Order = append(filtered.Order, t)
	}
	return &filtered
}
// addNameSystems makes a new context that inherits c.Namers plus the entries
// in 'namers'; on a name collision the namer in 'namers' wins. A nil
// argument returns c unchanged.
func (c *Context) addNameSystems(namers namer.NameSystems) *Context {
	if namers == nil {
		return c
	}
	merged := *c
	// Copy the existing name systems so we don't corrupt the parent context.
	merged.Namers = namer.NameSystems{}
	for name, n := range c.Namers {
		merged.Namers[name] = n
	}
	for name, n := range namers {
		merged.Namers[name] = n
	}
	return &merged
}
// ExecutePackage executes a single package. 'outDir' is the base directory in
// which to place the package; it should be a physical path on disk, not an
// import path. e.g.: '/path/to/home/path/to/gopath/src/' The package knows its
// import path already, this will be appended to 'outDir'.
// Generators writing to the same filename share one File; all files are
// assembled (or, with c.Verify, compared against disk) at the end.
func (c *Context) ExecutePackage(outDir string, p Package) error {
	path := filepath.Join(outDir, p.Path())
	log.Printf("Processing package %q, disk location %q", p.Name(), path)
	// Filter out any types the *package* doesn't care about.
	packageContext := c.filteredBy(p.Filter)
	os.MkdirAll(path, 0755)
	files := map[string]*File{}
	for _, g := range p.Generators(packageContext) {
		// Filter out types the *generator* doesn't care about.
		genContext := packageContext.filteredBy(g.Filter)
		// Now add any extra name systems defined by this generator.
		genContext = genContext.addNameSystems(g.Namers(genContext))
		fileType := g.FileType()
		if len(fileType) == 0 {
			return fmt.Errorf("generator %q must specify a file type", g.Name())
		}
		f := files[g.Filename()]
		if f == nil {
			// This is the first generator to reference this file, so start it.
			f = &File{
				Name:        g.Filename(),
				FileType:    fileType,
				PackageName: p.Name(),
				Header:      p.Header(g.Filename()),
				Imports:     map[string]struct{}{},
			}
			files[f.Name] = f
		} else {
			if f.FileType != g.FileType() {
				return fmt.Errorf("file %q already has type %q, but generator %q wants to use type %q", f.Name, f.FileType, g.Name(), g.FileType())
			}
		}
		if vars := g.PackageVars(genContext); len(vars) > 0 {
			addIndentHeaderComment(&f.Vars, "Package-wide variables from generator %q.", g.Name())
			for _, v := range vars {
				if _, err := fmt.Fprintf(&f.Vars, "%s\n", v); err != nil {
					return err
				}
			}
		}
		// BUGFIX: this previously called g.PackageVars again (copy/paste),
		// which duplicated the package variables into the const block; the
		// const section must come from g.PackageConsts.
		if consts := g.PackageConsts(genContext); len(consts) > 0 {
			addIndentHeaderComment(&f.Consts, "Package-wide consts from generator %q.", g.Name())
			for _, v := range consts {
				if _, err := fmt.Fprintf(&f.Consts, "%s\n", v); err != nil {
					return err
				}
			}
		}
		if err := genContext.executeBody(&f.Body, g); err != nil {
			return err
		}
		if imports := g.Imports(genContext); len(imports) > 0 {
			for _, i := range imports {
				f.Imports[i] = struct{}{}
			}
		}
	}
	var errs []error
	for _, f := range files {
		finalPath := filepath.Join(path, f.Name)
		assembler, ok := c.FileTypes[f.FileType]
		if !ok {
			return fmt.Errorf("the file type %q registered for file %q does not exist in the context", f.FileType, f.Name)
		}
		var err error
		if c.Verify {
			err = assembler.VerifyFile(f, finalPath)
		} else {
			err = assembler.AssembleFile(f, finalPath)
		}
		if err != nil {
			errs = append(errs, err)
		}
	}
	if len(errs) > 0 {
		return fmt.Errorf("errors in package %q:\n%v\n", p.Name(), strings.Join(errs2strings(errs), "\n"))
	}
	return nil
}
// executeBody runs the generator's Init and then GenerateType for every type
// in the context's order, funneling output through an ErrorTracker so write
// errors surface at the end.
func (c *Context) executeBody(w io.Writer, generator Generator) error {
	et := NewErrorTracker(w)
	if initErr := generator.Init(c, et); initErr != nil {
		return initErr
	}
	for _, t := range c.Order {
		if genErr := generator.GenerateType(c, t, et); genErr != nil {
			return genErr
		}
	}
	return et.Error()
}
| apache-2.0 |
eyakubovich/rkt | Godeps/_workspace/src/github.com/vishvananda/netlink/nl/xfrm_linux.go | 6360 | package nl
import (
"bytes"
"net"
"unsafe"
)
// Infinity for packet and byte counts
const (
XFRM_INF = ^uint64(0)
)
// Message Types
const (
XFRM_MSG_BASE = 0x10
XFRM_MSG_NEWSA = 0x10
XFRM_MSG_DELSA = 0x11
XFRM_MSG_GETSA = 0x12
XFRM_MSG_NEWPOLICY = 0x13
XFRM_MSG_DELPOLICY = 0x14
XFRM_MSG_GETPOLICY = 0x15
XFRM_MSG_ALLOCSPI = 0x16
XFRM_MSG_ACQUIRE = 0x17
XFRM_MSG_EXPIRE = 0x18
XFRM_MSG_UPDPOLICY = 0x19
XFRM_MSG_UPDSA = 0x1a
XFRM_MSG_POLEXPIRE = 0x1b
XFRM_MSG_FLUSHSA = 0x1c
XFRM_MSG_FLUSHPOLICY = 0x1d
XFRM_MSG_NEWAE = 0x1e
XFRM_MSG_GETAE = 0x1f
XFRM_MSG_REPORT = 0x20
XFRM_MSG_MIGRATE = 0x21
XFRM_MSG_NEWSADINFO = 0x22
XFRM_MSG_GETSADINFO = 0x23
XFRM_MSG_NEWSPDINFO = 0x24
XFRM_MSG_GETSPDINFO = 0x25
XFRM_MSG_MAPPING = 0x26
XFRM_MSG_MAX = 0x26
XFRM_NR_MSGTYPES = 0x17
)
// Attribute types
const (
/* Netlink message attributes. */
XFRMA_UNSPEC = 0x00
XFRMA_ALG_AUTH = 0x01 /* struct xfrm_algo */
XFRMA_ALG_CRYPT = 0x02 /* struct xfrm_algo */
XFRMA_ALG_COMP = 0x03 /* struct xfrm_algo */
XFRMA_ENCAP = 0x04 /* struct xfrm_algo + struct xfrm_encap_tmpl */
XFRMA_TMPL = 0x05 /* 1 or more struct xfrm_user_tmpl */
XFRMA_SA = 0x06 /* struct xfrm_usersa_info */
XFRMA_POLICY = 0x07 /* struct xfrm_userpolicy_info */
XFRMA_SEC_CTX = 0x08 /* struct xfrm_sec_ctx */
XFRMA_LTIME_VAL = 0x09
XFRMA_REPLAY_VAL = 0x0a
XFRMA_REPLAY_THRESH = 0x0b
XFRMA_ETIMER_THRESH = 0x0c
XFRMA_SRCADDR = 0x0d /* xfrm_address_t */
XFRMA_COADDR = 0x0e /* xfrm_address_t */
XFRMA_LASTUSED = 0x0f /* unsigned long */
XFRMA_POLICY_TYPE = 0x10 /* struct xfrm_userpolicy_type */
XFRMA_MIGRATE = 0x11
XFRMA_ALG_AEAD = 0x12 /* struct xfrm_algo_aead */
XFRMA_KMADDRESS = 0x13 /* struct xfrm_user_kmaddress */
XFRMA_ALG_AUTH_TRUNC = 0x14 /* struct xfrm_algo_auth */
XFRMA_MARK = 0x15 /* struct xfrm_mark */
XFRMA_TFCPAD = 0x16 /* __u32 */
XFRMA_REPLAY_ESN_VAL = 0x17 /* struct xfrm_replay_esn */
XFRMA_SA_EXTRA_FLAGS = 0x18 /* __u32 */
XFRMA_MAX = 0x18
)
const (
SizeofXfrmAddress = 0x10
SizeofXfrmSelector = 0x38
SizeofXfrmLifetimeCfg = 0x40
SizeofXfrmLifetimeCur = 0x20
SizeofXfrmId = 0x18
)
// typedef union {
//   __be32 a4;
//   __be32 a6[4];
// } xfrm_address_t;

// XfrmAddress mirrors the kernel's xfrm_address_t union: either an IPv4
// address in the first 4 bytes or a full 16-byte IPv6 address.
type XfrmAddress [SizeofXfrmAddress]byte

// ToIP returns the address as a 16-byte net.IP. When bytes 4..15 are all
// zero the value is treated as IPv4 and returned in IPv4-mapped form
// (::ffff:a.b.c.d); otherwise all 16 bytes are used as an IPv6 address.
func (x *XfrmAddress) ToIP() net.IP {
	var empty = [12]byte{}
	ip := make(net.IP, net.IPv6len)
	if bytes.Equal(x[4:16], empty[:]) {
		ip[10] = 0xff
		ip[11] = 0xff
		copy(ip[12:16], x[0:4])
	} else {
		copy(ip[:], x[:])
	}
	return ip
}
// ToIPNet combines the address with prefixlen into a *net.IPNet, using a
// 32-bit mask for IPv4 addresses and a 128-bit mask otherwise.
// (The composite literals now use keyed fields; the unkeyed form
// &net.IPNet{ip, mask} fails go vet's composites check.)
func (x *XfrmAddress) ToIPNet(prefixlen uint8) *net.IPNet {
	ip := x.ToIP()
	bits := 128
	if GetIPFamily(ip) == FAMILY_V4 {
		bits = 32
	}
	return &net.IPNet{IP: ip, Mask: net.CIDRMask(int(prefixlen), bits)}
}
// FromIP stores ip into x: IPv4 addresses occupy the first 4 bytes with the
// remainder zeroed; IPv6 addresses fill all 16 bytes. An ip shorter than 4
// bytes only zeroes bytes 4..15 and leaves the first 4 bytes untouched —
// NOTE(review): looks intentional, but confirm callers rely on that.
func (x *XfrmAddress) FromIP(ip net.IP) {
	var empty = [16]byte{}
	if len(ip) < net.IPv4len {
		copy(x[4:16], empty[:])
	} else if GetIPFamily(ip) == FAMILY_V4 {
		copy(x[0:4], ip.To4()[0:4])
		copy(x[4:16], empty[:12])
	} else {
		copy(x[0:16], ip.To16()[0:16])
	}
}
// DeserializeXfrmAddress reinterprets the first SizeofXfrmAddress bytes of b
// as an XfrmAddress without copying; b must be at least that long (the slice
// expression panics otherwise) and the result aliases b's storage.
func DeserializeXfrmAddress(b []byte) *XfrmAddress {
	return (*XfrmAddress)(unsafe.Pointer(&b[0:SizeofXfrmAddress][0]))
}

// Serialize returns the raw bytes of the address, aliasing msg's storage.
func (msg *XfrmAddress) Serialize() []byte {
	return (*(*[SizeofXfrmAddress]byte)(unsafe.Pointer(msg)))[:]
}
// struct xfrm_selector {
// xfrm_address_t daddr;
// xfrm_address_t saddr;
// __be16 dport;
// __be16 dport_mask;
// __be16 sport;
// __be16 sport_mask;
// __u16 family;
// __u8 prefixlen_d;
// __u8 prefixlen_s;
// __u8 proto;
// int ifindex;
// __kernel_uid32_t user;
// };
// XfrmSelector mirrors struct xfrm_selector (see the C declaration above).
// Fields marked "big endian" hold network byte order values.
type XfrmSelector struct {
	Daddr      XfrmAddress
	Saddr      XfrmAddress
	Dport      uint16 // big endian
	DportMask  uint16 // big endian
	Sport      uint16 // big endian
	SportMask  uint16 // big endian
	Family     uint16
	PrefixlenD uint8
	PrefixlenS uint8
	Proto      uint8
	Pad        [3]byte
	Ifindex    int32
	User       uint32
}

// Len returns the wire size of the selector in bytes.
func (msg *XfrmSelector) Len() int {
	return SizeofXfrmSelector
}

// DeserializeXfrmSelector reinterprets b's first SizeofXfrmSelector bytes in
// place (no copy); the result aliases b's storage.
func DeserializeXfrmSelector(b []byte) *XfrmSelector {
	return (*XfrmSelector)(unsafe.Pointer(&b[0:SizeofXfrmSelector][0]))
}

// Serialize returns the raw bytes of msg, aliasing its storage.
func (msg *XfrmSelector) Serialize() []byte {
	return (*(*[SizeofXfrmSelector]byte)(unsafe.Pointer(msg)))[:]
}
// struct xfrm_lifetime_cfg {
// __u64 soft_byte_limit;
// __u64 hard_byte_limit;
// __u64 soft_packet_limit;
// __u64 hard_packet_limit;
// __u64 soft_add_expires_seconds;
// __u64 hard_add_expires_seconds;
// __u64 soft_use_expires_seconds;
// __u64 hard_use_expires_seconds;
// };
//
// XfrmLifetimeCfg mirrors struct xfrm_lifetime_cfg (see the C declaration
// above): soft/hard byte, packet, and expiry-time limits.
type XfrmLifetimeCfg struct {
	SoftByteLimit         uint64
	HardByteLimit         uint64
	SoftPacketLimit       uint64
	HardPacketLimit       uint64
	SoftAddExpiresSeconds uint64
	HardAddExpiresSeconds uint64
	SoftUseExpiresSeconds uint64
	HardUseExpiresSeconds uint64
}

// Len returns the wire size in bytes.
func (msg *XfrmLifetimeCfg) Len() int {
	return SizeofXfrmLifetimeCfg
}

// DeserializeXfrmLifetimeCfg reinterprets b's first SizeofXfrmLifetimeCfg
// bytes in place (no copy); the result aliases b's storage.
func DeserializeXfrmLifetimeCfg(b []byte) *XfrmLifetimeCfg {
	return (*XfrmLifetimeCfg)(unsafe.Pointer(&b[0:SizeofXfrmLifetimeCfg][0]))
}

// Serialize returns the raw bytes of msg, aliasing its storage.
func (msg *XfrmLifetimeCfg) Serialize() []byte {
	return (*(*[SizeofXfrmLifetimeCfg]byte)(unsafe.Pointer(msg)))[:]
}
// struct xfrm_lifetime_cur {
// __u64 bytes;
// __u64 packets;
// __u64 add_time;
// __u64 use_time;
// };
// XfrmLifetimeCur mirrors struct xfrm_lifetime_cur (see the C declaration
// above): current byte/packet counters and timestamps.
type XfrmLifetimeCur struct {
	Bytes   uint64
	Packets uint64
	AddTime uint64
	UseTime uint64
}

// Len returns the wire size in bytes.
func (msg *XfrmLifetimeCur) Len() int {
	return SizeofXfrmLifetimeCur
}

// DeserializeXfrmLifetimeCur reinterprets b's first SizeofXfrmLifetimeCur
// bytes in place (no copy); the result aliases b's storage.
func DeserializeXfrmLifetimeCur(b []byte) *XfrmLifetimeCur {
	return (*XfrmLifetimeCur)(unsafe.Pointer(&b[0:SizeofXfrmLifetimeCur][0]))
}

// Serialize returns the raw bytes of msg, aliasing its storage.
func (msg *XfrmLifetimeCur) Serialize() []byte {
	return (*(*[SizeofXfrmLifetimeCur]byte)(unsafe.Pointer(msg)))[:]
}
// struct xfrm_id {
// xfrm_address_t daddr;
// __be32 spi;
// __u8 proto;
// };
// XfrmId mirrors struct xfrm_id (see the C declaration above); Spi is in
// network byte order.
type XfrmId struct {
	Daddr XfrmAddress
	Spi   uint32 // big endian
	Proto uint8
	Pad   [3]byte
}

// Len returns the wire size in bytes.
func (msg *XfrmId) Len() int {
	return SizeofXfrmId
}

// DeserializeXfrmId reinterprets b's first SizeofXfrmId bytes in place
// (no copy); the result aliases b's storage.
func DeserializeXfrmId(b []byte) *XfrmId {
	return (*XfrmId)(unsafe.Pointer(&b[0:SizeofXfrmId][0]))
}

// Serialize returns the raw bytes of msg, aliasing its storage.
func (msg *XfrmId) Serialize() []byte {
	return (*(*[SizeofXfrmId]byte)(unsafe.Pointer(msg)))[:]
}
| apache-2.0 |
wsargent/playframework | documentation/manual/working/javaGuide/main/forms/code/javaguide/forms/html/User.java | 477 | /*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package javaguide.forms.html;
/**
 * Plain JavaBean carrying an email/password pair through standard
 * getter/setter accessors. No validation is performed here.
 */
public class User {

    private String email;
    private String password;

    public void setEmail(String email) {
        this.email = email;
    }

    public String getEmail() {
        return email;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getPassword() {
        return password;
    }
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-cdw/tags/DCM4CHEE_CDW_2_13_1/src/java/org/dcm4chex/cdw/mbean/DirRecordFactory.java | 5370 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.cdw.mbean;
import java.util.ArrayList;
import java.util.HashMap;
import javax.xml.parsers.SAXParserFactory;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.dict.Tags;
import org.dcm4che.media.DirRecord;
import org.dcm4che.srom.Content;
import org.dcm4chex.cdw.common.ConfigurationException;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
* @author gunter.zeilinger@tiani.com
* @version $Revision: 5255 $ $Date: 2007-10-05 16:54:54 +0800 (周五, 05 10月 2007) $
* @since 05.07.2004
*/
class DirRecordFactory {

    private static final DcmObjectFactory dof = DcmObjectFactory.getInstance();

    // Maps a DirRecord type name to the int[] of attribute tags that should
    // be copied into records of that type; populated by MyHandler.
    private HashMap filterForType = new HashMap();

    // Record UID for DirRecord.PRIVATE records; read from the
    // privateRecordUID attribute of the matching <record> element.
    private String privateRecordUID;

    /**
     * SAX handler that fills filterForType (and privateRecordUID) from the
     * XML configuration: each &lt;record type="..."&gt; element contains
     * &lt;attr tag="..."/&gt; children naming the tags to keep.
     */
    private class MyHandler extends DefaultHandler {
        private static final String TYPE = "type";
        private static final String PRIVATE_RECORD_UID = "privateRecordUID";
        private static final String RECORD = "record";
        private static final String TAG = "tag";
        private static final String ATTR = "attr";
        // Type of the <record> element currently being parsed.
        private String type;
        // Tag strings collected for the current record (cleared per record).
        private ArrayList attrs = new ArrayList();

        public void startElement(String uri, String localName, String qName,
                Attributes attributes) throws SAXException {
            if (qName.equals(ATTR)) {
                attrs.add(attributes.getValue(TAG));
            } else if (qName.equals(RECORD)) {
                type = attributes.getValue(TYPE);
                if (type.equals(DirRecord.PRIVATE)) {
                    privateRecordUID = attributes.getValue(PRIVATE_RECORD_UID);
                }
            }
        }

        public void endElement(String uri, String localName, String qName)
                throws SAXException {
            if (qName.equals(RECORD)) {
                // Convert the collected tag names into tag ints and register
                // the filter for this record type.
                int[] filter = new int[attrs.size()];
                for (int i = 0; i < filter.length; i++)
                    filter[i] = Tags.valueOf((String) attrs.get(i));
                filterForType.put(type, filter);
                attrs.clear();
            }
        }
    }

    /**
     * Parses the record-filter configuration at the given URI.
     *
     * @param uri location of the XML configuration
     * @throws ConfigurationException if the document cannot be parsed
     */
    public DirRecordFactory(String uri) throws ConfigurationException {
        try {
            SAXParserFactory.newInstance().newSAXParser().parse(uri,
                    new MyHandler());
        } catch (Exception e) {
            throw new ConfigurationException(
                    "Failed to load record filter from " + uri, e);
        }
    }

    /**
     * Builds a directory record dataset of the given type from obj, keeping
     * only the configured attributes plus any HAS CONCEPT MOD content items.
     *
     * @param type record type key configured in the XML (e.g. DirRecord.PRIVATE)
     * @param obj  source dataset
     * @return new dataset containing the filtered attributes
     * @throws IllegalArgumentException if no filter is configured for type
     */
    public Dataset makeRecord(String type, Dataset obj) {
        int[] filter = (int[]) filterForType.get(type);
        if (filter == null) throw new IllegalArgumentException("type:" + type);
        Dataset keys = dof.newDataset();
        keys.putAll(obj.subSet(filter));
        DcmElement srcSq = obj.get(Tags.ContentSeq);
        if (srcSq != null) {
            // Copy only HAS CONCEPT MOD items from the content sequence.
            DcmElement dstSq = keys.putSQ(Tags.ContentSeq);
            for (int i = 0, n = srcSq.countItems(); i < n; ++i) {
                Dataset item = srcSq.getItem(i);
                if (Content.RelationType.HAS_CONCEPT_MOD.equals(item
                        .getString(Tags.RelationshipType))) {
                    dstSq.addItem(item);
                }
            }
            if (dstSq.isEmpty()) keys.remove(Tags.ContentSeq);
        }
        if (type.equals(DirRecord.PRIVATE)) {
            keys.putUI(Tags.PrivateRecordUID, privateRecordUID);
        }
        return keys;
    }
}
| apache-2.0 |
sergecodd/FireFox-OS | B2G/gecko/mobile/android/base/httpclientandroidlib/HttpStatus.java | 7827 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package ch.boye.httpclientandroidlib;
/**
* Constants enumerating the HTTP status codes.
* All status codes defined in RFC1945 (HTTP/1.0), RFC2616 (HTTP/1.1), and
* RFC2518 (WebDAV) are listed.
*
* @see StatusLine
*
* @since 4.0
*/
public interface HttpStatus {

    // --- 1xx Informational ---

    /** <tt>100 Continue</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_CONTINUE = 100;
    /** <tt>101 Switching Protocols</tt> (HTTP/1.1 - RFC 2616)*/
    public static final int SC_SWITCHING_PROTOCOLS = 101;
    /** <tt>102 Processing</tt> (WebDAV - RFC 2518) */
    public static final int SC_PROCESSING = 102;

    // --- 2xx Success ---

    /** <tt>200 OK</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_OK = 200;
    /** <tt>201 Created</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_CREATED = 201;
    /** <tt>202 Accepted</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_ACCEPTED = 202;
    /** <tt>203 Non Authoritative Information</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_NON_AUTHORITATIVE_INFORMATION = 203;
    /** <tt>204 No Content</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_NO_CONTENT = 204;
    /** <tt>205 Reset Content</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_RESET_CONTENT = 205;
    /** <tt>206 Partial Content</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_PARTIAL_CONTENT = 206;
    /**
     * <tt>207 Multi-Status</tt> (WebDAV - RFC 2518) or <tt>207 Partial Update
     * OK</tt> (HTTP/1.1 - draft-ietf-http-v11-spec-rev-01?)
     */
    public static final int SC_MULTI_STATUS = 207;

    // --- 3xx Redirection ---

    /** <tt>300 Multiple Choices</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_MULTIPLE_CHOICES = 300;
    /** <tt>301 Moved Permanently</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_MOVED_PERMANENTLY = 301;
    /** <tt>302 Moved Temporarily</tt> (Sometimes <tt>Found</tt>) (HTTP/1.0 - RFC 1945) */
    public static final int SC_MOVED_TEMPORARILY = 302;
    /** <tt>303 See Other</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_SEE_OTHER = 303;
    /** <tt>304 Not Modified</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_NOT_MODIFIED = 304;
    /** <tt>305 Use Proxy</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_USE_PROXY = 305;
    /** <tt>307 Temporary Redirect</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_TEMPORARY_REDIRECT = 307;

    // --- 4xx Client Error ---

    /** <tt>400 Bad Request</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_BAD_REQUEST = 400;
    /** <tt>401 Unauthorized</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_UNAUTHORIZED = 401;
    /** <tt>402 Payment Required</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_PAYMENT_REQUIRED = 402;
    /** <tt>403 Forbidden</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_FORBIDDEN = 403;
    /** <tt>404 Not Found</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_NOT_FOUND = 404;
    /** <tt>405 Method Not Allowed</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_METHOD_NOT_ALLOWED = 405;
    /** <tt>406 Not Acceptable</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_NOT_ACCEPTABLE = 406;
    /** <tt>407 Proxy Authentication Required</tt> (HTTP/1.1 - RFC 2616)*/
    public static final int SC_PROXY_AUTHENTICATION_REQUIRED = 407;
    /** <tt>408 Request Timeout</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_REQUEST_TIMEOUT = 408;
    /** <tt>409 Conflict</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_CONFLICT = 409;
    /** <tt>410 Gone</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_GONE = 410;
    /** <tt>411 Length Required</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_LENGTH_REQUIRED = 411;
    /** <tt>412 Precondition Failed</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_PRECONDITION_FAILED = 412;
    /** <tt>413 Request Entity Too Large</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_REQUEST_TOO_LONG = 413;
    /** <tt>414 Request-URI Too Long</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_REQUEST_URI_TOO_LONG = 414;
    /** <tt>415 Unsupported Media Type</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_UNSUPPORTED_MEDIA_TYPE = 415;
    /** <tt>416 Requested Range Not Satisfiable</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_REQUESTED_RANGE_NOT_SATISFIABLE = 416;
    /** <tt>417 Expectation Failed</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_EXPECTATION_FAILED = 417;

    /**
     * Static constant for a 418 error.
     * <tt>418 Unprocessable Entity</tt> (WebDAV drafts?)
     * or <tt>418 Reauthentication Required</tt> (HTTP/1.1 drafts?)
     */
    // not used
    // public static final int SC_UNPROCESSABLE_ENTITY = 418;

    /**
     * Static constant for a 419 error.
     * <tt>419 Insufficient Space on Resource</tt>
     * (WebDAV - draft-ietf-webdav-protocol-05?)
     * or <tt>419 Proxy Reauthentication Required</tt>
     * (HTTP/1.1 drafts?)
     */
    public static final int SC_INSUFFICIENT_SPACE_ON_RESOURCE = 419;
    /**
     * Static constant for a 420 error.
     * <tt>420 Method Failure</tt>
     * (WebDAV - draft-ietf-webdav-protocol-05?)
     */
    public static final int SC_METHOD_FAILURE = 420;
    /** <tt>422 Unprocessable Entity</tt> (WebDAV - RFC 2518) */
    public static final int SC_UNPROCESSABLE_ENTITY = 422;
    /** <tt>423 Locked</tt> (WebDAV - RFC 2518) */
    public static final int SC_LOCKED = 423;
    /** <tt>424 Failed Dependency</tt> (WebDAV - RFC 2518) */
    public static final int SC_FAILED_DEPENDENCY = 424;

    // --- 5xx Server Error ---

    /** <tt>500 Server Error</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_INTERNAL_SERVER_ERROR = 500;
    /** <tt>501 Not Implemented</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_NOT_IMPLEMENTED = 501;
    /** <tt>502 Bad Gateway</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_BAD_GATEWAY = 502;
    /** <tt>503 Service Unavailable</tt> (HTTP/1.0 - RFC 1945) */
    public static final int SC_SERVICE_UNAVAILABLE = 503;
    /** <tt>504 Gateway Timeout</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_GATEWAY_TIMEOUT = 504;
    /** <tt>505 HTTP Version Not Supported</tt> (HTTP/1.1 - RFC 2616) */
    public static final int SC_HTTP_VERSION_NOT_SUPPORTED = 505;
    /** <tt>507 Insufficient Storage</tt> (WebDAV - RFC 2518) */
    public static final int SC_INSUFFICIENT_STORAGE = 507;

}
| apache-2.0 |
denis-vilyuzhanin/gwtbootstrap3-extras | src/main/java/org/gwtbootstrap3/extras/toggleswitch/client/ui/ToggleSwitchRadio.java | 1082 | package org.gwtbootstrap3.extras.toggleswitch.client.ui;
/*
* #%L
* GwtBootstrap3
* %%
* Copyright (C) 2013 - 2014 GwtBootstrap3
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.gwtbootstrap3.extras.toggleswitch.client.ui.base.ToggleSwitchBase;
import com.google.gwt.dom.client.Document;
import com.google.gwt.uibinder.client.UiConstructor;
/**
 * Toggle switch backed by an {@code <input type="radio">} element created
 * with the given group name.
 */
public class ToggleSwitchRadio extends ToggleSwitchBase {

    /**
     * @param name the radio group name for the underlying input element
     */
    @UiConstructor
    public ToggleSwitchRadio(String name) {
        super(Document.get().createRadioInputElement(name));
    }
}
| apache-2.0 |
zwejra/perun | perun-base/src/main/java/cz/metacentrum/perun/core/api/exceptions/WrongAttributeAssignmentException.java | 1579 | package cz.metacentrum.perun.core.api.exceptions;
import cz.metacentrum.perun.core.api.AttributeDefinition;
/**
* Thrown while assigning attribute to wrong entity. For example if you try to set value for the facility to attribute which is only for resources.
*
* @author Slavek Licehammer
*/
public class WrongAttributeAssignmentException extends PerunException {
    // Fixed serialization version; this exception carries no evolving state.
    static final long serialVersionUID = 0;

    // The offending attribute, when the exception was raised for a concrete
    // attribute definition; may be null for the message-only constructors.
    private AttributeDefinition attribute;

    /**
     * Simple constructor with a message
     * @param message message with details about the cause
     */
    public WrongAttributeAssignmentException(String message) {
        super(message);
    }

    /**
     * Constructor with a message and Throwable object
     * @param message message with details about the cause
     * @param cause Throwable that caused throwing of this exception
     */
    public WrongAttributeAssignmentException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Constructor with a Throwable object
     * @param cause Throwable that caused throwing of this exception
     */
    public WrongAttributeAssignmentException(Throwable cause) {
        super(cause);
    }

    /**
     * Constructor with the attribute
     * @param attribute attribute that was supposed to be assigned to the wrong entity
     */
    public WrongAttributeAssignmentException(AttributeDefinition attribute) {
        super(attribute.toString());
        this.attribute = attribute;
    }

    /**
     * Getter for the attribute
     * @return attribute that was supposed to be assigned to the wrong entity
     */
    public AttributeDefinition getAttribute() {
        return attribute;
    }
}
| bsd-2-clause |
RJHsiao/homebrew-cask | Casks/debookee.rb | 1113 | cask 'debookee' do
version '6.2.0'
sha256 '3b40caf5c9662d6dcf127b2d05474fa6d855fdc944558dfbb243ea3b0df5ced5'
# iwaxx.com/debookee was verified as official when first introduced to the cask
url 'https://www.iwaxx.com/debookee/debookee.zip'
appcast 'https://www.iwaxx.com/debookee/appcast.php',
checkpoint: '78654d9057ca16b745a36d283846fa2532d2dc0669b0d7eb02eac074bba6f89c'
name 'Debookee'
homepage 'https://debookee.com/'
depends_on macos: '>= :sierra'
app 'Debookee.app'
uninstall delete: '/Library/PrivilegedHelperTools/com.iwaxx.Debookee.PacketTool',
launchctl: 'com.iwaxx.Debookee.PacketTool'
zap trash: [
'~/Library/Application Support/com.iwaxx.Debookee',
'~/Library/Caches/com.iwaxx.Debookee',
'~/Library/Cookies/com.iwaxx.Debookee.binarycookies',
'~/Library/Logs/Debookee',
'~/Library/Preferences/com.iwaxx.Debookee.plist',
'~/Library/Saved Application State/com.iwaxx.Debookee.savedState',
'~/Library/WebKit/com.iwaxx.Debookee',
]
end
| bsd-2-clause |
tuwien-geoweb-2015/g01-block4 | node_modules/openlayers/node_modules/closure-util/node_modules/get-down/test/lib/misc.spec.js | 3847 | var fs = require('fs');
var mock = require('mock-fs');
var misc = require('../../lib/misc');
var assert = require('../helper').assert;
// Specs for lib/misc — copy(), existingDirectory() and resolveFilePath() —
// exercised against an in-memory filesystem provided by mock-fs.
describe('misc', () => {
  describe('copy()', () => {
    beforeEach(() => {
      mock({
        dir: {
          source1: 'one content',
          source2: 'two content'
        }
      });
    });
    afterEach(() => {
      mock.restore();
    });
    it('returns a promise of copied files', (done) => {
      misc.copy('dir/source1', 'dir/dest1').then(() => {
        assert.isTrue(fs.existsSync('dir/dest1'));
        assert.equal(String(fs.readFileSync('dir/dest1')), 'one content');
        done();
      }, done);
    });
    it('resolves to dest path on success', (done) => {
      misc.copy('dir/source2', 'dir/dest2').then((dest) => {
        assert.equal(dest, 'dir/dest2');
        assert.isTrue(fs.existsSync('dir/dest2'));
        assert.equal(String(fs.readFileSync('dir/dest2')), 'two content');
        done();
      }, done);
    });
    it('is rejected if dest exists', (done) => {
      misc.copy('dir/source1', 'dir/source2').then(
          () => done(new Error('Expected rejection')),
          (err) => {
            assert.instanceOf(err, Error);
            done();
          });
    });
  });
  describe('existingDirectory()', () => {
    beforeEach(() => {
      mock({
        'existing/dir': {},
        'some-file': 'file content'
      });
    });
    afterEach(() => {
      mock.restore();
    });
    it('resolves to path if directory exists', (done) => {
      misc.existingDirectory('existing/dir').then((dir) => {
        assert.equal(dir, 'existing/dir');
        done();
      }, done);
    });
    it('is rejected if directory does not exist', (done) => {
      misc.existingDirectory('bogus/dir').then(
          () => done(new Error('Expected rejection')),
          (err) => {
            assert.instanceOf(err, Error);
            done();
          });
    });
    it('is rejected if entry is a file', (done) => {
      misc.existingDirectory('some-file').then(
          () => done(new Error('Expected rejection')),
          (err) => {
            assert.instanceOf(err, Error);
            done();
          });
    });
  });
  describe('resolveFilePath()', () => {
    beforeEach(() => {
      mock({
        'existing-dir/existing-file': 'file content'
      });
    });
    afterEach(() => {
      mock.restore();
    });
    it('resolves to an existing parent dir plus file', (done) => {
      misc.resolveFilePath('existing-dir/new-file').then((file) => {
        assert.equal(file, 'existing-dir/new-file');
        done();
      }, done);
    });
    it('resolves to an existing dir plus basename', (done) => {
      misc.resolveFilePath('existing-dir', 'new-file').then((file) => {
        assert.equal(file, 'existing-dir/new-file');
        done();
      }, done);
    });
    it('rejects an existing file', (done) => {
      misc.resolveFilePath('existing-dir/existing-file').then(
          () => done(new Error('Expected failure')),
          (err) => {
            assert.instanceOf(err, Error);
            done();
          });
    });
    it('rejects a non-existent parent dir', (done) => {
      misc.resolveFilePath('bogus-dir/some-file').then(
          () => done(new Error('Expected failure')),
          (err) => {
            assert.instanceOf(err, Error);
            done();
          });
    });
  });
});
| bsd-2-clause |
devmynd/homebrew-cask | lib/hbc/utils/tty.rb | 2784 | # originally from Homebrew utils.rb
# originally from Homebrew utils.rb
#
# Builds ANSI SGR escape sequences lazily: color/attribute class methods
# append codes to @sequence, and #to_s flushes them as "\033[...m" (or ""
# when stdout is not a TTY).
class Hbc::Utils::Tty
  # ANSI color numbers; added to 30 for foreground and 40 for background codes.
  COLORS = {
    black:   0,
    red:     1,
    green:   2,
    yellow:  3,
    blue:    4,
    magenta: 5,
    cyan:    6,
    white:   7,
    default: 9,
  }.freeze
  # ANSI SGR attribute codes.
  ATTRIBUTES = {
    reset:         0,
    bold:          1,
    dim:           2,
    italic:        3,
    underline:     4,
    blink:         5,
    inverse:       7,
    invisible:     8,
    strikethrough: 9,
    normal:        22,
  }.freeze
  # Pending SGR codes, consumed and cleared by #to_s.
  @sequence = []
  class << self
    # Define Tty.red / Tty.fg_red / Tty.bg_red (etc.) for every color.
    COLORS.keys.each do |sym|
      define_method(sym) do
        foreground(COLORS[sym])
      end
      define_method("fg_#{sym}".to_sym) do
        foreground(COLORS[sym])
      end
      define_method("bg_#{sym}".to_sym) do
        background(COLORS[sym])
      end
    end
    # Define Tty.bold / Tty.reset (etc.) for every attribute.
    ATTRIBUTES.keys.each do |sym|
      define_method(sym) do
        deferred_emit(ATTRIBUTES[sym])
      end
    end
    # Terminal width in columns, as reported by tput.
    def width
      `/usr/bin/tput cols`.strip.to_i
    end
    # Truncates str to the terminal width minus a 4-column margin.
    def truncate(str)
      str.to_s[0, width - 4]
    end
    private
    def foreground(color)
      deferred_emit(to_foreground_code(color))
    end
    def background(color)
      deferred_emit(to_background_code(color))
    end
    # Maps a color to a full SGR code in the given decade (30s or 40s),
    # clamping out-of-range numbers to the "default" code (space + 9).
    def to_color_code(space, color)
      return unless (num = to_color_number(color))
      return space + num if num < space
      return space + 9 if num > space
      num
    end
    def to_foreground_code(color)
      to_color_code(30, color)
    end
    def to_background_code(color)
      to_color_code(40, color)
    end
    def to_color_number(color)
      # Fix: `||` binds tighter than `?:`, so the unparenthesized original
      # `COLORS[color] || color.is_a?(Integer) ? color : nil` parsed as
      # `(COLORS[color] || color.is_a?(Integer)) ? color : nil` and returned
      # the Symbol itself (not its number) for known color names.
      COLORS[color] || (color.is_a?(Integer) ? color : nil)
    end
    def to_attribute_number(attribute)
      # Same precedence fix as to_color_number.
      ATTRIBUTES[attribute] || (attribute.is_a?(Integer) ? attribute : nil)
    end
    # Coerces arg to a non-negative SGR code, warning and falling back to
    # the reset code for anything unusable.
    def sanitize_integer(arg)
      return arg.to_i if arg.is_a?(Integer)
      return 0 if arg.to_s =~ %r{^0+$}
      if arg.respond_to?(:to_i) && (int = arg.to_i) > 0
        return int
      end
      $stderr.puts "Warning: bad Tty code #{arg}"
      ATTRIBUTES[:reset]
    end
    # Queues codes and returns the class so calls can be chained/interpolated.
    def deferred_emit(*codes)
      @sequence.concat Array(*codes).map(&method(:sanitize_integer))
      Hbc::Utils::Tty
    end
    # Flushes the queued codes as an escape sequence; empty string when
    # stdout is not a terminal.
    def to_s
      sequence = @sequence
      @sequence = []
      return "" unless $stdout.tty?
      if sequence.empty?
        $stderr.puts "Warning: empty Tty sequence"
        sequence = [ATTRIBUTES[:reset]]
      end
      "#{initiate}#{sequence.join(';')}#{terminate}"
    end
    def initiate
      "\033["
    end
    def terminate
      "m"
    end
  end
end
| bsd-2-clause |
joone/chromium-crosswalk | third_party/WebKit/Source/core/animation/AnimationEffectTest.cpp | 28166 | /*
* Copyright (c) 2013, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/animation/AnimationEffect.h"
#include "core/animation/ComputedTimingProperties.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace blink {
// Event delegate that records whether onEventCondition() has fired since
// the last reset(); always requests per-iteration events.
class TestAnimationEffectEventDelegate : public AnimationEffect::EventDelegate {
public:
    void onEventCondition(const AnimationEffect& animationNode) override
    {
        m_eventTriggered = true;
    }
    bool requiresIterationEvents(const AnimationEffect& animationNode) override
    {
        return true;
    }
    void reset()
    {
        m_eventTriggered = false;
    }
    bool eventTriggered() { return m_eventTriggered; }
private:
    // Fix: initialize so eventTriggered() is well-defined before the first reset().
    bool m_eventTriggered = false;
};
// AnimationEffect subclass for testing: records the localTime and
// timeToNextIteration passed to calculateTimeToEffectChange() so tests can
// "take" (read-and-clear) them, and exposes the test event delegate.
class TestAnimationEffect : public AnimationEffect {
public:
    static TestAnimationEffect* create(const Timing& specified)
    {
        return new TestAnimationEffect(specified, new TestAnimationEffectEventDelegate());
    }
    // Convenience overload; TimingUpdateForAnimationFrame also drives event dispatch.
    void updateInheritedTime(double time)
    {
        updateInheritedTime(time, TimingUpdateForAnimationFrame);
    }
    void updateInheritedTime(double time, TimingUpdateReason reason)
    {
        m_eventDelegate->reset();
        AnimationEffect::updateInheritedTime(time, reason);
    }
    void updateChildrenAndEffects() const override { }
    void willDetach() { }
    TestAnimationEffectEventDelegate* eventDelegate() { return m_eventDelegate.get(); }
    double calculateTimeToEffectChange(bool forwards, double localTime, double timeToNextIteration) const override
    {
        m_localTime = localTime;
        m_timeToNextIteration = timeToNextIteration;
        return -1;
    }
    // Returns the last recorded local time and resets it to null.
    double takeLocalTime()
    {
        const double result = m_localTime;
        m_localTime = nullValue();
        return result;
    }
    // Returns the last recorded time-to-next-iteration and resets it to null.
    double takeTimeToNextIteration()
    {
        const double result = m_timeToNextIteration;
        m_timeToNextIteration = nullValue();
        return result;
    }
    DEFINE_INLINE_VIRTUAL_TRACE()
    {
        visitor->trace(m_eventDelegate);
        AnimationEffect::trace(visitor);
    }
private:
    TestAnimationEffect(const Timing& specified, TestAnimationEffectEventDelegate* eventDelegate)
        : AnimationEffect(specified, eventDelegate)
        , m_eventDelegate(eventDelegate)
        // Fix: start as null instead of uninitialized garbage, so take*()
        // before any calculateTimeToEffectChange() call is well-defined.
        , m_localTime(nullValue())
        , m_timeToNextIteration(nullValue())
    {
    }
    Member<TestAnimationEffectEventDelegate> m_eventDelegate;
    mutable double m_localTime;
    mutable double m_timeToNextIteration;
};
// Basic lifecycle of a single 2s iteration: active (in play, current, in
// effect) at t=0 and t=1; After phase once local time reaches the 2s
// active duration; time fraction saturates at 1 past the end.
TEST(AnimationAnimationEffectTest, Sanity)
{
    Timing timing;
    timing.iterationDuration = 2;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(2, animationNode->activeDurationInternal());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(2, animationNode->activeDurationInternal());
    EXPECT_EQ(0.5, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(2, animationNode->activeDurationInternal());
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(3);
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(2, animationNode->activeDurationInternal());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Default (auto) fill behaves like "both" here: time fraction is clamped
// to 0 before the interval and 1 after it.
TEST(AnimationAnimationEffectTest, FillAuto)
{
    Timing timing;
    timing.iterationDuration = 1;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// fill:forwards produces no effect (null fraction) before the interval and
// retains the end value (1) after it.
TEST(AnimationAnimationEffectTest, FillForwards)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// fill:backwards holds the start value (0) before the interval and produces
// no effect (null fraction) after it.
TEST(AnimationAnimationEffectTest, FillBackwards)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeBackwards;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
}
// fill:both holds the start value before the interval and the end value
// after it.
TEST(AnimationAnimationEffectTest, FillBoth)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeBoth;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// A 0.5s start delay shifts the active interval: no effect before the delay
// elapses, then the iteration runs from t=0.5 to t=1.5.
TEST(AnimationAnimationEffectTest, StartDelay)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    timing.startDelay = 0.5;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0.5);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1.5);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// With iterationCount = 0 the active duration collapses to zero: no effect
// before t=0, then iteration 0 with fraction 0 from t=0 onwards (forwards fill).
TEST(AnimationAnimationEffectTest, ZeroIteration)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = 0;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// Infinite iteration count yields an infinite active duration; before the
// interval there is no effect, at t=0 iteration 0 starts at fraction 0.
TEST(AnimationAnimationEffectTest, InfiniteIteration)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = std::numeric_limits<double>::infinity();
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// Two 2s iterations: the iteration index advances at each boundary and the
// fraction restarts; updating twice with the same time is a no-op (the
// repeated t=2 update is deliberate); past the end the last iteration
// holds fraction 1.
TEST(AnimationAnimationEffectTest, Iteration)
{
    Timing timing;
    timing.iterationCount = 2;
    timing.iterationDuration = 2;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0.5, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(2);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(5);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// iterationStart = 1.2 offsets the iteration index and fraction: the effect
// begins in iteration 1 at fraction 0.2 and, with count 2.2, ends in
// iteration 3 at fraction 0.4.
TEST(AnimationAnimationEffectTest, IterationStart)
{
    Timing timing;
    timing.iterationStart = 1.2;
    timing.iterationCount = 2.2;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeBoth;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_NEAR(0.2, animationNode->timeFraction(), 0.000000000000001);
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_NEAR(0.2, animationNode->timeFraction(), 0.000000000000001);
    animationNode->updateInheritedTime(10);
    EXPECT_EQ(3, animationNode->currentIteration());
    EXPECT_NEAR(0.4, animationNode->timeFraction(), 0.000000000000001);
}
// direction:alternate reverses the fraction on odd iterations (0.75 -> 0.25
// in iteration 1, back to 0.75 in iteration 2).
TEST(AnimationAnimationEffectTest, IterationAlternate)
{
    Timing timing;
    timing.iterationCount = 10;
    timing.iterationDuration = 1;
    timing.direction = Timing::PlaybackDirectionAlternate;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0.75);
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0.75, animationNode->timeFraction());
    animationNode->updateInheritedTime(1.75);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0.25, animationNode->timeFraction());
    animationNode->updateInheritedTime(2.75);
    EXPECT_EQ(2, animationNode->currentIteration());
    EXPECT_EQ(0.75, animationNode->timeFraction());
}
// direction:alternate-reverse starts reversed: even iterations are played
// backwards (0.75 -> 0.25), odd iterations forwards.
TEST(AnimationAnimationEffectTest, IterationAlternateReverse)
{
    Timing timing;
    timing.iterationCount = 10;
    timing.iterationDuration = 1;
    timing.direction = Timing::PlaybackDirectionAlternateReverse;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0.75);
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0.25, animationNode->timeFraction());
    animationNode->updateInheritedTime(1.75);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0.75, animationNode->timeFraction());
    animationNode->updateInheritedTime(2.75);
    EXPECT_EQ(2, animationNode->currentIteration());
    EXPECT_EQ(0.25, animationNode->timeFraction());
}
// A zero-duration effect is immediately in the After phase with fraction 1
// (auto fill), at t=0 and any later time.
TEST(AnimationAnimationEffectTest, ZeroDurationSanity)
{
    Timing timing;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration + fill:forwards: null before t=0, end value (1) from t=0 on.
TEST(AnimationAnimationEffectTest, ZeroDurationFillForwards)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration + fill:backwards: start value (0) before t=0, no effect from
// t=0 on (the effect is already past its active interval).
TEST(AnimationAnimationEffectTest, ZeroDurationFillBackwards)
{
    Timing timing;
    timing.fillMode = Timing::FillModeBackwards;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(0);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(1);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
}
// Zero duration + fill:both: start value before t=0, end value from t=0 on.
TEST(AnimationAnimationEffectTest, ZeroDurationFillBoth)
{
    Timing timing;
    timing.fillMode = Timing::FillModeBoth;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration + 0.5s start delay: no effect until the delay elapses, then
// the end value (1) holds.
TEST(AnimationAnimationEffectTest, ZeroDurationStartDelay)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    timing.startDelay = 0.5;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0);
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0.5);
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1.5);
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration with fractional iterationStart/iterationCount and a start
// delay: before the delay the fraction equals iterationStart (0.1); from the
// delay onwards it equals iterationStart + iterationCount (0.3).
TEST(AnimationAnimationEffectTest, ZeroDurationIterationStartAndCount)
{
    Timing timing;
    timing.iterationStart = 0.1;
    timing.iterationCount = 0.2;
    timing.fillMode = Timing::FillModeBoth;
    timing.startDelay = 0.3;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0.1, animationNode->timeFraction());
    animationNode->updateInheritedTime(0.3);
    EXPECT_DOUBLE_EQ(0.3, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_DOUBLE_EQ(0.3, animationNode->timeFraction());
}
// FIXME: Needs specification work.
// Zero duration with infinite iteration count: current iteration is reported
// as infinity at t=0 (behavior noted as needing specification work above).
TEST(AnimationAnimationEffectTest, ZeroDurationInfiniteIteration)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = std::numeric_limits<double>::infinity();
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration with count 2: from t=0 the effect reports the final
// iteration (index 1) at fraction 1.
TEST(AnimationAnimationEffectTest, ZeroDurationIteration)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = 2;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// Zero duration with iterationStart 1.2 and count 2.2: before t=0 the
// start state (iteration 1, fraction 0.2) fills backwards; from t=0 the end
// state (iteration 3, fraction 0.4) fills forwards.
TEST(AnimationAnimationEffectTest, ZeroDurationIterationStart)
{
    Timing timing;
    timing.iterationStart = 1.2;
    timing.iterationCount = 2.2;
    timing.fillMode = Timing::FillModeBoth;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_NEAR(0.2, animationNode->timeFraction(), 0.000000000000001);
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(3, animationNode->currentIteration());
    EXPECT_NEAR(0.4, animationNode->timeFraction(), 0.000000000000001);
    animationNode->updateInheritedTime(10);
    EXPECT_EQ(3, animationNode->currentIteration());
    EXPECT_NEAR(0.4, animationNode->timeFraction(), 0.000000000000001);
}
// Zero duration, count 2, direction:alternate: the final iteration (index 1)
// plays reversed, so the held fraction is 0.
TEST(AnimationAnimationEffectTest, ZeroDurationIterationAlternate)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = 2;
    timing.direction = Timing::PlaybackDirectionAlternate;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// Zero duration, count 2, direction:alternate-reverse: the final iteration
// (index 1) plays forwards, so the held fraction is 1.
TEST(AnimationAnimationEffectTest, ZeroDurationIterationAlternateReverse)
{
    Timing timing;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = 2;
    timing.direction = Timing::PlaybackDirectionAlternateReverse;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(-1);
    EXPECT_TRUE(isNull(animationNode->currentIteration()));
    EXPECT_TRUE(isNull(animationNode->timeFraction()));
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(1, animationNode->currentIteration());
    EXPECT_EQ(1, animationNode->timeFraction());
}
// An infinite iteration duration keeps the effect in the Active phase with
// iteration 0 and fraction 0 at all finite times.
TEST(AnimationAnimationEffectTest, InfiniteDurationSanity)
{
    Timing timing;
    timing.iterationDuration = std::numeric_limits<double>::infinity();
    timing.iterationCount = 1;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// FIXME: Needs specification work.
// Infinite iteration duration but zero iterations: the active duration is 0
// and the effect is immediately in the After phase.
TEST(AnimationAnimationEffectTest, InfiniteDurationZeroIterations)
{
    Timing timing;
    timing.iterationDuration = std::numeric_limits<double>::infinity();
    timing.iterationCount = 0;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    // Fix: the phase assertion was accidentally duplicated here (copy-paste);
    // a single check suffices.
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// Infinite duration and infinite iterations: permanently Active, iteration 0,
// fraction 0.
TEST(AnimationAnimationEffectTest, InfiniteDurationInfiniteIterations)
{
    Timing timing;
    timing.iterationDuration = std::numeric_limits<double>::infinity();
    timing.iterationCount = std::numeric_limits<double>::infinity();
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(1);
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// Infinite duration with playbackRate 0: Active at finite times; only an
// infinite inherited time pushes the effect into the After phase.
TEST(AnimationAnimationEffectTest, InfiniteDurationZeroPlaybackRate)
{
    Timing timing;
    timing.iterationDuration = std::numeric_limits<double>::infinity();
    timing.playbackRate = 0;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(0, animationNode->computedTiming().startTime());
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_TRUE(animationNode->isInPlay());
    EXPECT_TRUE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
    animationNode->updateInheritedTime(std::numeric_limits<double>::infinity());
    EXPECT_EQ(std::numeric_limits<double>::infinity(), animationNode->activeDurationInternal());
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_FALSE(animationNode->isInPlay());
    EXPECT_FALSE(animationNode->isCurrent());
    EXPECT_TRUE(animationNode->isInEffect());
    EXPECT_EQ(0, animationNode->currentIteration());
    EXPECT_EQ(0, animationNode->timeFraction());
}
// End time = startDelay + iterationDuration * iterationCount + endDelay
// = 1 + 4 * 2 + 2 = 11.
TEST(AnimationAnimationEffectTest, EndTime)
{
    Timing timing;
    timing.startDelay = 1;
    timing.endDelay = 2;
    timing.iterationDuration = 4;
    timing.iterationCount = 2;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    EXPECT_EQ(11, animationNode->endTimeInternal());
}
// The event delegate fires only for TimingUpdateForAnimationFrame updates,
// never for TimingUpdateOnDemand, at the same inherited times.
TEST(AnimationAnimationEffectTest, Events)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationCount = 2;
    timing.startDelay = 1;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0.0, TimingUpdateOnDemand);
    EXPECT_FALSE(animationNode->eventDelegate()->eventTriggered());
    animationNode->updateInheritedTime(0.0, TimingUpdateForAnimationFrame);
    EXPECT_TRUE(animationNode->eventDelegate()->eventTriggered());
    animationNode->updateInheritedTime(1.5, TimingUpdateOnDemand);
    EXPECT_FALSE(animationNode->eventDelegate()->eventTriggered());
    animationNode->updateInheritedTime(1.5, TimingUpdateForAnimationFrame);
    EXPECT_TRUE(animationNode->eventDelegate()->eventTriggered());
}
// Verifies the localTime / timeToNextIteration values recorded by
// calculateTimeToEffectChange() across normal and reversed iterations, the
// fractional final iteration, and the After phase.
TEST(AnimationAnimationEffectTest, TimeToEffectChange)
{
    Timing timing;
    timing.iterationDuration = 1;
    timing.fillMode = Timing::FillModeForwards;
    timing.iterationStart = 0.2;
    timing.iterationCount = 2.5;
    timing.startDelay = 1;
    timing.direction = Timing::PlaybackDirectionAlternate;
    TestAnimationEffect* animationNode = TestAnimationEffect::create(timing);
    animationNode->updateInheritedTime(0);
    EXPECT_EQ(0, animationNode->takeLocalTime());
    EXPECT_TRUE(std::isinf(animationNode->takeTimeToNextIteration()));
    // Normal iteration.
    animationNode->updateInheritedTime(1.75);
    EXPECT_EQ(1.75, animationNode->takeLocalTime());
    EXPECT_NEAR(0.05, animationNode->takeTimeToNextIteration(), 0.000000000000001);
    // Reverse iteration.
    animationNode->updateInheritedTime(2.75);
    EXPECT_EQ(2.75, animationNode->takeLocalTime());
    EXPECT_NEAR(0.05, animationNode->takeTimeToNextIteration(), 0.000000000000001);
    // Item ends before iteration finishes.
    animationNode->updateInheritedTime(3.4);
    EXPECT_EQ(AnimationEffect::PhaseActive, animationNode->phase());
    EXPECT_EQ(3.4, animationNode->takeLocalTime());
    EXPECT_TRUE(std::isinf(animationNode->takeTimeToNextIteration()));
    // Item has finished.
    animationNode->updateInheritedTime(3.5);
    EXPECT_EQ(AnimationEffect::PhaseAfter, animationNode->phase());
    EXPECT_EQ(3.5, animationNode->takeLocalTime());
    EXPECT_TRUE(std::isinf(animationNode->takeTimeToNextIteration()));
}
} // namespace blink
| bsd-3-clause |
scheib/chromium | chrome/android/javatests/src/org/chromium/chrome/browser/offlinepages/prefetch/PrefetchTestBridge.java | 1460 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.offlinepages.prefetch;
import org.chromium.base.annotations.JNINamespace;
/** Prefetch test Java to native bridge. */
@JNINamespace("offline_pages::prefetch")
public class PrefetchTestBridge {
public static void enableLimitlessPrefetching(boolean enabled) {
nativeEnableLimitlessPrefetching(enabled);
}
public static boolean isLimitlessPrefetchingEnabled() {
return nativeIsLimitlessPrefetchingEnabled();
}
public static void insertIntoCachedImageFetcher(String url, byte[] imageData) {
nativeInsertIntoCachedImageFetcher(url, imageData);
}
public static void addCandidatePrefetchURL(String url, String title, String thumbnailUrl,
String faviconUrl, String snippet, String attribution) {
nativeAddCandidatePrefetchURL(url, title, thumbnailUrl, faviconUrl, snippet, attribution);
}
static native void nativeEnableLimitlessPrefetching(boolean enabled);
static native boolean nativeIsLimitlessPrefetchingEnabled();
static native void nativeInsertIntoCachedImageFetcher(String url, byte[] imageData);
static native void nativeAddCandidatePrefetchURL(String url, String title, String thumbnailUrl,
String faviconUrl, String snippet, String attribution);
}
| bsd-3-clause |
aaron-goshine/react | src/renderers/dom/stack/client/wrappers/ReactDOMSelect.js | 5936 | /**
* Copyright 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule ReactDOMSelect
*/
'use strict';
var ReactControlledValuePropTypes = require('ReactControlledValuePropTypes');
var ReactDOMComponentTree = require('ReactDOMComponentTree');
var warning = require('warning');
// Dedupe flag: the controlled-vs-uncontrolled warning fired from mountWrapper
// is emitted at most once per page load.
var didWarnValueDefaultValue = false;
/**
 * Builds a hint pointing at the owner component's render method, suitable for
 * appending to a warning message. Returns the empty string when there is no
 * owner or the owner has no usable name.
 */
function getDeclarationErrorAddendum(owner) {
  if (!owner) {
    return '';
  }
  var ownerName = owner.getName();
  if (!ownerName) {
    return '';
  }
  return ' Check the render method of `' + ownerName + '`.';
}
// Props that can seed a <select>'s selection; both are validated the same way
// in checkSelectPropTypes.
var valuePropNames = ['value', 'defaultValue'];
/**
* Validation function for `value` and `defaultValue`.
* @private
*/
function checkSelectPropTypes(inst, props) {
  var owner = inst._currentElement._owner;
  ReactControlledValuePropTypes.checkPropTypes(
    'select',
    props,
    owner ? owner.getName() : null
  );
  // `value` must be an array exactly when `multiple` is set; warn on either
  // mismatch. Null/undefined props are skipped entirely.
  valuePropNames.forEach(function(propName) {
    if (props[propName] == null) {
      return;
    }
    var isArray = Array.isArray(props[propName]);
    if (props.multiple && !isArray) {
      warning(
        false,
        'The `%s` prop supplied to <select> must be an array if ' +
          '`multiple` is true.%s',
        propName,
        getDeclarationErrorAddendum(owner)
      );
    } else if (!props.multiple && isArray) {
      warning(
        false,
        'The `%s` prop supplied to <select> must be a scalar ' +
          'value if `multiple` is false.%s',
        propName,
        getDeclarationErrorAddendum(owner)
      );
    }
  });
}
/**
* @param {ReactDOMComponent} inst
* @param {boolean} multiple
* @param {*} propValue A stringable (with `multiple`, a list of stringables).
* @private
*/
function updateOptions(inst, multiple, propValue) {
  var options = ReactDOMComponentTree.getNodeFromInstance(inst).options;

  if (multiple) {
    // Build a lookup of stringified selected values, then sync every option's
    // selected bit against it (touching the DOM only when it changes).
    var selectedSet = {};
    for (var i = 0; i < propValue.length; i++) {
      selectedSet['' + propValue[i]] = true;
    }
    for (var j = 0; j < options.length; j++) {
      var shouldSelect = selectedSet.hasOwnProperty(options[j].value);
      if (options[j].selected !== shouldSelect) {
        options[j].selected = shouldSelect;
      }
    }
    return;
  }

  // Do not set `select.value` as exact behavior isn't consistent across all
  // browsers for all cases.
  var stringValue = '' + propValue;
  for (var k = 0; k < options.length; k++) {
    if (options[k].value === stringValue) {
      options[k].selected = true;
      return;
    }
  }
  // No option matched: fall back to selecting the first one, if any.
  if (options.length) {
    options[0].selected = true;
  }
}
/**
* Implements a <select> host component that allows optionally setting the
* props `value` and `defaultValue`. If `multiple` is false, the prop must be a
* stringable. If `multiple` is true, the prop must be an array of stringables.
*
* If `value` is not supplied (or null/undefined), user actions that change the
* selected option will trigger updates to the rendered options.
*
* If it is supplied (and not null/undefined), the rendered options will not
* update in response to user actions. Instead, the `value` prop must change in
* order for the rendered options to update.
*
* If `defaultValue` is provided, any options with the supplied values will be
* selected.
*/
var ReactDOMSelect = {
  // Props forwarded to the DOM node: `value` is stripped because selected-ness
  // is applied manually via updateOptions, not through select.value.
  getHostProps: function(inst, props) {
    return Object.assign({}, props, {
      value: undefined,
    });
  },
  // Mount-time setup: validates props (DEV only), records the initial value
  // and multiple-ness, and warns once when both `value` and `defaultValue`
  // are supplied.
  mountWrapper: function(inst, props) {
    if (__DEV__) {
      checkSelectPropTypes(inst, props);
    }
    var value = props.value;
    inst._wrapperState = {
      // `value` takes precedence over `defaultValue` when both are present.
      initialValue: value != null ? value : props.defaultValue,
      listeners: null,
      wasMultiple: Boolean(props.multiple),
    };
    if (
      props.value !== undefined &&
      props.defaultValue !== undefined &&
      !didWarnValueDefaultValue
    ) {
      warning(
        false,
        'Select elements must be either controlled or uncontrolled ' +
          '(specify either the value prop, or the defaultValue prop, but not ' +
          'both). Decide between using a controlled or uncontrolled select ' +
          'element and remove one of these props. More info: ' +
          'https://fb.me/react-controlled-components'
      );
      didWarnValueDefaultValue = true;
    }
  },
  getSelectValueContext: function(inst) {
    // ReactDOMOption looks at this initial value so the initial generated
    // markup has correct `selected` attributes
    return inst._wrapperState.initialValue;
  },
  // Post-update sync: re-applies selection from `value` when controlled, or
  // from `defaultValue` when `multiple` was toggled on an uncontrolled select.
  postUpdateWrapper: function(inst) {
    var props = inst._currentElement.props;
    // After the initial mount, we control selected-ness manually so don't pass
    // this value down
    inst._wrapperState.initialValue = undefined;
    var wasMultiple = inst._wrapperState.wasMultiple;
    inst._wrapperState.wasMultiple = Boolean(props.multiple);
    var value = props.value;
    if (value != null) {
      updateOptions(inst, Boolean(props.multiple), value);
    } else if (wasMultiple !== Boolean(props.multiple)) {
      // For simplicity, reapply `defaultValue` if `multiple` is toggled.
      if (props.defaultValue != null) {
        updateOptions(inst, Boolean(props.multiple), props.defaultValue);
      } else {
        // Revert the select back to its default unselected state.
        updateOptions(inst, Boolean(props.multiple), props.multiple ? [] : '');
      }
    }
  },
  // Re-asserts the controlled `value` after user interaction, but only while
  // the instance is still mounted (guarded by _rootNodeID).
  restoreControlledState: function(inst) {
    if (inst._rootNodeID) {
      var props = inst._currentElement.props;
      var value = props.value;
      if (value != null) {
        updateOptions(inst, Boolean(props.multiple), value);
      }
    }
  },
};
module.exports = ReactDOMSelect;
| bsd-3-clause |
jcabdala/atlantisapp | atlantis/assets/assets/plugins/tables/datatables/tabletools/TableTools.min.js | 29682 | // Simple Set Clipboard System
// Author: Joseph Huckaby
// Minified ZeroClipboard helper used by TableTools for Flash-based clipboard
// copy and file-save support. Left byte-for-byte as shipped (do not hand-edit
// minified code); the singleton below tracks clients, the Flash movie path,
// and glues Flash movies onto DOM elements.
var ZeroClipboard_TableTools={version:"1.0.4-TableTools2",clients:{},moviePath:"",nextId:1,$:function(a){"string"==typeof a&&(a=document.getElementById(a));a.addClass||(a.hide=function(){this.style.display="none"},a.show=function(){this.style.display=""},a.addClass=function(a){this.removeClass(a);this.className+=" "+a},a.removeClass=function(a){this.className=this.className.replace(RegExp("\\s*"+a+"\\s*")," ").replace(/^\s+/,"").replace(/\s+$/,"")},a.hasClass=function(a){return!!this.className.match(RegExp("\\s*"+
a+"\\s*"))});return a},setMoviePath:function(a){this.moviePath=a},dispatch:function(a,b,c){(a=this.clients[a])&&a.receiveEvent(b,c)},register:function(a,b){this.clients[a]=b},getDOMObjectPosition:function(a){var b={left:0,top:0,width:a.width?a.width:a.offsetWidth,height:a.height?a.height:a.offsetHeight};""!=a.style.width&&(b.width=a.style.width.replace("px",""));""!=a.style.height&&(b.height=a.style.height.replace("px",""));for(;a;)b.left+=a.offsetLeft,b.top+=a.offsetTop,a=a.offsetParent;return b},
Client:function(a){this.handlers={};this.id=ZeroClipboard_TableTools.nextId++;this.movieId="ZeroClipboard_TableToolsMovie_"+this.id;ZeroClipboard_TableTools.register(this.id,this);a&&this.glue(a)}};
// Client prototype: per-button state (clip text, file name, action) plus the
// glue/reposition/event plumbing between the DOM element and its Flash movie.
// Also left byte-for-byte as shipped.
ZeroClipboard_TableTools.Client.prototype={id:0,ready:!1,movie:null,clipText:"",fileName:"",action:"copy",handCursorEnabled:!0,cssEffects:!0,handlers:null,sized:!1,glue:function(a,b){this.domElement=ZeroClipboard_TableTools.$(a);var c=99;this.domElement.style.zIndex&&(c=parseInt(this.domElement.style.zIndex)+1);var d=ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement);this.div=document.createElement("div");var e=this.div.style;e.position="absolute";e.left="0px";e.top="0px";e.width=d.width+
"px";e.height=d.height+"px";e.zIndex=c;"undefined"!=typeof b&&""!=b&&(this.div.title=b);0!=d.width&&0!=d.height&&(this.sized=!0);this.domElement&&(this.domElement.appendChild(this.div),this.div.innerHTML=this.getHTML(d.width,d.height))},positionElement:function(){var a=ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement),b=this.div.style;b.position="absolute";b.width=a.width+"px";b.height=a.height+"px";0!=a.width&&0!=a.height&&(this.sized=!0,b=this.div.childNodes[0],b.width=a.width,b.height=
a.height)},getHTML:function(a,b){var c="",d="id="+this.id+"&width="+a+"&height="+b;if(navigator.userAgent.match(/MSIE/))var e=location.href.match(/^https/i)?"https://":"http://",c=c+('<object classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" codebase="'+e+'download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=10,0,0,0" width="'+a+'" height="'+b+'" id="'+this.movieId+'" align="middle"><param name="allowScriptAccess" value="always" /><param name="allowFullScreen" value="false" /><param name="movie" value="'+
ZeroClipboard_TableTools.moviePath+'" /><param name="loop" value="false" /><param name="menu" value="false" /><param name="quality" value="best" /><param name="bgcolor" value="#ffffff" /><param name="flashvars" value="'+d+'"/><param name="wmode" value="transparent"/></object>');else c+='<embed id="'+this.movieId+'" src="'+ZeroClipboard_TableTools.moviePath+'" loop="false" menu="false" quality="best" bgcolor="#ffffff" width="'+a+'" height="'+b+'" name="'+this.movieId+'" align="middle" allowScriptAccess="always" allowFullScreen="false" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer" flashvars="'+
d+'" wmode="transparent" />';return c},hide:function(){this.div&&(this.div.style.left="-2000px")},show:function(){this.reposition()},destroy:function(){if(this.domElement&&this.div){this.hide();this.div.innerHTML="";var a=document.getElementsByTagName("body")[0];try{a.removeChild(this.div)}catch(b){}this.div=this.domElement=null}},reposition:function(a){a&&((this.domElement=ZeroClipboard_TableTools.$(a))||this.hide());if(this.domElement&&this.div){var a=ZeroClipboard_TableTools.getDOMObjectPosition(this.domElement),
b=this.div.style;b.left=""+a.left+"px";b.top=""+a.top+"px"}},clearText:function(){this.clipText="";this.ready&&this.movie.clearText()},appendText:function(a){this.clipText+=a;this.ready&&this.movie.appendText(a)},setText:function(a){this.clipText=a;this.ready&&this.movie.setText(a)},setCharSet:function(a){this.charSet=a;this.ready&&this.movie.setCharSet(a)},setBomInc:function(a){this.incBom=a;this.ready&&this.movie.setBomInc(a)},setFileName:function(a){this.fileName=a;this.ready&&this.movie.setFileName(a)},
setAction:function(a){this.action=a;this.ready&&this.movie.setAction(a)},addEventListener:function(a,b){a=a.toString().toLowerCase().replace(/^on/,"");this.handlers[a]||(this.handlers[a]=[]);this.handlers[a].push(b)},setHandCursor:function(a){this.handCursorEnabled=a;this.ready&&this.movie.setHandCursor(a)},setCSSEffects:function(a){this.cssEffects=!!a},receiveEvent:function(a,b){a=a.toString().toLowerCase().replace(/^on/,"");switch(a){case "load":this.movie=document.getElementById(this.movieId);
if(!this.movie){var c=this;setTimeout(function(){c.receiveEvent("load",null)},1);return}if(!this.ready&&navigator.userAgent.match(/Firefox/)&&navigator.userAgent.match(/Windows/)){c=this;setTimeout(function(){c.receiveEvent("load",null)},100);this.ready=!0;return}this.ready=!0;this.movie.clearText();this.movie.appendText(this.clipText);this.movie.setFileName(this.fileName);this.movie.setAction(this.action);this.movie.setCharSet(this.charSet);this.movie.setBomInc(this.incBom);this.movie.setHandCursor(this.handCursorEnabled);
break;case "mouseover":this.domElement&&this.cssEffects&&this.recoverActive&&this.domElement.addClass("active");break;case "mouseout":this.domElement&&this.cssEffects&&(this.recoverActive=!1,this.domElement.hasClass("active")&&(this.domElement.removeClass("active"),this.recoverActive=!0));break;case "mousedown":this.domElement&&this.cssEffects&&this.domElement.addClass("active");break;case "mouseup":this.domElement&&this.cssEffects&&(this.domElement.removeClass("active"),this.recoverActive=!1)}if(this.handlers[a])for(var d=
0,e=this.handlers[a].length;d<e;d++){var f=this.handlers[a][d];if("function"==typeof f)f(this,b);else if("object"==typeof f&&2==f.length)f[0][f[1]](this,b);else if("string"==typeof f)window[f](this,b)}}};
/*
* File: TableTools.min.js
* Version: 2.1.5
* Author: Allan Jardine (www.sprymedia.co.uk)
*
* Copyright 2009-2012 Allan Jardine, all rights reserved.
*
* This source file is free software, under either the GPL v2 license or a
* BSD (3 point) style license, as supplied with this software.
*
* This source file is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
*/
var TableTools;
(function(e,n,g){TableTools=function(a,b){!this instanceof TableTools&&alert("Warning: TableTools must be initialised with the keyword 'new'");this.s={that:this,dt:a.fnSettings(),print:{saveStart:-1,saveLength:-1,saveScroll:-1,funcEnd:function(){}},buttonCounter:0,select:{type:"",selected:[],preRowSelect:null,postSelected:null,postDeselected:null,all:!1,selectedClass:""},custom:{},swfPath:"",buttonSet:[],master:!1,tags:{}};this.dom={container:null,table:null,print:{hidden:[],message:null},collection:{collection:null,
background:null}};this.classes=e.extend(!0,{},TableTools.classes);this.s.dt.bJUI&&e.extend(!0,this.classes,TableTools.classes_themeroller);this.fnSettings=function(){return this.s};"undefined"==typeof b&&(b={});this._fnConstruct(b);return this};TableTools.prototype={fnGetSelected:function(a){var b=[],c=this.s.dt.aoData,d=this.s.dt.aiDisplay,f;if(a){a=0;for(f=d.length;a<f;a++)c[d[a]]._DTTT_selected&&b.push(c[d[a]].nTr)}else{a=0;for(f=c.length;a<f;a++)c[a]._DTTT_selected&&b.push(c[a].nTr)}return b},
fnGetSelectedData:function(){var a=[],b=this.s.dt.aoData,c,d;c=0;for(d=b.length;c<d;c++)b[c]._DTTT_selected&&a.push(this.s.dt.oInstance.fnGetData(c));return a},fnIsSelected:function(a){a=this.s.dt.oInstance.fnGetPosition(a);return!0===this.s.dt.aoData[a]._DTTT_selected?!0:!1},fnSelectAll:function(a){var b=this._fnGetMasterSettings();this._fnRowSelect(!0===a?b.dt.aiDisplay:b.dt.aoData)},fnSelectNone:function(a){this._fnGetMasterSettings();this._fnRowDeselect(this.fnGetSelected(a))},fnSelect:function(a){"single"==
this.s.select.type?(this.fnSelectNone(),this._fnRowSelect(a)):"multi"==this.s.select.type&&this._fnRowSelect(a)},fnDeselect:function(a){this._fnRowDeselect(a)},fnGetTitle:function(a){var b="";"undefined"!=typeof a.sTitle&&""!==a.sTitle?b=a.sTitle:(a=g.getElementsByTagName("title"),0<a.length&&(b=a[0].innerHTML));return 4>"\u00a1".toString().length?b.replace(/[^a-zA-Z0-9_\u00A1-\uFFFF\.,\-_ !\(\)]/g,""):b.replace(/[^a-zA-Z0-9_\.,\-_ !\(\)]/g,"")},fnCalcColRatios:function(a){var b=this.s.dt.aoColumns,
a=this._fnColumnTargets(a.mColumns),c=[],d=0,f=0,e,g;e=0;for(g=a.length;e<g;e++)a[e]&&(d=b[e].nTh.offsetWidth,f+=d,c.push(d));e=0;for(g=c.length;e<g;e++)c[e]/=f;return c.join("\t")},fnGetTableData:function(a){if(this.s.dt)return this._fnGetDataTablesData(a)},fnSetText:function(a,b){this._fnFlashSetText(a,b)},fnResizeButtons:function(){for(var a in ZeroClipboard_TableTools.clients)if(a){var b=ZeroClipboard_TableTools.clients[a];"undefined"!=typeof b.domElement&&b.domElement.parentNode&&b.positionElement()}},
fnResizeRequired:function(){for(var a in ZeroClipboard_TableTools.clients)if(a){var b=ZeroClipboard_TableTools.clients[a];if("undefined"!=typeof b.domElement&&b.domElement.parentNode==this.dom.container&&!1===b.sized)return!0}return!1},fnPrint:function(a,b){void 0===b&&(b={});void 0===a||a?this._fnPrintStart(b):this._fnPrintEnd()},fnInfo:function(a,b){var c=g.createElement("div");c.className=this.classes.print.info;c.innerHTML=a;g.body.appendChild(c);setTimeout(function(){e(c).fadeOut("normal",function(){g.body.removeChild(c)})},
b)},_fnConstruct:function(a){var b=this;this._fnCustomiseSettings(a);this.dom.container=g.createElement(this.s.tags.container);this.dom.container.className=this.classes.container;"none"!=this.s.select.type&&this._fnRowSelectConfig();this._fnButtonDefinations(this.s.buttonSet,this.dom.container);this.s.dt.aoDestroyCallback.push({sName:"TableTools",fn:function(){e(b.s.dt.nTBody).off("click.DTTT_Select","tr");e(b.dom.container).empty()}})},_fnCustomiseSettings:function(a){"undefined"==typeof this.s.dt._TableToolsInit&&
(this.s.master=!0,this.s.dt._TableToolsInit=!0);this.dom.table=this.s.dt.nTable;this.s.custom=e.extend({},TableTools.DEFAULTS,a);this.s.swfPath=this.s.custom.sSwfPath;"undefined"!=typeof ZeroClipboard_TableTools&&(ZeroClipboard_TableTools.moviePath=this.s.swfPath);this.s.select.type=this.s.custom.sRowSelect;this.s.select.preRowSelect=this.s.custom.fnPreRowSelect;this.s.select.postSelected=this.s.custom.fnRowSelected;this.s.select.postDeselected=this.s.custom.fnRowDeselected;this.s.custom.sSelectedClass&&
(this.classes.select.row=this.s.custom.sSelectedClass);this.s.tags=this.s.custom.oTags;this.s.buttonSet=this.s.custom.aButtons},_fnButtonDefinations:function(a,b){for(var c,d=0,f=a.length;d<f;d++){if("string"==typeof a[d]){if("undefined"==typeof TableTools.BUTTONS[a[d]]){alert("TableTools: Warning - unknown button type: "+a[d]);continue}c=e.extend({},TableTools.BUTTONS[a[d]],!0)}else{if("undefined"==typeof TableTools.BUTTONS[a[d].sExtends]){alert("TableTools: Warning - unknown button type: "+a[d].sExtends);
continue}c=e.extend({},TableTools.BUTTONS[a[d].sExtends],!0);c=e.extend(c,a[d],!0)}b.appendChild(this._fnCreateButton(c,e(b).hasClass(this.classes.collection.container)))}},_fnCreateButton:function(a,b){var c=this._fnButtonBase(a,b);a.sAction.match(/flash/)?this._fnFlashConfig(c,a):"text"==a.sAction?this._fnTextConfig(c,a):"div"==a.sAction?this._fnTextConfig(c,a):"collection"==a.sAction&&(this._fnTextConfig(c,a),this._fnCollectionConfig(c,a));return c},_fnButtonBase:function(a,b){var c,d,f;b?(c="default"!==
a.sTag?a.sTag:this.s.tags.collection.button,d="default"!==a.sLinerTag?a.sLiner:this.s.tags.collection.liner,f=this.classes.collection.buttons.normal):(c="default"!==a.sTag?a.sTag:this.s.tags.button,d="default"!==a.sLinerTag?a.sLiner:this.s.tags.liner,f=this.classes.buttons.normal);c=g.createElement(c);d=g.createElement(d);var e=this._fnGetMasterSettings();c.className=f+" "+a.sButtonClass;c.setAttribute("id","ToolTables_"+this.s.dt.sInstance+"_"+e.buttonCounter);c.appendChild(d);d.innerHTML=a.sButtonText;
e.buttonCounter++;return c},_fnGetMasterSettings:function(){if(this.s.master)return this.s;for(var a=TableTools._aInstances,b=0,c=a.length;b<c;b++)if(this.dom.table==a[b].s.dt.nTable)return a[b].s},_fnCollectionConfig:function(a,b){var c=g.createElement(this.s.tags.collection.container);c.style.display="none";c.className=this.classes.collection.container;b._collection=c;g.body.appendChild(c);this._fnButtonDefinations(b.aButtons,c)},_fnCollectionShow:function(a,b){var c=this,d=e(a).offset(),f=b._collection,
j=d.left,d=d.top+e(a).outerHeight(),m=e(n).height(),h=e(g).height(),k=e(n).width(),o=e(g).width();f.style.position="absolute";f.style.left=j+"px";f.style.top=d+"px";f.style.display="block";e(f).css("opacity",0);var l=g.createElement("div");l.style.position="absolute";l.style.left="0px";l.style.top="0px";l.style.height=(m>h?m:h)+"px";l.style.width=(k>o?k:o)+"px";l.className=this.classes.collection.background;e(l).css("opacity",0);g.body.appendChild(l);g.body.appendChild(f);m=e(f).outerWidth();k=e(f).outerHeight();
j+m>o&&(f.style.left=o-m+"px");d+k>h&&(f.style.top=d-k-e(a).outerHeight()+"px");this.dom.collection.collection=f;this.dom.collection.background=l;setTimeout(function(){e(f).animate({opacity:1},500);e(l).animate({opacity:0.25},500)},10);this.fnResizeButtons();e(l).click(function(){c._fnCollectionHide.call(c,null,null)})},_fnCollectionHide:function(a,b){!(null!==b&&"collection"==b.sExtends)&&null!==this.dom.collection.collection&&(e(this.dom.collection.collection).animate({opacity:0},500,function(){this.style.display=
"none"}),e(this.dom.collection.background).animate({opacity:0},500,function(){this.parentNode.removeChild(this)}),this.dom.collection.collection=null,this.dom.collection.background=null)},_fnRowSelectConfig:function(){if(this.s.master){var a=this,b=this.s.dt;e(b.nTable).addClass(this.classes.select.table);e(b.nTBody).on("click.DTTT_Select","tr",function(c){this.parentNode==b.nTBody&&null!==b.oInstance.fnGetData(this)&&(a.fnIsSelected(this)?a._fnRowDeselect(this,c):"single"==a.s.select.type?(a.fnSelectNone(),
a._fnRowSelect(this,c)):"multi"==a.s.select.type&&a._fnRowSelect(this,c))});b.oApi._fnCallbackReg(b,"aoRowCreatedCallback",function(c,d,f){b.aoData[f]._DTTT_selected&&e(c).addClass(a.classes.select.row)},"TableTools-SelectAll")}},_fnRowSelect:function(a,b){var c=this._fnSelectData(a),d=[],f,j;f=0;for(j=c.length;f<j;f++)c[f].nTr&&d.push(c[f].nTr);if(null===this.s.select.preRowSelect||this.s.select.preRowSelect.call(this,b,d,!0)){f=0;for(j=c.length;f<j;f++)c[f]._DTTT_selected=!0,c[f].nTr&&e(c[f].nTr).addClass(this.classes.select.row);
null!==this.s.select.postSelected&&this.s.select.postSelected.call(this,d);TableTools._fnEventDispatch(this,"select",d,!0)}},_fnRowDeselect:function(a,b){var c=this._fnSelectData(a),d=[],f,j;f=0;for(j=c.length;f<j;f++)c[f].nTr&&d.push(c[f].nTr);if(null===this.s.select.preRowSelect||this.s.select.preRowSelect.call(this,b,d,!1)){f=0;for(j=c.length;f<j;f++)c[f]._DTTT_selected=!1,c[f].nTr&&e(c[f].nTr).removeClass(this.classes.select.row);null!==this.s.select.postDeselected&&this.s.select.postDeselected.call(this,
d);TableTools._fnEventDispatch(this,"select",d,!1)}},_fnSelectData:function(a){var b=[],c,d,f;if(a.nodeName)c=this.s.dt.oInstance.fnGetPosition(a),b.push(this.s.dt.aoData[c]);else if("undefined"!==typeof a.length){d=0;for(f=a.length;d<f;d++)a[d].nodeName?(c=this.s.dt.oInstance.fnGetPosition(a[d]),b.push(this.s.dt.aoData[c])):"number"===typeof a[d]?b.push(this.s.dt.aoData[a[d]]):b.push(a[d])}else b.push(a);return b},_fnTextConfig:function(a,b){var c=this;null!==b.fnInit&&b.fnInit.call(this,a,b);""!==
b.sToolTip&&(a.title=b.sToolTip);e(a).hover(function(){b.fnMouseover!==null&&b.fnMouseover.call(this,a,b,null)},function(){b.fnMouseout!==null&&b.fnMouseout.call(this,a,b,null)});null!==b.fnSelect&&TableTools._fnEventListen(this,"select",function(d){b.fnSelect.call(c,a,b,d)});e(a).click(function(d){b.fnClick!==null&&b.fnClick.call(c,a,b,null,d);b.fnComplete!==null&&b.fnComplete.call(c,a,b,null,null);c._fnCollectionHide(a,b)})},_fnFlashConfig:function(a,b){var c=this,d=new ZeroClipboard_TableTools.Client;
null!==b.fnInit&&b.fnInit.call(this,a,b);d.setHandCursor(!0);"flash_save"==b.sAction?(d.setAction("save"),d.setCharSet("utf16le"==b.sCharSet?"UTF16LE":"UTF8"),d.setBomInc(b.bBomInc),d.setFileName(b.sFileName.replace("*",this.fnGetTitle(b)))):"flash_pdf"==b.sAction?(d.setAction("pdf"),d.setFileName(b.sFileName.replace("*",this.fnGetTitle(b)))):d.setAction("copy");d.addEventListener("mouseOver",function(){b.fnMouseover!==null&&b.fnMouseover.call(c,a,b,d)});d.addEventListener("mouseOut",function(){b.fnMouseout!==
null&&b.fnMouseout.call(c,a,b,d)});d.addEventListener("mouseDown",function(){b.fnClick!==null&&b.fnClick.call(c,a,b,d)});d.addEventListener("complete",function(f,e){b.fnComplete!==null&&b.fnComplete.call(c,a,b,d,e);c._fnCollectionHide(a,b)});this._fnFlashGlue(d,a,b.sToolTip)},_fnFlashGlue:function(a,b,c){var d=this,f=b.getAttribute("id");g.getElementById(f)?a.glue(b,c):setTimeout(function(){d._fnFlashGlue(a,b,c)},100)},_fnFlashSetText:function(a,b){var c=this._fnChunkData(b,8192);a.clearText();for(var d=
0,f=c.length;d<f;d++)a.appendText(c[d])},_fnColumnTargets:function(a){var b=[],c=this.s.dt;if("object"==typeof a){i=0;for(iLen=c.aoColumns.length;i<iLen;i++)b.push(!1);i=0;for(iLen=a.length;i<iLen;i++)b[a[i]]=!0}else if("visible"==a){i=0;for(iLen=c.aoColumns.length;i<iLen;i++)b.push(c.aoColumns[i].bVisible?!0:!1)}else if("hidden"==a){i=0;for(iLen=c.aoColumns.length;i<iLen;i++)b.push(c.aoColumns[i].bVisible?!1:!0)}else if("sortable"==a){i=0;for(iLen=c.aoColumns.length;i<iLen;i++)b.push(c.aoColumns[i].bSortable?
!0:!1)}else{i=0;for(iLen=c.aoColumns.length;i<iLen;i++)b.push(!0)}return b},_fnNewline:function(a){return"auto"==a.sNewLine?navigator.userAgent.match(/Windows/)?"\r\n":"\n":a.sNewLine},_fnGetDataTablesData:function(a){var b,c,d,f,j,g=[],h="",k=this.s.dt,o,l=RegExp(a.sFieldBoundary,"g"),n=this._fnColumnTargets(a.mColumns);d="undefined"!=typeof a.bSelectedOnly?a.bSelectedOnly:!1;if(a.bHeader){j=[];b=0;for(c=k.aoColumns.length;b<c;b++)n[b]&&(h=k.aoColumns[b].sTitle.replace(/\n/g," ").replace(/<.*?>/g,
"").replace(/^\s+|\s+$/g,""),h=this._fnHtmlDecode(h),j.push(this._fnBoundData(h,a.sFieldBoundary,l)));g.push(j.join(a.sFieldSeperator))}var p=k.aiDisplay;f=this.fnGetSelected();if("none"!==this.s.select.type&&d&&0!==f.length){p=[];b=0;for(c=f.length;b<c;b++)p.push(k.oInstance.fnGetPosition(f[b]))}d=0;for(f=p.length;d<f;d++){o=k.aoData[p[d]].nTr;j=[];b=0;for(c=k.aoColumns.length;b<c;b++)n[b]&&(h=k.oApi._fnGetCellData(k,p[d],b,"display"),a.fnCellRender?h=a.fnCellRender(h,b,o,p[d])+"":"string"==typeof h?
(h=h.replace(/\n/g," "),h=h.replace(/<img.*?\s+alt\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s>]+)).*?>/gi,"$1$2$3"),h=h.replace(/<.*?>/g,"")):h+="",h=h.replace(/^\s+/,"").replace(/\s+$/,""),h=this._fnHtmlDecode(h),j.push(this._fnBoundData(h,a.sFieldBoundary,l)));g.push(j.join(a.sFieldSeperator));a.bOpenRows&&(b=e.grep(k.aoOpenRows,function(a){return a.nParent===o}),1===b.length&&(h=this._fnBoundData(e("td",b[0].nTr).html(),a.sFieldBoundary,l),g.push(h)))}if(a.bFooter&&null!==k.nTFoot){j=[];b=0;for(c=k.aoColumns.length;b<
c;b++)n[b]&&null!==k.aoColumns[b].nTf&&(h=k.aoColumns[b].nTf.innerHTML.replace(/\n/g," ").replace(/<.*?>/g,""),h=this._fnHtmlDecode(h),j.push(this._fnBoundData(h,a.sFieldBoundary,l)));g.push(j.join(a.sFieldSeperator))}return _sLastData=g.join(this._fnNewline(a))},_fnBoundData:function(a,b,c){return""===b?a:b+a.replace(c,b+b)+b},_fnChunkData:function(a,b){for(var c=[],d=a.length,f=0;f<d;f+=b)f+b<d?c.push(a.substring(f,f+b)):c.push(a.substring(f,d));return c},_fnHtmlDecode:function(a){if(-1===a.indexOf("&"))return a;
var b=g.createElement("div");return a.replace(/&([^\s]*);/g,function(a,d){if("#"===a.substr(1,1))return String.fromCharCode(Number(d.substr(1)));b.innerHTML=a;return b.childNodes[0].nodeValue})},_fnPrintStart:function(a){var b=this,c=this.s.dt;this._fnPrintHideNodes(c.nTable);this.s.print.saveStart=c._iDisplayStart;this.s.print.saveLength=c._iDisplayLength;a.bShowAll&&(c._iDisplayStart=0,c._iDisplayLength=-1,c.oApi._fnCalculateEnd(c),c.oApi._fnDraw(c));if(""!==c.oScroll.sX||""!==c.oScroll.sY)this._fnPrintScrollStart(c),
e(this.s.dt.nTable).bind("draw.DTTT_Print",function(){b._fnPrintScrollStart(c)});var d=c.aanFeatures,f;for(f in d)if("i"!=f&&"t"!=f&&1==f.length)for(var j=0,m=d[f].length;j<m;j++)this.dom.print.hidden.push({node:d[f][j],display:"block"}),d[f][j].style.display="none";e(g.body).addClass(this.classes.print.body);""!==a.sInfo&&this.fnInfo(a.sInfo,3E3);a.sMessage&&(this.dom.print.message=g.createElement("div"),this.dom.print.message.className=this.classes.print.message,this.dom.print.message.innerHTML=
a.sMessage,g.body.insertBefore(this.dom.print.message,g.body.childNodes[0]));this.s.print.saveScroll=e(n).scrollTop();n.scrollTo(0,0);e(g).bind("keydown.DTTT",function(a){if(a.keyCode==27){a.preventDefault();b._fnPrintEnd.call(b,a)}})},_fnPrintEnd:function(){var a=this.s.dt,b=this.s.print,c=this.dom.print;this._fnPrintShowNodes();if(""!==a.oScroll.sX||""!==a.oScroll.sY)e(this.s.dt.nTable).unbind("draw.DTTT_Print"),this._fnPrintScrollEnd();n.scrollTo(0,b.saveScroll);null!==c.message&&(g.body.removeChild(c.message),
c.message=null);e(g.body).removeClass("DTTT_Print");a._iDisplayStart=b.saveStart;a._iDisplayLength=b.saveLength;a.oApi._fnCalculateEnd(a);a.oApi._fnDraw(a);e(g).unbind("keydown.DTTT")},_fnPrintScrollStart:function(){var a=this.s.dt;a.nScrollHead.getElementsByTagName("div")[0].getElementsByTagName("table");var b=a.nTable.parentNode,c=a.nTable.getElementsByTagName("thead");0<c.length&&a.nTable.removeChild(c[0]);null!==a.nTFoot&&(c=a.nTable.getElementsByTagName("tfoot"),0<c.length&&a.nTable.removeChild(c[0]));
c=a.nTHead.cloneNode(!0);a.nTable.insertBefore(c,a.nTable.childNodes[0]);null!==a.nTFoot&&(c=a.nTFoot.cloneNode(!0),a.nTable.insertBefore(c,a.nTable.childNodes[1]));""!==a.oScroll.sX&&(a.nTable.style.width=e(a.nTable).outerWidth()+"px",b.style.width=e(a.nTable).outerWidth()+"px",b.style.overflow="visible");""!==a.oScroll.sY&&(b.style.height=e(a.nTable).outerHeight()+"px",b.style.overflow="visible")},_fnPrintScrollEnd:function(){var a=this.s.dt,b=a.nTable.parentNode;""!==a.oScroll.sX&&(b.style.width=
a.oApi._fnStringToCss(a.oScroll.sX),b.style.overflow="auto");""!==a.oScroll.sY&&(b.style.height=a.oApi._fnStringToCss(a.oScroll.sY),b.style.overflow="auto")},_fnPrintShowNodes:function(){for(var a=this.dom.print.hidden,b=0,c=a.length;b<c;b++)a[b].node.style.display=a[b].display;a.splice(0,a.length)},_fnPrintHideNodes:function(a){for(var b=this.dom.print.hidden,c=a.parentNode,d=c.childNodes,f=0,g=d.length;f<g;f++)if(d[f]!=a&&1==d[f].nodeType){var m=e(d[f]).css("display");"none"!=m&&(b.push({node:d[f],
display:m}),d[f].style.display="none")}"BODY"!=c.nodeName&&this._fnPrintHideNodes(c)}};TableTools._aInstances=[];TableTools._aListeners=[];TableTools.fnGetMasters=function(){for(var a=[],b=0,c=TableTools._aInstances.length;b<c;b++)TableTools._aInstances[b].s.master&&a.push(TableTools._aInstances[b]);return a};TableTools.fnGetInstance=function(a){"object"!=typeof a&&(a=g.getElementById(a));for(var b=0,c=TableTools._aInstances.length;b<c;b++)if(TableTools._aInstances[b].s.master&&TableTools._aInstances[b].dom.table==
a)return TableTools._aInstances[b];return null};TableTools._fnEventListen=function(a,b,c){TableTools._aListeners.push({that:a,type:b,fn:c})};TableTools._fnEventDispatch=function(a,b,c,d){for(var f=TableTools._aListeners,e=0,g=f.length;e<g;e++)a.dom.table==f[e].that.dom.table&&f[e].type==b&&f[e].fn(c,d)};TableTools.buttonBase={sAction:"text",sTag:"default",sLinerTag:"default",sButtonClass:"DTTT_button_text",sButtonText:"Button text",sTitle:"",sToolTip:"",sCharSet:"utf8",bBomInc:!1,sFileName:"*.csv",
sFieldBoundary:"",sFieldSeperator:"\t",sNewLine:"auto",mColumns:"all",bHeader:!0,bFooter:!0,bOpenRows:!1,bSelectedOnly:!1,fnMouseover:null,fnMouseout:null,fnClick:null,fnSelect:null,fnComplete:null,fnInit:null,fnCellRender:null};TableTools.BUTTONS={csv:e.extend({},TableTools.buttonBase,{sAction:"flash_save",sButtonClass:"DTTT_button_csv",sButtonText:"CSV",sFieldBoundary:'"',sFieldSeperator:",",fnClick:function(a,b,c){this.fnSetText(c,this.fnGetTableData(b))}}),xls:e.extend({},TableTools.buttonBase,
{sAction:"flash_save",sCharSet:"utf16le",bBomInc:!0,sButtonClass:"DTTT_button_xls",sButtonText:"Excel",fnClick:function(a,b,c){this.fnSetText(c,this.fnGetTableData(b))}}),copy:e.extend({},TableTools.buttonBase,{sAction:"flash_copy",sButtonClass:"DTTT_button_copy",sButtonText:"Copy",fnClick:function(a,b,c){this.fnSetText(c,this.fnGetTableData(b))},fnComplete:function(a,b,c,d){a=d.split("\n").length;a=null===this.s.dt.nTFoot?a-1:a-2;this.fnInfo("<h6>Table copied</h6><p>Copied "+a+" row"+(1==a?"":"s")+
" to the clipboard.</p>",1500)}}),pdf:e.extend({},TableTools.buttonBase,{sAction:"flash_pdf",sNewLine:"\n",sFileName:"*.pdf",sButtonClass:"DTTT_button_pdf",sButtonText:"PDF",sPdfOrientation:"portrait",sPdfSize:"A4",sPdfMessage:"",fnClick:function(a,b,c){this.fnSetText(c,"title:"+this.fnGetTitle(b)+"\nmessage:"+b.sPdfMessage+"\ncolWidth:"+this.fnCalcColRatios(b)+"\norientation:"+b.sPdfOrientation+"\nsize:"+b.sPdfSize+"\n--/TableToolsOpts--\n"+this.fnGetTableData(b))}}),print:e.extend({},TableTools.buttonBase,
{sInfo:"<h6>Print view</h6><p>Please use your browser's print function to print this table. Press escape when finished.",sMessage:null,bShowAll:!0,sToolTip:"View print view",sButtonClass:"DTTT_button_print",sButtonText:"Print",fnClick:function(a,b){this.fnPrint(!0,b)}}),text:e.extend({},TableTools.buttonBase),select:e.extend({},TableTools.buttonBase,{sButtonText:"Select button",fnSelect:function(a){0!==this.fnGetSelected().length?e(a).removeClass(this.classes.buttons.disabled):e(a).addClass(this.classes.buttons.disabled)},
fnInit:function(a){e(a).addClass(this.classes.buttons.disabled)}}),select_single:e.extend({},TableTools.buttonBase,{sButtonText:"Select button",fnSelect:function(a){1==this.fnGetSelected().length?e(a).removeClass(this.classes.buttons.disabled):e(a).addClass(this.classes.buttons.disabled)},fnInit:function(a){e(a).addClass(this.classes.buttons.disabled)}}),select_all:e.extend({},TableTools.buttonBase,{sButtonText:"Select all",fnClick:function(){this.fnSelectAll()},fnSelect:function(a){this.fnGetSelected().length==
this.s.dt.fnRecordsDisplay()?e(a).addClass(this.classes.buttons.disabled):e(a).removeClass(this.classes.buttons.disabled)}}),select_none:e.extend({},TableTools.buttonBase,{sButtonText:"Deselect all",fnClick:function(){this.fnSelectNone()},fnSelect:function(a){0!==this.fnGetSelected().length?e(a).removeClass(this.classes.buttons.disabled):e(a).addClass(this.classes.buttons.disabled)},fnInit:function(a){e(a).addClass(this.classes.buttons.disabled)}}),ajax:e.extend({},TableTools.buttonBase,{sAjaxUrl:"/xhr.php",
sButtonText:"Ajax button",fnClick:function(a,b){var c=this.fnGetTableData(b);e.ajax({url:b.sAjaxUrl,data:[{name:"tableData",value:c}],success:b.fnAjaxComplete,dataType:"json",type:"POST",cache:!1,error:function(){alert("Error detected when sending table data to server")}})},fnAjaxComplete:function(){alert("Ajax complete")}}),div:e.extend({},TableTools.buttonBase,{sAction:"div",sTag:"div",sButtonClass:"DTTT_nonbutton",sButtonText:"Text button"}),collection:e.extend({},TableTools.buttonBase,{sAction:"collection",
sButtonClass:"DTTT_button_collection",sButtonText:"Collection",fnClick:function(a,b){this._fnCollectionShow(a,b)}})};TableTools.classes={container:"DTTT_container",buttons:{normal:"DTTT_button",disabled:"DTTT_disabled"},collection:{container:"DTTT_collection",background:"DTTT_collection_background",buttons:{normal:"DTTT_button",disabled:"DTTT_disabled"}},select:{table:"DTTT_selectable",row:"DTTT_selected"},print:{body:"DTTT_Print",info:"DTTT_print_info",message:"DTTT_PrintMessage"}};TableTools.classes_themeroller=
{container:"DTTT_container ui-buttonset ui-buttonset-multi",buttons:{normal:"DTTT_button ui-button ui-state-default"},collection:{container:"DTTT_collection ui-buttonset ui-buttonset-multi"}};TableTools.DEFAULTS={sSwfPath:"media/swf/copy_csv_xls_pdf.swf",sRowSelect:"none",sSelectedClass:null,fnPreRowSelect:null,fnRowSelected:null,fnRowDeselected:null,aButtons:["copy","csv","xls","pdf","print"],oTags:{container:"div",button:"a",liner:"span",collection:{container:"div",button:"a",liner:"span"}}};TableTools.prototype.CLASS=
"TableTools";TableTools.VERSION="2.1.5";TableTools.prototype.VERSION=TableTools.VERSION;"function"==typeof e.fn.dataTable&&"function"==typeof e.fn.dataTableExt.fnVersionCheck&&e.fn.dataTableExt.fnVersionCheck("1.9.0")?e.fn.dataTableExt.aoFeatures.push({fnInit:function(a){a=new TableTools(a.oInstance,"undefined"!=typeof a.oInit.oTableTools?a.oInit.oTableTools:{});TableTools._aInstances.push(a);return a.dom.container},cFeature:"T",sFeature:"TableTools"}):alert("Warning: TableTools 2 requires DataTables 1.9.0 or newer - www.datatables.net/download");
e.fn.DataTable.TableTools=TableTools})(jQuery,window,document);
| bsd-3-clause |
Workday/OpenFrame | printing/metafile_skia_wrapper.cc | 1241 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "printing/metafile_skia_wrapper.h"
#include "skia/ext/platform_canvas.h"
#include "skia/ext/refptr.h"
#include "third_party/skia/include/core/SkMetaData.h"
namespace printing {
namespace {

// Key under which the metafile wrapper is stored in a canvas' SkMetaData.
const char* kMetafileKey = "CrMetafile";

}  // namespace
// static
void MetafileSkiaWrapper::SetMetafileOnCanvas(const SkCanvas& canvas,
PdfMetafileSkia* metafile) {
skia::RefPtr<MetafileSkiaWrapper> wrapper;
if (metafile)
wrapper = skia::AdoptRef(new MetafileSkiaWrapper(metafile));
SkMetaData& meta = skia::GetMetaData(canvas);
meta.setRefCnt(kMetafileKey, wrapper.get());
}
// static
// Returns the metafile previously attached with SetMetafileOnCanvas(), or
// NULL when no (non-empty) wrapper is stored on |canvas|.
PdfMetafileSkia* MetafileSkiaWrapper::GetMetafileFromCanvas(
    const SkCanvas& canvas) {
  SkRefCnt* stored = NULL;
  if (!skia::GetMetaData(canvas).findRefCnt(kMetafileKey, &stored))
    return NULL;
  if (!stored)
    return NULL;
  return static_cast<MetafileSkiaWrapper*>(stored)->metafile_;
}
// The wrapper stores only a raw pointer; the metafile is expected to outlive
// any canvas entry that references this wrapper.
MetafileSkiaWrapper::MetafileSkiaWrapper(PdfMetafileSkia* metafile)
    : metafile_(metafile) {
}
} // namespace printing
| bsd-3-clause |
amikey/chromium | libcef_dll/ctocpp/dictionary_value_ctocpp.cc | 12823 | // Copyright (c) 2015 The Chromium Embedded Framework Authors. All rights
// reserved. Use of this source code is governed by a BSD-style license that
// can be found in the LICENSE file.
//
// ---------------------------------------------------------------------------
//
// This file was generated by the CEF translator tool. If making changes by
// hand only do so within the body of existing method and function
// implementations. See the translator.README.txt file in the tools directory
// for more information.
//
#include "libcef_dll/ctocpp/binary_value_ctocpp.h"
#include "libcef_dll/ctocpp/dictionary_value_ctocpp.h"
#include "libcef_dll/ctocpp/list_value_ctocpp.h"
#include "libcef_dll/transfer_util.h"
// STATIC METHODS - Body may be edited by hand.
// Creates a new dictionary value via the CEF C API and wraps it for C++ use.
CefRefPtr<CefDictionaryValue> CefDictionaryValue::Create() {
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  cef_dictionary_value_t* _retval = cef_dictionary_value_create();

  // Return type: refptr_same
  return CefDictionaryValueCToCpp::Wrap(_retval);
}
// VIRTUAL METHODS - Body may be edited by hand.
// Returns true while the underlying C value handle is still usable. Defaults
// to false when the C-side struct does not provide the member (version skew).
bool CefDictionaryValueCToCpp::IsValid() {
  if (CEF_MEMBER_MISSING(struct_, is_valid))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  int _retval = struct_->is_valid(struct_);

  // Return type: bool
  return _retval?true:false;
}

// Returns true if this object owns its underlying data rather than holding a
// reference owned elsewhere.
bool CefDictionaryValueCToCpp::IsOwned() {
  if (CEF_MEMBER_MISSING(struct_, is_owned))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  int _retval = struct_->is_owned(struct_);

  // Return type: bool
  return _retval?true:false;
}

// Returns true if the dictionary's contents cannot be modified.
bool CefDictionaryValueCToCpp::IsReadOnly() {
  if (CEF_MEMBER_MISSING(struct_, is_read_only))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  int _retval = struct_->is_read_only(struct_);

  // Return type: bool
  return _retval?true:false;
}
// Returns a copy of this dictionary. |exclude_empty_children| presumably
// drops empty child dictionaries/lists -- see the CefDictionaryValue API
// docs for the exact contract.
CefRefPtr<CefDictionaryValue> CefDictionaryValueCToCpp::Copy(
    bool exclude_empty_children) {
  if (CEF_MEMBER_MISSING(struct_, copy))
    return NULL;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  cef_dictionary_value_t* _retval = struct_->copy(struct_,
      exclude_empty_children);

  // Return type: refptr_same
  return CefDictionaryValueCToCpp::Wrap(_retval);
}

// Returns the number of entries in the dictionary (0 if unavailable).
size_t CefDictionaryValueCToCpp::GetSize() {
  if (CEF_MEMBER_MISSING(struct_, get_size))
    return 0;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  size_t _retval = struct_->get_size(struct_);

  // Return type: simple
  return _retval;
}

// Removes all entries; the int result from the C side is mapped to bool.
bool CefDictionaryValueCToCpp::Clear() {
  if (CEF_MEMBER_MISSING(struct_, clear))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Execute
  int _retval = struct_->clear(struct_);

  // Return type: bool
  return _retval?true:false;
}
// Returns true if |key| is present. An empty key is rejected up front.
bool CefDictionaryValueCToCpp::HasKey(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, has_key))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->has_key(struct_,
      key.GetStruct());

  // Return type: bool
  return _retval?true:false;
}

// Fills |keys| with all key names. The vector round-trips through a
// cef_string_list_t (allocated, passed to C, copied back, freed).
bool CefDictionaryValueCToCpp::GetKeys(KeyList& keys) {
  if (CEF_MEMBER_MISSING(struct_, get_keys))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Translate param: keys; type: string_vec_byref
  cef_string_list_t keysList = cef_string_list_alloc();
  DCHECK(keysList);
  if (keysList)
    transfer_string_list_contents(keys, keysList);

  // Execute
  int _retval = struct_->get_keys(struct_,
      keysList);

  // Restore param:keys; type: string_vec_byref
  if (keysList) {
    keys.clear();
    transfer_string_list_contents(keysList, keys);
    cef_string_list_free(keysList);
  }

  // Return type: bool
  return _retval?true:false;
}

// Removes the entry named |key|, reporting success as bool.
bool CefDictionaryValueCToCpp::Remove(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, remove))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->remove(struct_,
      key.GetStruct());

  // Return type: bool
  return _retval?true:false;
}

// Returns the value type stored at |key|, or VTYPE_INVALID when the member
// is missing or the key is empty.
CefValueType CefDictionaryValueCToCpp::GetType(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_type))
    return VTYPE_INVALID;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return VTYPE_INVALID;

  // Execute
  cef_value_type_t _retval = struct_->get_type(struct_,
      key.GetStruct());

  // Return type: simple
  return _retval;
}
// Typed getters. Each validates that the C struct provides the member and
// that |key| is non-empty, then forwards to the C API. On failure they
// return a type-appropriate default (false / 0 / empty string / NULL).

bool CefDictionaryValueCToCpp::GetBool(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_bool))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->get_bool(struct_,
      key.GetStruct());

  // Return type: bool
  return _retval?true:false;
}

int CefDictionaryValueCToCpp::GetInt(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_int))
    return 0;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return 0;

  // Execute
  int _retval = struct_->get_int(struct_,
      key.GetStruct());

  // Return type: simple
  return _retval;
}

double CefDictionaryValueCToCpp::GetDouble(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_double))
    return 0;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return 0;

  // Execute
  double _retval = struct_->get_double(struct_,
      key.GetStruct());

  // Return type: simple
  return _retval;
}

// Note: the C API returns a userfree string; AttachToUserFree() takes
// ownership so it is released when |_retvalStr| goes out of scope.
CefString CefDictionaryValueCToCpp::GetString(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_string))
    return CefString();
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return CefString();

  // Execute
  cef_string_userfree_t _retval = struct_->get_string(struct_,
      key.GetStruct());

  // Return type: string
  CefString _retvalStr;
  _retvalStr.AttachToUserFree(_retval);
  return _retvalStr;
}

CefRefPtr<CefBinaryValue> CefDictionaryValueCToCpp::GetBinary(
    const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_binary))
    return NULL;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return NULL;

  // Execute
  cef_binary_value_t* _retval = struct_->get_binary(struct_,
      key.GetStruct());

  // Return type: refptr_same
  return CefBinaryValueCToCpp::Wrap(_retval);
}

CefRefPtr<CefDictionaryValue> CefDictionaryValueCToCpp::GetDictionary(
    const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_dictionary))
    return NULL;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return NULL;

  // Execute
  cef_dictionary_value_t* _retval = struct_->get_dictionary(struct_,
      key.GetStruct());

  // Return type: refptr_same
  return CefDictionaryValueCToCpp::Wrap(_retval);
}

CefRefPtr<CefListValue> CefDictionaryValueCToCpp::GetList(
    const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, get_list))
    return NULL;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return NULL;

  // Execute
  cef_list_value_t* _retval = struct_->get_list(struct_,
      key.GetStruct());

  // Return type: refptr_same
  return CefListValueCToCpp::Wrap(_retval);
}
// Typed setters. Each rejects empty keys (and, for ref-counted values, NULL
// values), then forwards to the C API; the int result is mapped to bool.
// Ref-counted values are Unwrap()ed back to their C struct form.

bool CefDictionaryValueCToCpp::SetNull(const CefString& key) {
  if (CEF_MEMBER_MISSING(struct_, set_null))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->set_null(struct_,
      key.GetStruct());

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetBool(const CefString& key, bool value) {
  if (CEF_MEMBER_MISSING(struct_, set_bool))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->set_bool(struct_,
      key.GetStruct(),
      value);

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetInt(const CefString& key, int value) {
  if (CEF_MEMBER_MISSING(struct_, set_int))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->set_int(struct_,
      key.GetStruct(),
      value);

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetDouble(const CefString& key, double value) {
  if (CEF_MEMBER_MISSING(struct_, set_double))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;

  // Execute
  int _retval = struct_->set_double(struct_,
      key.GetStruct(),
      value);

  // Return type: bool
  return _retval?true:false;
}

// |value| is deliberately not validated: an empty string is a legal value.
bool CefDictionaryValueCToCpp::SetString(const CefString& key,
    const CefString& value) {
  if (CEF_MEMBER_MISSING(struct_, set_string))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;
  // Unverified params: value

  // Execute
  int _retval = struct_->set_string(struct_,
      key.GetStruct(),
      value.GetStruct());

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetBinary(const CefString& key,
    CefRefPtr<CefBinaryValue> value) {
  if (CEF_MEMBER_MISSING(struct_, set_binary))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;
  // Verify param: value; type: refptr_same
  DCHECK(value.get());
  if (!value.get())
    return false;

  // Execute
  int _retval = struct_->set_binary(struct_,
      key.GetStruct(),
      CefBinaryValueCToCpp::Unwrap(value));

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetDictionary(const CefString& key,
    CefRefPtr<CefDictionaryValue> value) {
  if (CEF_MEMBER_MISSING(struct_, set_dictionary))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;
  // Verify param: value; type: refptr_same
  DCHECK(value.get());
  if (!value.get())
    return false;

  // Execute
  int _retval = struct_->set_dictionary(struct_,
      key.GetStruct(),
      CefDictionaryValueCToCpp::Unwrap(value));

  // Return type: bool
  return _retval?true:false;
}

bool CefDictionaryValueCToCpp::SetList(const CefString& key,
    CefRefPtr<CefListValue> value) {
  if (CEF_MEMBER_MISSING(struct_, set_list))
    return false;
  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING

  // Verify param: key; type: string_byref_const
  DCHECK(!key.empty());
  if (key.empty())
    return false;
  // Verify param: value; type: refptr_same
  DCHECK(value.get());
  if (!value.get())
    return false;

  // Execute
  int _retval = struct_->set_list(struct_,
      key.GetStruct(),
      CefListValueCToCpp::Unwrap(value));

  // Return type: bool
  return _retval?true:false;
}
#ifndef NDEBUG
// Live-object counter used for leak checking in debug builds only.
template<> base::AtomicRefCount CefCToCpp<CefDictionaryValueCToCpp,
    CefDictionaryValue, cef_dictionary_value_t>::DebugObjCt = 0;
#endif
| bsd-3-clause |
vsekhar/elastic-go | src/cmd/compile/internal/ssa/passbm_test.go | 3205 | // Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssa
import (
"cmd/compile/internal/types"
"fmt"
"testing"
)
// blockCount is the size, in basic blocks, of the generated function used by
// the per-function benchmarks; passCount is how many times the per-block
// benchmarks re-run a pass over a function with b.N blocks.
const (
	blockCount = 1000
	passCount  = 15000
)

// passFunc is the common signature of the SSA passes benchmarked below.
type passFunc func(*Func)

// Single-pass benchmarks: the "Pass" variants run the pass b.N times over a
// fixed-size function; the "PassBlock" variants run it a fixed number of
// times over a function with b.N blocks.
func BenchmarkDSEPass(b *testing.B)           { benchFnPass(b, dse, blockCount, genFunction) }
func BenchmarkDSEPassBlock(b *testing.B)      { benchFnBlock(b, dse, genFunction) }
func BenchmarkCSEPass(b *testing.B)           { benchFnPass(b, cse, blockCount, genFunction) }
func BenchmarkCSEPassBlock(b *testing.B)      { benchFnBlock(b, cse, genFunction) }
func BenchmarkDeadcodePass(b *testing.B)      { benchFnPass(b, deadcode, blockCount, genFunction) }
func BenchmarkDeadcodePassBlock(b *testing.B) { benchFnBlock(b, deadcode, genFunction) }
// multi applies cse, dse and deadcode in sequence, approximating a small
// optimization pipeline.
func multi(f *Func) {
	cse(f)
	dse(f)
	deadcode(f)
}

func BenchmarkMultiPass(b *testing.B)      { benchFnPass(b, multi, blockCount, genFunction) }
func BenchmarkMultiPassBlock(b *testing.B) { benchFnBlock(b, multi, genFunction) }
// benchFnPass runs passFunc b.N times across a single function of fixed
// size. The function is re-verified after every run with the timer stopped,
// so verification cost does not pollute the measurement.
func benchFnPass(b *testing.B, fn passFunc, size int, bg blockGen) {
	b.ReportAllocs()
	c := testConfig(b)
	fun := c.Fun("entry", bg(size)...)
	CheckFunc(fun.f)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		fn(fun.f)
		b.StopTimer()
		CheckFunc(fun.f)
		b.StartTimer()
	}
}
// benchFnBlock runs passFunc a fixed number of times (passCount) across a
// function with b.N blocks, measuring how the pass scales with function
// size. (The comment previously named benchFnPass by mistake.)
func benchFnBlock(b *testing.B, fn passFunc, bg blockGen) {
	b.ReportAllocs()
	c := testConfig(b)
	fun := c.Fun("entry", bg(b.N)...)
	CheckFunc(fun.f)
	b.ResetTimer()
	for i := 0; i < passCount; i++ {
		fn(fun.f)
	}
	b.StopTimer()
}
// genFunction returns block descriptions for a straight-line function of
// `size` blocks. Each block stores a constant bool through several aliasing
// addresses (including a redundant store and a Zero), giving dse, cse and
// deadcode realistic work to chew on.
func genFunction(size int) []bloc {
	var blocs []bloc
	elemType := types.Types[types.TINT64]
	ptrType := elemType.PtrTo()

	// valn builds value names of the form "<s><m>-<n>", e.g. "store1-4".
	valn := func(s string, m, n int) string { return fmt.Sprintf("%s%d-%d", s, m, n) }
	blocs = append(blocs,
		Bloc("entry",
			Valu(valn("store", 0, 4), OpInitMem, types.TypeMem, 0, nil),
			Valu("sb", OpSB, types.TypeInvalid, 0, nil),
			Goto(blockn(1)),
		),
	)
	for i := 1; i < size+1; i++ {
		blocs = append(blocs, Bloc(blockn(i),
			Valu(valn("v", i, 0), OpConstBool, types.Types[types.TBOOL], 1, nil),
			Valu(valn("addr", i, 1), OpAddr, ptrType, 0, nil, "sb"),
			Valu(valn("addr", i, 2), OpAddr, ptrType, 0, nil, "sb"),
			Valu(valn("addr", i, 3), OpAddr, ptrType, 0, nil, "sb"),
			Valu(valn("zero", i, 1), OpZero, types.TypeMem, 8, elemType, valn("addr", i, 3),
				valn("store", i-1, 4)),
			Valu(valn("store", i, 1), OpStore, types.TypeMem, 0, elemType, valn("addr", i, 1),
				valn("v", i, 0), valn("zero", i, 1)),
			Valu(valn("store", i, 2), OpStore, types.TypeMem, 0, elemType, valn("addr", i, 2),
				valn("v", i, 0), valn("store", i, 1)),
			Valu(valn("store", i, 3), OpStore, types.TypeMem, 0, elemType, valn("addr", i, 1),
				valn("v", i, 0), valn("store", i, 2)),
			Valu(valn("store", i, 4), OpStore, types.TypeMem, 0, elemType, valn("addr", i, 3),
				valn("v", i, 0), valn("store", i, 3)),
			Goto(blockn(i+1))))
	}
	blocs = append(blocs,
		Bloc(blockn(size+1), Goto("exit")),
		Bloc("exit", Exit("store0-4")),
	)
	return blocs
}
| bsd-3-clause |
luizvarela/grafos_java | src/prefuse/action/assignment/DataShapeAction.java | 4385 | package prefuse.action.assignment;
import java.util.Map;
import prefuse.Constants;
import prefuse.data.tuple.TupleSet;
import prefuse.util.DataLib;
import prefuse.visual.VisualItem;
/**
* <p>
* Assignment Action that assigns shape values for a group of items based upon
* a data field. Shape values are simple integer codes that indicate to
* appropriate renderer instances what shape should be drawn. The default
* list of shape values is included in the {@link prefuse.Constants} class,
* all beginning with the prefix <code>SHAPE</code>. Of course, clients can
* always create their own shape codes that are handled by a custom Renderer.
* </p>
*
* <p>The data field will be assumed to be nominal, and shapes will
* be assigned to unique values in the order they are encountered. Note that
* if the number of unique values is greater than
* {@link prefuse.Constants#SHAPE_COUNT} (when no palette is given) or
* the length of a specified palette, then duplicate shapes will start
* being assigned.</p>
*
* <p>This Action only sets the shape field of the VisualItem. For this value
* to have an effect, a renderer instance that takes this shape value
* into account must be used (e.g., {@link prefuse.render.ShapeRenderer}).
* </p>
*
* @author <a href="http://jheer.org">jeffrey heer</a>
*/
public class DataShapeAction extends ShapeAction {

    /** Sentinel meaning "no cascaded rule matched this item". */
    protected static final int NO_SHAPE = Integer.MIN_VALUE;

    /** Name of the data field driving the shape encoding. */
    protected String m_dataField;
    /** Optional client-supplied shape palette; null means the built-in codes. */
    protected int[] m_palette;
    /** Maps each unique field value to its ordinal position. */
    protected Map m_ordinalMap;

    /**
     * Create a new DataShapeAction that encodes with the default
     * {@link prefuse.Constants} SHAPE codes.
     * @param group the data group to process
     * @param field the data field to base shape assignments on
     */
    public DataShapeAction(String group, String field) {
        super(group, NO_SHAPE);
        m_dataField = field;
    }

    /**
     * Create a new DataShapeAction that encodes with a custom palette.
     * @param group the data group to process
     * @param field the data field to base shape assignments on
     * @param palette shape codes to cycle through when encoding; by default
     * these are assumed to be integer SHAPE codes from
     * {@link prefuse.Constants}
     */
    public DataShapeAction(String group, String field, int[] palette) {
        this(group, field);
        m_palette = palette;
    }

    // ------------------------------------------------------------------------

    /**
     * Get the data field used to encode shape values.
     * @return the data field that is mapped to shape values
     */
    public String getDataField() {
        return m_dataField;
    }

    /**
     * Set the data field used to encode shape values.
     * @param field the data field to map to shape values
     */
    public void setDataField(String field) {
        m_dataField = field;
    }

    /**
     * Unsupported: a data-driven action derives every shape from the data,
     * so a default shape has no meaning here.
     * @see prefuse.action.assignment.ShapeAction#setDefaultShape(int)
     * @throws UnsupportedOperationException always
     */
    public void setDefaultShape(int defaultShape) {
        throw new UnsupportedOperationException();
    }

    // ------------------------------------------------------------------------

    /**
     * @see prefuse.action.EncoderAction#setup()
     */
    protected void setup() {
        // Rebuild the value -> ordinal index map each time the action runs.
        m_ordinalMap = DataLib.ordinalMap(m_vis.getGroup(m_group), m_dataField);
    }

    /**
     * @see prefuse.action.assignment.ShapeAction#getShape(prefuse.visual.VisualItem)
     */
    public int getShape(VisualItem item) {
        // Cascaded rules installed on this action take precedence.
        int cascaded = super.getShape(item);
        if (cascaded != NO_SHAPE) {
            return cascaded;
        }
        // Data-driven assignment: the ordinal index of the item's value,
        // wrapped around the palette (or built-in shape count) when there
        // are more unique values than shapes.
        Object value = item.get(m_dataField);
        int ordinal = ((Integer) m_ordinalMap.get(value)).intValue();
        return m_palette == null
                ? ordinal % Constants.SHAPE_COUNT
                : m_palette[ordinal % m_palette.length];
    }

} // end of class DataShapeAction
| bsd-3-clause |
miguelsousa/robofab | Scripts/RoboFabIntro/intro_Kerning.py | 2296 | #FLM: RoboFab Intro, Kerning
#
# demo of RoboFab kerning.
#
# NOTE: this will mess up the kerning in your test font.

from robofab.world import CurrentFont

# (make sure you have a font with some kerning opened in FontLab)
f = CurrentFont()

# If you are familiar with the way RoboFog handled kerning,
# you will feel right at home with RoboFab's kerning implementation.
# As in RoboFog, the kerning object walks like a dict and talks like a
# dict, but it's not a dict. It is a special object that has some features
# built specifically for working with kerning. Let's have a look!
kerning = f.kerning

# A general note about using the kerning object in FontLab. In FontLab,
# kerning data lives in individual glyphs, so to access it at the font level
# we must go through every glyph, gathering kerning pairs as we go. This
# process occurs each time you call font.kerning. So, to speed things up, it
# is best to reference it with an assignment, as above. This keeps it from
# being regenerated every time you access an attribute or make a change.

# kerning gives you access to some bits of global data
print "%s has %s kerning pairs"%(f.info.postscriptFullName, len(kerning))
print "the average kerning value is %s"%kerning.getAverage()
# NOTE(review): this assignment shadows the builtin min()/max() functions for
# the rest of the script; harmless here, but worth renaming at some point.
min, max = kerning.getExtremes()
print "the largest kerning value is %s"%max
print "the smallest kerning value is %s"%min

# ok, kerning.getExtremes() may be a little silly, but it could have its uses.

# kerning pairs are accessed as if you are working with a dict, keyed by
# (left glyph name, right glyph name)
kerning[('V', 'o')] = -14
print '(V, o)', kerning[('V', 'o')]

# if you want to go through all kerning pairs:
for pair in kerning:
	print pair, kerning[pair]

# kerning also has some useful methods. A few examples:
# scale all kerning!
print 'scaling...'
kerning.scale(100)
print "the average kerning value is %s"%kerning.getAverage()
min, max = kerning.getExtremes()
print "the largest kerning value is %s"%max
print "the smallest kerning value is %s"%min

# get a count of pairs that contain certain glyphs
print 'counting...'
count = kerning.occurrenceCount(['A', 'B', 'C'])
for glyphName in count.keys():
	print "%s: found in %s pairs"%(glyphName, count[glyphName])

# don't forget to update the font after you have made some changes!
f.update()
| bsd-3-clause |
manekinekko/angular | aio/content/examples/animations/src/app/app.component.1.ts | 417 | // #docplaster
import { Component, HostBinding } from '@angular/core';
@Component({
selector: 'app-root',
templateUrl: 'app.component.html',
styleUrls: ['app.component.css'],
animations: [
// animation triggers go here
]
})
export class AppComponent {
@HostBinding('@.disabled') public animationsDisabled = false;
toggleAnimations() {
this.animationsDisabled = !this.animationsDisabled;
}
}
| mit |
symfony/symfony | src/Symfony/Component/Config/Tests/Builder/Fixtures/NodeInitialValues.config.php | 427 | <?php
use Symfony\Config\NodeInitialValuesConfig;
return static function (NodeInitialValuesConfig $config) {
$config->someCleverName(['second' => 'foo'])->first('bar');
$config->messenger()
->transports('fast_queue', ['dsn' => 'sync://'])
->serializer('acme');
$config->messenger()
->transports('slow_queue')
->dsn('doctrine://')
->options(['table' => 'my_messages']);
};
| mit |
impedimentToProgress/Ratchet | llvm/lib/Transforms/IPO/PartialInlining.cpp | 6319 | //===- PartialInlining.cpp - Inline parts of functions --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs partial inlining, typically by inlining an if statement
// that surrounds the body of the function.
//
//===----------------------------------------------------------------------===//
#include "llvm/Transforms/IPO.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/CodeExtractor.h"
using namespace llvm;
#define DEBUG_TYPE "partialinlining"
STATISTIC(NumPartialInlined, "Number of functions partially inlined");
namespace {
  // Module pass that outlines the "body" of functions whose entry block is a
  // conditional branch with exactly one returning successor, so the cheap
  // entry test can be inlined into callers while the heavy path stays out.
  struct PartialInliner : public ModulePass {
    void getAnalysisUsage(AnalysisUsage &AU) const override { }
    static char ID; // Pass identification, replacement for typeid
    PartialInliner() : ModulePass(ID) {
      initializePartialInlinerPass(*PassRegistry::getPassRegistry());
    }
    bool runOnModule(Module& M) override;
  private:
    // Attempts the transformation on F; returns the outlined function on
    // success, or null if F is not a candidate.
    Function* unswitchFunction(Function* F);
  };
}
// Pass registration boilerplate and the public factory used by the pass
// manager builder.
char PartialInliner::ID = 0;
INITIALIZE_PASS(PartialInliner, "partial-inliner",
                "Partial Inliner", false, false)
ModulePass* llvm::createPartialInliningPass() { return new PartialInliner(); }
// Core transformation: given a function whose entry block ends in a
// conditional branch with exactly one directly-returning successor, clone the
// function, outline everything except the entry test and return block into a
// new function, and inline the remaining thin wrapper into all callers.
// Returns the outlined function, or null if F is not a candidate.
Function* PartialInliner::unswitchFunction(Function* F) {
  // First, verify that this function is an unswitching candidate...
  BasicBlock* entryBlock = F->begin();
  BranchInst *BR = dyn_cast<BranchInst>(entryBlock->getTerminator());
  if (!BR || BR->isUnconditional())
    return nullptr;
  // Exactly one successor of the entry must return immediately; the other
  // successor is the expensive path that gets outlined.
  BasicBlock* returnBlock = nullptr;
  BasicBlock* nonReturnBlock = nullptr;
  unsigned returnCount = 0;
  for (BasicBlock *BB : successors(entryBlock)) {
    if (isa<ReturnInst>(BB->getTerminator())) {
      returnBlock = BB;
      returnCount++;
    } else
      nonReturnBlock = BB;
  }
  if (returnCount != 1)
    return nullptr;
  // Clone the function, so that we can hack away on it.
  ValueToValueMapTy VMap;
  Function* duplicateFunction = CloneFunction(F, VMap,
                                             /*ModuleLevelChanges=*/false);
  duplicateFunction->setLinkage(GlobalValue::InternalLinkage);
  F->getParent()->getFunctionList().push_back(duplicateFunction);
  BasicBlock* newEntryBlock = cast<BasicBlock>(VMap[entryBlock]);
  BasicBlock* newReturnBlock = cast<BasicBlock>(VMap[returnBlock]);
  BasicBlock* newNonReturnBlock = cast<BasicBlock>(VMap[nonReturnBlock]);
  // Go ahead and update all uses to the duplicate, so that we can just
  // use the inliner functionality when we're done hacking.
  F->replaceAllUsesWith(duplicateFunction);
  // Special hackery is needed with PHI nodes that have inputs from more than
  // one extracted block. For simplicity, just split the PHIs into a two-level
  // sequence of PHIs, some of which will go in the extracted region, and some
  // of which will go outside.
  BasicBlock* preReturn = newReturnBlock;
  newReturnBlock = newReturnBlock->splitBasicBlock(
    newReturnBlock->getFirstNonPHI());
  BasicBlock::iterator I = preReturn->begin();
  BasicBlock::iterator Ins = newReturnBlock->begin();
  while (I != preReturn->end()) {
    PHINode* OldPhi = dyn_cast<PHINode>(I);
    if (!OldPhi) break;
    // Two-input PHI in the post-split block: one input from the extracted
    // region (via the old PHI) and one from the entry test.
    PHINode* retPhi = PHINode::Create(OldPhi->getType(), 2, "", Ins);
    OldPhi->replaceAllUsesWith(retPhi);
    Ins = newReturnBlock->getFirstNonPHI();
    retPhi->addIncoming(I, preReturn);
    retPhi->addIncoming(OldPhi->getIncomingValueForBlock(newEntryBlock),
                        newEntryBlock);
    OldPhi->removeIncomingValue(newEntryBlock);
    ++I;
  }
  newEntryBlock->getTerminator()->replaceUsesOfWith(preReturn, newReturnBlock);
  // Gather up the blocks that we're going to extract: everything except the
  // entry test and the (post-split) return block.
  std::vector<BasicBlock*> toExtract;
  toExtract.push_back(newNonReturnBlock);
  for (Function::iterator FI = duplicateFunction->begin(),
       FE = duplicateFunction->end(); FI != FE; ++FI)
    if (&*FI != newEntryBlock && &*FI != newReturnBlock &&
        &*FI != newNonReturnBlock)
      toExtract.push_back(FI);
  // The CodeExtractor needs a dominator tree.
  DominatorTree DT;
  DT.recalculate(*duplicateFunction);
  // Extract the body of the if.
  Function* extractedFunction
    = CodeExtractor(toExtract, &DT).extractCodeRegion();
  InlineFunctionInfo IFI;
  // Inline the top-level if test into all callers.
  std::vector<User *> Users(duplicateFunction->user_begin(),
                            duplicateFunction->user_end());
  for (std::vector<User*>::iterator UI = Users.begin(), UE = Users.end();
       UI != UE; ++UI)
    if (CallInst *CI = dyn_cast<CallInst>(*UI))
      InlineFunction(CI, IFI);
    else if (InvokeInst *II = dyn_cast<InvokeInst>(*UI))
      InlineFunction(II, IFI);
  // Ditch the duplicate, since we're done with it, and rewrite all remaining
  // users (function pointers, etc.) back to the original function.
  duplicateFunction->replaceAllUsesWith(F);
  duplicateFunction->eraseFromParent();
  ++NumPartialInlined;
  return extractedFunction;
}
// Drives partial inlining over the whole module using a worklist so that
// functions produced by the transformation are themselves reconsidered.
bool PartialInliner::runOnModule(Module& M) {
  // Seed the worklist with every defined function that is actually used.
  std::vector<Function*> worklist;
  worklist.reserve(M.size());
  for (Function &F : M)
    if (!F.use_empty() && !F.isDeclaration())
      worklist.push_back(&F);

  bool changed = false;
  while (!worklist.empty()) {
    Function *currFunc = worklist.back();
    worklist.pop_back();

    // Earlier transformations may have removed every use of this function.
    if (currFunc->use_empty())
      continue;

    // Skip directly recursive functions: this pass does not handle inlining
    // a function's entry test into the function itself.
    bool recursive = false;
    for (User *U : currFunc->users()) {
      Instruction *I = dyn_cast<Instruction>(U);
      if (I && I->getParent()->getParent() == currFunc) {
        recursive = true;
        break;
      }
    }
    if (recursive)
      continue;

    // A successful unswitch yields a freshly outlined function, which is a
    // new candidate in its own right.
    if (Function *newFunc = unswitchFunction(currFunc)) {
      worklist.push_back(newFunc);
      changed = true;
    }
  }
  return changed;
}
| mit |
benjaminhorner/codebutler | node_modules/grunt-bower/node_modules/bower/lib/core/resolvers/GitHubResolver.js | 3922 | var util = require('util');
var path = require('path');
var fs = require('graceful-fs');
var Q = require('q');
var mout = require('mout');
var request = require('request');
var progress = require('request-progress');
var replay = require('request-replay');
var GitRemoteResolver = require('./GitRemoteResolver');
var extract = require('../../util/extract');
var createError = require('../../util/createError');
/**
 * Resolver specialised for GitHub remotes.
 *
 * Extends GitRemoteResolver, additionally parsing the org/repo pair out of
 * the source URL so tag archives can be fetched directly from GitHub.
 *
 * @param {Object} decEndpoint Decomposed endpoint (source/target/name)
 * @param {Object} config      Bower config
 * @param {Logger} logger      Logger instance
 * @throws {Error} EINVEND if org/repo cannot be parsed from the URL
 */
function GitHubResolver(decEndpoint, config, logger) {
    var split;
    GitRemoteResolver.call(this, decEndpoint, config, logger);
    // Check if it's public; only git:// URLs are treated as public here
    this._public = mout.string.startsWith(this._source, 'git://');
    // Grab the org/repo from the last two path segments of the URL
    split = this._source.split('/');
    this._org = split[split.length - 2];
    this._repo = split[split.length - 1];
    // Error out if no org or repo
    if (!this._org || !this._repo) {
        throw createError('Invalid GitHub URL', 'EINVEND', {
            details: this._source + ' seems not to be a GitHub valid URL'
        });
    }
    // Remove the trailing ".git" from the end of the repo name, if present
    if (mout.string.endsWith(this._repo, '.git')) {
        this._repo = this._repo.substr(0, this._repo.length - 4);
    }
}
// Prototype-inherit instance behaviour from GitRemoteResolver and copy over
// its "static" (constructor-level) members as well.
util.inherits(GitHubResolver, GitRemoteResolver);
mout.object.mixIn(GitHubResolver, GitRemoteResolver);
// -----------------
/**
 * Checks out the resolved target.
 *
 * For public repositories resolved to a tag, downloads GitHub's pre-built
 * tag tarball (much faster than a full git clone) and extracts it into the
 * temp dir. Anything else falls back to the generic git checkout.
 *
 * @return {Promise} Resolved once the archive is downloaded and extracted
 */
GitHubResolver.prototype._checkout = function () {
    // Only works with public repositories and tags
    // TODO: Actually it might work with non-public repos since ssh & https protocols
    //       can also reference public repositories
    //       As such, we could proceed and detect 404 status code but..
    if (!this._public || !this._resolution.tag) {
        return GitRemoteResolver.prototype._checkout.call(this);
    }

    // Use HTTPS: the archive is executable package code, so fetching it over
    // plain HTTP would allow tampering in transit.
    var tarballUrl = 'https://github.com/' + this._org + '/' + this._repo + '/archive/' + this._resolution.tag + '.tar.gz';
    var file = path.join(this._tempDir, 'archive.tar.gz');
    var reqHeaders = {};
    var that = this;
    var deferred = Q.defer();

    if (this._config.userAgent) {
        reqHeaders['User-Agent'] = this._config.userAgent;
    }

    this._logger.action('download', tarballUrl, {
        url: that._source,
        to: file
    });

    // Download the tarball, retrying transient failures (replay) and
    // reporting progress as it streams in.
    replay(progress(request(tarballUrl, {
        proxy: this._config.proxy,
        strictSSL: this._config.strictSsl,
        timeout: this._config.timeout,
        headers: reqHeaders
    }), {
        delay: 8000
    }))
    .on('progress', function (state) {
        var totalMb = Math.round(state.total / 1024 / 1024);
        var receivedMb = Math.round(state.received / 1024 / 1024);
        that._logger.info('progress', receivedMb + 'MB of ' + totalMb + 'MB downloaded, ' + state.percent + '%');
    })
    .on('replay', function (nr, error) {
        that._logger.debug('retry', 'Retrying request to ' + tarballUrl + ' because it failed with ' + error.code);
    })
    .on('response', function (response) {
        var status = response.statusCode;

        // Only the 2xx class is success. The original check used
        // `status > 300`, which wrongly accepted 300 Multiple Choices;
        // redirects are followed automatically by `request`, so any 3xx
        // surfacing here is also a failure.
        if (status < 200 || status >= 300) {
            deferred.reject(createError('Status code of ' + status, 'EHTTP'));
        }
    })
    .on('error', deferred.reject)
    // Pipe read stream to write stream
    .pipe(fs.createWriteStream(file))
    .on('error', deferred.reject)
    .on('close', function () {
        // Extract archive
        that._logger.action('extract', path.basename(file), {
            archive: file,
            to: that._tempDir
        });
        extract(file, that._tempDir)
        .then(deferred.resolve, deferred.reject);
    });

    return deferred.promise;
};
/**
 * Persists the package meta, defaulting `homepage` to the GitHub project
 * page when the package does not declare one.
 *
 * @param {Object} meta The package meta
 * @return {Promise}
 */
GitHubResolver.prototype._savePkgMeta = function (meta) {
    meta.homepage = meta.homepage || 'https://github.com/' + this._org + '/' + this._repo;
    return GitRemoteResolver.prototype._savePkgMeta.call(this, meta);
};

module.exports = GitHubResolver;
| mit |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/construct-2.5.2/construct/protocols/layer3/dhcpv6.py | 2854 | """
the Dynamic Host Configuration Protocol (DHCP) for IPv6
http://www.networksorcery.com/enp/rfc/rfc3315.txt
"""
from construct import *
from ipv6 import Ipv6Address
import six
# DHCPv6 option: a TLV record — 16-bit option code, 16-bit payload length,
# then `length` raw bytes of payload (left unparsed here).
dhcp_option = Struct("dhcp_option",
    Enum(UBInt16("code"),
        OPTION_CLIENTID = 1,
        OPTION_SERVERID = 2,
        OPTION_IA_NA = 3,
        OPTION_IA_TA = 4,
        OPTION_IAADDR = 5,
        OPTION_ORO = 6,
        OPTION_PREFERENCE = 7,
        OPTION_ELAPSED_TIME = 8,
        OPTION_RELAY_MSG = 9,
        OPTION_AUTH = 11,
        OPTION_UNICAST = 12,
        OPTION_STATUS_CODE = 13,
        OPTION_RAPID_COMMIT = 14,
        OPTION_USER_CLASS = 15,
        OPTION_VENDOR_CLASS = 16,
        OPTION_VENDOR_OPTS = 17,
        OPTION_INTERFACE_ID = 18,
        OPTION_RECONF_MSG = 19,
        OPTION_RECONF_ACCEPT = 20,
        SIP_SERVERS_DOMAIN_NAME_LIST = 21,
        SIP_SERVERS_IPV6_ADDRESS_LIST = 22,
        DNS_RECURSIVE_NAME_SERVER = 23,
        DOMAIN_SEARCH_LIST = 24,
        OPTION_IA_PD = 25,
        OPTION_IAPREFIX = 26,
        OPTION_NIS_SERVERS = 27,
        OPTION_NISP_SERVERS = 28,
        OPTION_NIS_DOMAIN_NAME = 29,
        OPTION_NISP_DOMAIN_NAME = 30,
        SNTP_SERVER_LIST = 31,
        INFORMATION_REFRESH_TIME = 32,
        BCMCS_CONTROLLER_DOMAIN_NAME_LIST = 33,
        BCMCS_CONTROLLER_IPV6_ADDRESS_LIST = 34,
        OPTION_GEOCONF_CIVIC = 36,
        OPTION_REMOTE_ID = 37,
        RELAY_AGENT_SUBSCRIBER_ID = 38,
        OPTION_CLIENT_FQDN = 39,
    ),
    UBInt16("length"),
    # Opaque payload; size comes from the preceding "length" field
    Field("data", lambda ctx: ctx.length),
)
# Client/server message header: just a 24-bit transaction id (msgtype is
# consumed by the enclosing dhcp_message struct).
client_message = Struct("client_message",
    Bitwise(BitField("transaction_id", 24)),
)
# Relay-agent message header: hop count plus the link and peer addresses.
relay_message = Struct("relay_message",
    Byte("hop_count"),
    Ipv6Address("linkaddr"),
    Ipv6Address("peeraddr"),
)
# Top-level DHCPv6 message: a 1-byte message type, a type-dependent header
# (relay vs. client/server), then a greedy list of TLV options.
dhcp_message = Struct("dhcp_message",
    Enum(Byte("msgtype"),
        # these are client-server messages
        SOLICIT = 1,
        ADVERTISE = 2,
        REQUEST = 3,
        CONFIRM = 4,
        RENEW = 5,
        REBIND = 6,
        REPLY = 7,
        # trailing underscores presumably avoid clashing with names used
        # elsewhere — TODO confirm before renaming
        RELEASE_ = 8,
        DECLINE_ = 9,
        RECONFIGURE = 10,
        INFORMATION_REQUEST = 11,
        # these two are relay messages
        RELAY_FORW = 12,
        RELAY_REPL = 13,
    ),
    # relay messages have a different structure from client-server messages
    Switch("params", lambda ctx: ctx.msgtype,
        {
            "RELAY_FORW" : relay_message,
            "RELAY_REPL" : relay_message,
        },
        default = client_message,
    ),
    Rename("options", GreedyRange(dhcp_option)),
)
if __name__ == "__main__":
    # Smoke test: parse one client-server message (REQUEST) and one relay
    # message (RELAY_FORW) from hand-crafted byte strings.
    test1 = six.b("\x03\x11\x22\x33\x00\x17\x00\x03ABC\x00\x05\x00\x05HELLO")
    test2 = six.b("\x0c\x040123456789abcdef0123456789abcdef\x00\x09\x00\x0bhello world\x00\x01\x00\x00")
    print (dhcp_message.parse(test1))
    print (dhcp_message.parse(test2))
| mit |
sashberd/cdnjs | ajax/libs/ace/1.3.2/snippets/d.js | 404 | define("ace/snippets/d",["require","exports","module"],function(e,t,n){"use strict";t.snippetText="",t.scope="d"});
(function() {
window.require(["ace/snippets/d"], function(m) {
if (typeof module == "object") {
module.exports = m;
}
});
})();
| mit |
mainio/concrete5 | concrete/elements/preview_footer_required.php | 120 | <?php
defined('C5_EXECUTE') or die('Access Denied.');
$v = View::getRequestInstance();
$v->markFooterAssetPosition();
| mit |
zenos-os/zenos | vendor/corert/src/Common/src/TypeSystem/NativeFormat/NativeFormatField.CodeGen.cs | 530 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Internal.TypeSystem.NativeFormat
{
    partial class NativeFormatField
    {
        /// <summary>
        /// True when the field is marked intrinsic. The answer is read from the
        /// cached field-flags bitfield (queried together with
        /// AttributeMetadataCache) and tested against the Intrinsic bit.
        /// </summary>
        public override bool IsIntrinsic
            => (GetFieldFlags(FieldFlags.AttributeMetadataCache | FieldFlags.Intrinsic) & FieldFlags.Intrinsic) != 0;
    }
}
| mit |
dev-kevin/questions | cake/libs/model/datasources/datasource.php | 13638 | <?php
/**
* DataSource base class
*
* PHP versions 4 and 5
*
* CakePHP(tm) : Rapid Development Framework (http://cakephp.org)
* Copyright 2005-2011, Cake Software Foundation, Inc. (http://cakefoundation.org)
*
* Licensed under The MIT License
* Redistributions of files must retain the above copyright notice.
*
* @copyright Copyright 2005-2011, Cake Software Foundation, Inc. (http://cakefoundation.org)
* @link http://cakephp.org CakePHP(tm) Project
* @package cake
* @subpackage cake.cake.libs.model.datasources
* @since CakePHP(tm) v 0.10.5.1790
* @license MIT License (http://www.opensource.org/licenses/mit-license.php)
*/
/**
 * DataSource base class
 *
 * Abstract base for all datasources (database drivers and other external
 * data providers). Subclasses override the CRUD and transaction methods.
 *
 * @package cake
 * @subpackage cake.cake.libs.model.datasources
 */
class DataSource extends Object {

/**
 * Are we connected to the DataSource?
 *
 * @var boolean
 * @access public
 */
	var $connected = false;

/**
 * Print full query debug info?
 *
 * @var boolean
 * @access public
 */
	var $fullDebug = false;

/**
 * Error description of last query
 *
 * @var unknown_type
 * @access public
 */
	var $error = null;

/**
 * String to hold how many rows were affected by the last SQL operation.
 *
 * @var string
 * @access public
 */
	var $affected = null;

/**
 * Number of rows in current resultset
 *
 * @var int
 * @access public
 */
	var $numRows = null;

/**
 * Time the last query took
 *
 * @var int
 * @access public
 */
	var $took = null;

/**
 * The starting character that this DataSource uses for quoted identifiers.
 *
 * @var string
 * @access public
 */
	var $startQuote = null;

/**
 * The ending character that this DataSource uses for quoted identifiers.
 *
 * @var string
 * @access public
 */
	var $endQuote = null;

/**
 * Result
 *
 * @var array
 * @access protected
 */
	var $_result = null;

/**
 * Queries count.
 *
 * @var int
 * @access protected
 */
	var $_queriesCnt = 0;

/**
 * Total duration of all queries.
 *
 * @var unknown_type
 * @access protected
 */
	var $_queriesTime = null;

/**
 * Log of queries executed by this DataSource
 *
 * @var array
 * @access protected
 */
	var $_queriesLog = array();

/**
 * Maximum number of items in query log
 *
 * This is to prevent query log taking over too much memory.
 *
 * @var int Maximum number of queries in the queries log.
 * @access protected
 */
	var $_queriesLogMax = 200;

/**
 * Caches serialized results of executed queries
 *
 * @var array
 * @access protected
 */
	var $_queryCache = array();

/**
 * The default configuration of a specific DataSource
 *
 * @var array
 * @access protected
 */
	var $_baseConfig = array();

/**
 * Holds references to descriptions loaded by the DataSource
 *
 * @var array
 * @access private
 */
	var $__descriptions = array();

/**
 * Holds a list of sources (tables) contained in the DataSource
 *
 * @var array
 * @access protected
 */
	var $_sources = null;

/**
 * A reference to the physical connection of this DataSource
 *
 * @var array
 * @access public
 */
	var $connection = null;

/**
 * The DataSource configuration
 *
 * @var array
 * @access public
 */
	var $config = array();

/**
 * The DataSource configuration key name
 *
 * @var string
 * @access public
 */
	var $configKeyName = null;

/**
 * Whether or not this DataSource is in the middle of a transaction
 *
 * @var boolean
 * @access protected
 */
	var $_transactionStarted = false;

/**
 * Whether or not source data like available tables and schema descriptions
 * should be cached
 *
 * @var boolean
 * @access public
 */
	var $cacheSources = true;

/**
 * Constructor. Merges the passed configuration over the datasource defaults.
 *
 * @param array $config Array of configuration information for the datasource.
 * @return void
 */
	function __construct($config = array()) {
		parent::__construct();
		$this->setConfig($config);
	}
/**
 * Caches/returns cached results for child instances
 *
 * Returns null when source caching is disabled; otherwise returns the
 * in-memory list, falling back to (and priming) the '_cake_model_' cache
 * with $data when nothing is cached yet.
 *
 * @param mixed $data Source list supplied by the child class, used to seed the cache
 * @return array Array of sources available in this datasource.
 * @access public
 */
	function listSources($data = null) {
		if ($this->cacheSources === false) {
			return null;
		}
		// Already gathered during this request?
		if ($this->_sources !== null) {
			return $this->_sources;
		}
		// Cache key must only contain characters safe for cache backends.
		$key = ConnectionManager::getSourceName($this) . '_' . $this->config['database'] . '_list';
		$key = preg_replace('/[^A-Za-z0-9_\-.+]/', '_', $key);
		$sources = Cache::read($key, '_cake_model_');
		if (empty($sources)) {
			$sources = $data;
			Cache::write($key, $data, '_cake_model_');
		}
		$this->_sources = $sources;
		return $sources;
	}
/**
 * Convenience method for DboSource::listSources(). Returns source names in lowercase.
 *
 * @param boolean $reset Whether or not the source list should be reset.
 * @return array Array of sources available in this datasource
 * @access public
 */
	function sources($reset = false) {
		if ($reset === true) {
			$this->_sources = null;
		}
		// listSources() returns null when source caching is disabled; cast to
		// array so array_map() never receives a non-array (which would emit a
		// warning and break the documented array return type).
		return array_map('strtolower', (array)$this->listSources());
	}
/**
 * Returns a Model description (metadata) or null if none found.
 *
 * Checks the per-request description map first, then the persistent cache.
 * Always returns null when source caching is disabled.
 *
 * @param Model $model
 * @return array Array of Metadata for the $model
 * @access public
 */
	function describe(&$model) {
		if ($this->cacheSources === false) {
			return null;
		}
		$table = $model->tablePrefix . $model->table;
		if (isset($this->__descriptions[$table])) {
			return $this->__descriptions[$table];
		}
		// Fall back to the persistent cache and memoize a reference to it.
		$cache = $this->__cacheDescription($table);
		if ($cache !== null) {
			$this->__descriptions[$table] =& $cache;
			return $cache;
		}
		return null;
	}
/**
 * Begin a transaction
 *
 * Base implementation only tracks state; subclasses issue the actual BEGIN.
 *
 * @return boolean Returns true if a transaction is not in progress
 * @access public
 */
	function begin(&$model) {
		return !$this->_transactionStarted;
	}

/**
 * Commit a transaction
 *
 * @return boolean Returns true if a transaction is in progress
 * @access public
 */
	function commit(&$model) {
		return $this->_transactionStarted;
	}

/**
 * Rollback a transaction
 *
 * @return boolean Returns true if a transaction is in progress
 * @access public
 */
	function rollback(&$model) {
		return $this->_transactionStarted;
	}
/**
 * Converts column types to basic types
 *
 * Base implementation knows no column types; subclasses override.
 *
 * @param string $real Real column type (i.e. "varchar(255)")
 * @return string Abstract column type (i.e. "string")
 * @access public
 */
	function column($real) {
		return false;
	}
/**
 * Used to create new records. The "C" CRUD.
 *
 * To-be-overridden in subclasses. Base implementation always fails.
 *
 * @param Model $model The Model to be created.
 * @param array $fields An Array of fields to be saved.
 * @param array $values An Array of values to save.
 * @return boolean success
 * @access public
 */
	function create(&$model, $fields = null, $values = null) {
		return false;
	}

/**
 * Used to read records from the Datasource. The "R" in CRUD
 *
 * To-be-overridden in subclasses. Base implementation always fails.
 *
 * @param Model $model The model being read.
 * @param array $queryData An array of query data used to find the data you want
 * @return mixed
 * @access public
 */
	function read(&$model, $queryData = array()) {
		return false;
	}

/**
 * Update a record(s) in the datasource.
 *
 * To-be-overridden in subclasses. Base implementation always fails.
 *
 * @param Model $model Instance of the model class being updated
 * @param array $fields Array of fields to be updated
 * @param array $values Array of values to be update $fields to.
 * @return boolean Success
 * @access public
 */
	function update(&$model, $fields = null, $values = null) {
		return false;
	}

/**
 * Delete a record(s) in the datasource.
 *
 * To-be-overridden in subclasses; the base implementation only resolves the
 * default id and performs no deletion.
 *
 * @param Model $model The model class having record(s) deleted
 * @param mixed $id Primary key of the model
 * @access public
 */
	function delete(&$model, $id = null) {
		if ($id == null) {
			$id = $model->id;
		}
	}
/**
 * Returns the ID generated from the previous INSERT operation.
 *
 * To-be-overridden in subclasses; base implementation has no such concept.
 *
 * @param unknown_type $source
 * @return mixed Last ID key generated in previous INSERT
 * @access public
 */
	function lastInsertId($source = null) {
		return false;
	}

/**
 * Returns the number of rows returned by last operation.
 *
 * @param unknown_type $source
 * @return integer Number of rows returned by last operation
 * @access public
 */
	function lastNumRows($source = null) {
		return false;
	}

/**
 * Returns the number of rows affected by last query.
 *
 * @param unknown_type $source
 * @return integer Number of rows affected by last query.
 * @access public
 */
	function lastAffected($source = null) {
		return false;
	}
/**
 * Check whether the conditions for the Datasource being available
 * are satisfied. Often used from connect() to check for support
 * before establishing a connection.
 *
 * Base implementation assumes the datasource is always available.
 *
 * @return boolean Whether or not the Datasources conditions for use are met.
 * @access public
 */
	function enabled() {
		return true;
	}
/**
 * Returns true if the DataSource supports the given interface (method)
 *
 * @param string $interface The name of the interface (method)
 * @return boolean True on success
 * @access public
 */
	function isInterfaceSupported($interface) {
		// Cache method lists per concrete class. A single `static $methods`
		// local is shared by every DataSource subclass (statics belong to the
		// function, not the instance), so the first class to call this method
		// would otherwise poison the lookup table for all other datasources.
		static $methods = array();
		$class = get_class($this);
		if (!isset($methods[$class])) {
			$methods[$class] = array_map('strtolower', get_class_methods($this));
		}
		return in_array(strtolower($interface), $methods[$class], true);
	}
/**
 * Sets the configuration for the DataSource.
 *
 * Later sources win: datasource defaults are overridden by any existing
 * configuration, which is in turn overridden by the supplied $config.
 *
 * @param array $config The configuration array
 * @return void
 * @access public
 */
	function setConfig($config = array()) {
		$merged = array_merge($this->_baseConfig, $this->config, $config);
		$this->config = $merged;
	}
/**
 * Cache the DataSource description
 *
 * With $data, stores the description both in-memory (by reference) and in
 * the persistent cache; without it, acts as a cache read.
 *
 * @param string $object The name of the object (model) to cache
 * @param mixed $data The description of the model, usually a string or array
 * @return mixed
 * @access private
 */
	function __cacheDescription($object, $data = null) {
		if ($this->cacheSources === false) {
			return null;
		}
		if ($data !== null) {
			$this->__descriptions[$object] =& $data;
		}
		$key = ConnectionManager::getSourceName($this) . '_' . $object;
		$cache = Cache::read($key, '_cake_model_');
		// Empty cache read: seed the persistent cache with the passed data.
		if (empty($cache)) {
			$cache = $data;
			Cache::write($key, $cache, '_cake_model_');
		}
		return $cache;
	}
/**
 * Replaces `{$__cakeID__$}` and `{$__cakeForeignKey__$}` placeholders in query data.
 *
 * The ID placeholder resolves to the model's primary-key value from $data
 * (or from the association stack); the foreign-key placeholder walks the
 * model's association definitions to find the matching foreign-key value.
 * Returns false when a present placeholder cannot be resolved to a value.
 *
 * @param string $query Query string needing replacements done.
 * @param array $data Array of data with values that will be inserted in placeholders.
 * @param string $association Name of association model being replaced
 * @param unknown_type $assocData
 * @param Model $model Instance of the model to replace $__cakeID__$
 * @param Model $linkModel Instance of model to replace $__cakeForeignKey__$
 * @param array $stack
 * @return string String of query data with placeholders replaced.
 * @access public
 * @todo Remove and refactor $assocData, ensure uses of the method have the param removed too.
 */
	function insertQueryData($query, $data, $association, $assocData, &$model, &$linkModel, $stack) {
		$keys = array('{$__cakeID__$}', '{$__cakeForeignKey__$}');
		foreach ($keys as $key) {
			$val = null;
			$type = null;
			// Only do the (expensive) resolution when the placeholder occurs.
			if (strpos($query, $key) !== false) {
				switch ($key) {
					case '{$__cakeID__$}':
						if (isset($data[$model->alias]) || isset($data[$association])) {
							if (isset($data[$model->alias][$model->primaryKey])) {
								$val = $data[$model->alias][$model->primaryKey];
							} elseif (isset($data[$association][$model->primaryKey])) {
								$val = $data[$association][$model->primaryKey];
							}
						} else {
							// Walk the association stack from the innermost
							// association outwards looking for a primary key.
							$found = false;
							foreach (array_reverse($stack) as $assoc) {
								if (isset($data[$assoc]) && isset($data[$assoc][$model->primaryKey])) {
									$val = $data[$assoc][$model->primaryKey];
									$found = true;
									break;
								}
							}
							if (!$found) {
								$val = '';
							}
						}
						$type = $model->getColumnType($model->primaryKey);
						break;
					case '{$__cakeForeignKey__$}':
						// Scan association definitions for the one matching
						// $association to learn its foreign-key field name.
						foreach ($model->__associations as $id => $name) {
							foreach ($model->$name as $assocName => $assoc) {
								if ($assocName === $association) {
									if (isset($assoc['foreignKey'])) {
										$foreignKey = $assoc['foreignKey'];
										$assocModel = $model->$assocName;
										$type = $assocModel->getColumnType($assocModel->primaryKey);
										if (isset($data[$model->alias][$foreignKey])) {
											$val = $data[$model->alias][$foreignKey];
										} elseif (isset($data[$association][$foreignKey])) {
											$val = $data[$association][$foreignKey];
										} else {
											$found = false;
											foreach (array_reverse($stack) as $assoc) {
												if (isset($data[$assoc]) && isset($data[$assoc][$foreignKey])) {
													$val = $data[$assoc][$foreignKey];
													$found = true;
													break;
												}
											}
											if (!$found) {
												$val = '';
											}
										}
									}
									// Association located; leave all three loops.
									break 3;
								}
							}
						}
						break;
				}
				// '0' is a legitimate key value; anything else empty aborts.
				if (empty($val) && $val !== '0') {
					return false;
				}
				$query = str_replace($key, $this->value($val, $type), $query);
			}
		}
		return $query;
	}
/**
 * Builds the datasource-specific key name for a model field.
 *
 * To-be-overridden in subclasses.
 *
 * @param Model $model Model instance
 * @param string $key Key name to make
 * @return string Key name for model.
 * @access public
 */
	function resolveKey(&$model, $key) {
		$alias = $model->alias;
		return "{$alias}{$key}";
	}
/**
 * Closes the current datasource.
 *
 * Rolls back any open transaction before disconnecting.
 *
 * @return void
 * @access public
 */
	function __destruct() {
		if ($this->_transactionStarted) {
			// rollback() takes its model by reference, so pass a variable.
			$null = null;
			$this->rollback($null);
		}
		if ($this->connected) {
			$this->close();
		}
	}
}
| mit |
hosamshahin/OpenDSA | SourceCode/Java/Lists/Link.java | 597 | /* *** ODSATag: Link *** */
class Link { // Singly linked list node class
private Object e; // Value for this node
private Link n; // Point to next node in list
// Constructors
Link(Object it, Link inn) { e = it; n = inn; }
Link(Link inn) { e = null; n = inn; }
Object element() { return e; } // Return the value
Object setElement(Object it) { return e = it; } // Set element value
Link next() { return n; } // Return next link
Link setNext(Link inn) { return n = inn; } // Set next link
}
/* *** ODSAendTag: Link *** */
| mit |
zbrown94/hackathonUI48 | node_modules/geojson-area/test/basic.js | 508 | var gjArea = require('../'),
assert = require('assert');
describe('geojson area', function() {
it('computes the area of illinois', function() {
var ill = require('./illinois.json');
assert.equal(gjArea.geometry(ill), 145978332359.37125);
});
// http://www.wolframalpha.com/input/?i=surface+area+of+earth
it('computes the area of the world', function() {
var all = require('./all.json');
assert.equal(gjArea.geometry(all), 511207893395811.06);
});
});
| mit |
hyonholee/azure-sdk-for-net | sdk/storage/Microsoft.Azure.Management.Storage/src/Generated/BlobServicesOperationsExtensions.cs | 9259 | // <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.Storage
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for BlobServicesOperations.
/// </summary>
public static partial class BlobServicesOperationsExtensions
{
/// <summary>
/// List blob services of storage account. It returns a collection of one
/// object named default.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
public static IEnumerable<BlobServiceProperties> List(this IBlobServicesOperations operations, string resourceGroupName, string accountName)
{
return operations.ListAsync(resourceGroupName, accountName).GetAwaiter().GetResult();
}
/// <summary>
/// List blob services of storage account. It returns a collection of one
/// object named default.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IEnumerable<BlobServiceProperties>> ListAsync(this IBlobServicesOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Sets the properties of a storage account’s Blob service, including
/// properties for Storage Analytics and CORS (Cross-Origin Resource Sharing)
/// rules.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The properties of a storage account’s Blob service, including properties
/// for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
/// </param>
public static BlobServiceProperties SetServiceProperties(this IBlobServicesOperations operations, string resourceGroupName, string accountName, BlobServiceProperties parameters)
{
return operations.SetServicePropertiesAsync(resourceGroupName, accountName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Sets the properties of a storage account’s Blob service, including
/// properties for Storage Analytics and CORS (Cross-Origin Resource Sharing)
/// rules.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
/// <param name='parameters'>
/// The properties of a storage account’s Blob service, including properties
/// for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<BlobServiceProperties> SetServicePropertiesAsync(this IBlobServicesOperations operations, string resourceGroupName, string accountName, BlobServiceProperties parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    // Await the low-level call first, then dispose the HTTP response
    // envelope once the deserialized body has been extracted.
    var httpResponse = await operations.SetServicePropertiesWithHttpMessagesAsync(resourceGroupName, accountName, parameters, null, cancellationToken).ConfigureAwait(false);
    using (httpResponse)
    {
        return httpResponse.Body;
    }
}
/// <summary>
/// Gets the properties of a storage account’s Blob service, including
/// properties for Storage Analytics and CORS (Cross-Origin Resource Sharing)
/// rules. Synchronous convenience wrapper over the async overload.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
public static BlobServiceProperties GetServiceProperties(this IBlobServicesOperations operations, string resourceGroupName, string accountName)
{
    // Block via GetAwaiter().GetResult() so a failure surfaces as the
    // original exception rather than an AggregateException.
    var pending = operations.GetServicePropertiesAsync(resourceGroupName, accountName);
    return pending.GetAwaiter().GetResult();
}
/// <summary>
/// Gets the properties of a storage account’s Blob service, including
/// properties for Storage Analytics and CORS (Cross-Origin Resource Sharing)
/// rules.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group within the user's subscription. The name is
/// case insensitive.
/// </param>
/// <param name='accountName'>
/// The name of the storage account within the specified resource group.
/// Storage account names must be between 3 and 24 characters in length and use
/// numbers and lower-case letters only.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<BlobServiceProperties> GetServicePropertiesAsync(this IBlobServicesOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
    // Await the low-level call first, then dispose the HTTP response
    // envelope once the deserialized body has been extracted.
    var httpResponse = await operations.GetServicePropertiesWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
    using (httpResponse)
    {
        return httpResponse.Body;
    }
}
}
}
| mit |
davidcbucher/heroku-buildpack-geo-python | vendor/distribute-0.6.34/release.py | 4493 | #!/usr/bin/env python
"""
Script to fully automate the release process. Requires Python 2.6+
with sphinx installed and the 'hg' command on the path.
"""
from __future__ import print_function
import subprocess
import shutil
import os
import sys
import urllib2
import getpass
import collections
try:
import keyring
except Exception:
pass
VERSION = '0.6.34'
def get_next_version(version=None):
    """Return `version` with its last dotted component incremented.

    Args:
        version: dotted version string; defaults to the module-level VERSION.

    Returns:
        The next patch version, e.g. '0.6.34' -> '0.6.35'.
    """
    if version is None:
        version = VERSION
    # Materialize with list(): on Python 3 map() is lazy, so the original
    # `digits[-1] += 1` would raise TypeError on the map object.
    digits = list(map(int, version.split('.')))
    digits[-1] += 1
    return '.'.join(map(str, digits))
# Version the working tree will be bumped to right after VERSION is tagged.
NEXT_VERSION = get_next_version()

# Files containing the literal version string; bump_versions() rewrites each.
files_with_versions = ('docs/conf.py', 'setup.py', 'release.py',
    'README.txt', 'distribute_setup.py')
def get_repo_name():
    """
    Get the repo name ('owner/repo') from the hgrc default path.
    """
    # Pass argv as a list: a single string such as 'hg paths default' is
    # treated on POSIX as one executable name and fails with ENOENT.
    default = subprocess.check_output(['hg', 'paths', 'default']).strip()
    parts = default.split('/')
    # Drop a trailing slash component so the last two parts are owner/repo.
    if parts[-1] == '':
        parts.pop()
    return '/'.join(parts[-2:])
def get_mercurial_creds(system='https://bitbucket.org', username=None):
    """
    Return named tuple of username,password in much the same way that
    Mercurial would (from the keyring).
    """
    # todo: consider getting this from .hgrc
    username = username or getpass.getuser()
    # Mercurial's keyring extension stores entries under
    # "<user>@@<url>" / "<user>@@<url>@Mercurial"; mirror that layout here.
    keyring_username = '@@'.join((username, system))
    system = '@'.join((keyring_username, 'Mercurial'))
    # `keyring` is imported at module scope inside try/except, so it may
    # legitimately be missing; in that case skip the lookup entirely.
    password = (
        keyring.get_password(system, keyring_username)
        if 'keyring' in globals()
        else None
    )
    # Fall back to an interactive prompt when the keyring had no entry.
    if not password:
        password = getpass.getpass()
    Credential = collections.namedtuple('Credential', 'username password')
    return Credential(username, password)
def add_milestone_and_version(version=NEXT_VERSION):
    """Create a Bitbucket issue milestone and version entry named `version`.

    Talks to the Bitbucket 1.0 REST API with HTTP Basic auth; API errors are
    printed but do not abort the release. Python 2 only (urllib2, the
    str.encode('base64') codec).
    """
    # Python 2 idiom: 'user:pass'.encode('base64') builds the Basic payload;
    # strip() removes the trailing newline the codec appends.
    auth = 'Basic ' + ':'.join(get_mercurial_creds()).encode('base64').strip()
    headers = {
        'Authorization': auth,
    }
    base = 'https://api.bitbucket.org'
    for type in 'milestones', 'versions':
        url = (base + '/1.0/repositories/{repo}/issues/{type}'
            .format(repo = get_repo_name(), type=type))
        # Supplying `data` makes urllib2 issue a POST rather than a GET.
        req = urllib2.Request(url = url, headers = headers,
            data='name='+version)
        try:
            urllib2.urlopen(req)
        except urllib2.HTTPError as e:
            # Surface the API's error body for the operator, then continue.
            print(e.fp.read())
def bump_versions():
    """Rewrite the version string in every file listed in files_with_versions."""
    for filename in files_with_versions:
        bump_version(filename)
def bump_version(filename, old_version=None, new_version=None):
    """Rewrite `filename` in place, replacing `old_version` with `new_version`.

    Args:
        filename: path of the file to rewrite.
        old_version: string to replace; defaults to the module-level VERSION.
        new_version: replacement; defaults to the module-level NEXT_VERSION.

    Binary mode is kept so line endings pass through untouched; the
    replacement is done on bytes so this also works on Python 3 (the
    original mixed bytes lines with str arguments to replace()).
    """
    if old_version is None:
        old_version = VERSION
    if new_version is None:
        new_version = NEXT_VERSION
    old_bytes = old_version.encode('utf-8')
    new_bytes = new_version.encode('utf-8')
    with open(filename, 'rb') as f:
        lines = [line.replace(old_bytes, new_bytes) for line in f]
    with open(filename, 'wb') as f:
        f.writelines(lines)
def do_release():
    """Drive the full release: sanity checks, tag, build, upload, bump, push.

    Interactive: the operator must confirm the changelog and the test status
    before anything irreversible happens. Order matters throughout — the
    tree is tagged before building, and bumped only after a successful
    upload. Python 2 only (raw_input).
    """
    # Fail fast if any version-bearing file is missing from the tree.
    assert all(map(os.path.exists, files_with_versions)), (
        "Expected file(s) missing")
    assert has_sphinx(), "You must have Sphinx installed to release"
    res = raw_input('Have you read through the SCM changelog and '
        'confirmed the changelog is current for releasing {VERSION}? '
        .format(**globals()))
    if not res.lower().startswith('y'):
        print("Please do that")
        raise SystemExit(1)
    print("Travis-CI tests: http://travis-ci.org/#!/jaraco/distribute")
    res = raw_input('Have you or has someone verified that the tests '
        'pass on this revision? ')
    if not res.lower().startswith('y'):
        print("Please do that")
        raise SystemExit(2)
    # Tag the release, then check out that tag so the build matches exactly.
    subprocess.check_call(['hg', 'tag', VERSION])
    subprocess.check_call(['hg', 'update', VERSION])
    has_docs = build_docs()
    # Start from a clean dist/ so only freshly built artifacts get uploaded.
    if os.path.isdir('./dist'):
        shutil.rmtree('./dist')
    # egg_info -RD -b '' strips the dev tag so the sdist carries the plain
    # release version; register+upload then push it to PyPI.
    cmd = [sys.executable, 'setup.py', '-q', 'egg_info', '-RD', '-b', '',
        'sdist', 'register', 'upload']
    if has_docs:
        cmd.append('upload_docs')
    subprocess.check_call(cmd)
    upload_bootstrap_script()
    # update to the tip for the next operation
    subprocess.check_call(['hg', 'update'])
    # we just tagged the current version, bump for the next release.
    bump_versions()
    subprocess.check_call(['hg', 'ci', '-m',
        'Bumped to {NEXT_VERSION} in preparation for next '
        'release.'.format(**globals())])
    # push the changes
    subprocess.check_call(['hg', 'push'])
    add_milestone_and_version()
def has_sphinx():
    """Return True when the `sphinx-build` executable can be spawned.

    Any failure to launch the process (missing binary, permission error)
    is treated as "Sphinx not available".
    """
    try:
        # Context manager guarantees the devnull handle is closed; the
        # original opened it and leaked the file object on every call.
        with open(os.path.devnull, 'wb') as devnull:
            subprocess.Popen(['sphinx-build', '--version'], stdout=devnull,
                stderr=subprocess.STDOUT).wait()
    except Exception:
        return False
    return True
def build_docs():
    """Build the Sphinx HTML docs under docs/build.

    Returns True after a successful build, or None (implicitly) when the
    current directory has no docs/ folder at all.
    """
    if not os.path.isdir('docs'):
        return
    # Always build from scratch: discard any previous output first.
    stale_output = 'docs/build'
    if os.path.isdir(stale_output):
        shutil.rmtree(stale_output)
    sphinx_cmd = [
        'sphinx-build',
        '-b', 'html',
        '-d', 'build/doctrees',
        '.',
        'build/html',
    ]
    subprocess.check_call(sphinx_cmd, cwd='docs')
    return True
def upload_bootstrap_script():
    """Copy distribute_setup.py to python-distribute.org via scp (pscp on
    Windows); print a notice instead of failing when the copy cannot run."""
    scp_command = 'pscp' if sys.platform.startswith('win') else 'scp'
    try:
        subprocess.check_call([scp_command, 'distribute_setup.py',
            'pypi@ziade.org:python-distribute.org/'])
    except Exception:
        # Narrowed from a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; only genuine failures reach the notice below.
        print("Unable to upload bootstrap script. Ask Tarek to do it.")
# Allow the module to be executed directly as the release entry point.
if __name__ == '__main__':
    do_release()
| mit |
february29/Learning | web/vue/AccountBook-Express/node_modules/msgpack-lite/test/13.decoder.js | 839 | #!/usr/bin/env mocha -R spec
var assert = require("assert");
var msgpackJS = "../index";
var isBrowser = ("undefined" !== typeof window);
var msgpack = isBrowser && window.msgpack || require(msgpackJS);
var TITLE = __filename.replace(/^.*\//, "");
var source = {"foo": "bar"};
var packed = msgpack.encode(source);
describe(TITLE, function() {
it("Decoder().decode(obj)", function(done) {
var decoder = new msgpack.Decoder();
decoder.on("data", function(data) {
assert.deepEqual(data, source);
});
decoder.on("end", done);
decoder.decode(packed);
decoder.end();
});
it("Decoder().end(obj)", function(done) {
var decoder = new msgpack.Decoder();
decoder.on("data", function(data) {
assert.deepEqual(data, source);
});
decoder.on("end", done);
decoder.end(packed);
});
});
| mit |
sancospi/jsdelivr | files/floating-label/1.0.1/floatingLabel.js | 9307 | (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.floatingLabel = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
module.exports = {
// Runtime options; callers may override individual keys via init(opt).
config: {
  // Class toggled on the label element when its input holds a value.
  floatingClassName: 'floating',
  // When true, bind one delegated listener pair on <body> instead of
  // per-input listeners.
  delegateEvents: false
},
init: function initializeFloatingLabel( opt ) {
'use strict';
this._eventsDelegated = false;
if ( opt instanceof Object ) {
for ( var option in opt ) {
if ( window.Object.hasOwnProperty.call( opt, option ) && window.Object.hasOwnProperty.call( this.config, option )) {
this.config[ option ] = opt[ option ];
}
}
}
// Once the DOM is loaded, evaluate the inputs on the page
if ( window.addEventListener ) {
window.addEventListener( 'DOMContentLoaded', this.evaluateInputs.bind( this ), false );
} else {
document.onreadystatechange = this.evaluateInputs.bind( this );
}
},
/**
* evaluateInputs - Loop through all the elements currently on the page and display the floating label if needed.
* @version 1.0.0
* @example
* this.evaluateInputs();
* @return {void}
*/
evaluateInputs: function evaluateInputs() {
'use strict';
var self = this,
inputs = document.querySelectorAll( 'input[type="text"], input[type="password"], input[type="email"], input[type="search"], input[type="url"], input[type="tel"], input[type="number"], textarea' );
function showHideLabel( input, label ) {
if ( input.value.length ) {
self.addClass( label, self.config.floatingClassName );
} else {
self.removeClass( label, self.config.floatingClassName );
}
}
function inputEventHandler( evt ) {
if ( !evt ) {
evt = window.event;
}
var inputEl = evt.target || evt.srcElement,
labelEl = self.getPreviousSibling( inputEl ),
typeRe = /text|password|url|email|tel|search|number/i;
if ( ( inputEl.nodeName === 'INPUT' && typeRe.test( inputEl.getAttribute( 'type' ))) || inputEl.nodeName === 'TEXTAREA' ) {
showHideLabel( inputEl, labelEl );
}
}
for ( var input = 0; input < inputs.length; input++ ) {
if ( ( inputs[ input ] instanceof Element ) && window.Object.hasOwnProperty.call( inputs, input )) {
var inputEl = inputs[ input ],
labelEl = self.getPreviousSibling( inputEl );
showHideLabel( inputEl, labelEl );
this.removeEventListener( inputEl, 'keyup', inputEventHandler, false );
this.removeEventListener( inputEl, 'input', inputEventHandler, false );
if ( !this.config.delegateEvents ) {
this.addEventListener( inputEl, 'keyup', inputEventHandler, false );
this.addEventListener( inputEl, 'input', inputEventHandler, false );
}
}
}
if ( this.config.delegateEvents && !this._eventsDelegated ) {
this.addEventListener( document.body, 'keyup', inputEventHandler, false );
this.addEventListener( document.body, 'input', inputEventHandler, false );
this._eventsDelegated = true;
}
},
/**
* getPreviousSibling - Small function to get the previous sibling of an element. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.getPreviousSibling( el );
* @param {element} el - The element to get the previous sibling of
* @return {element} - The previous sibling element
*/
getPreviousSibling: function getPreviousSibling( el ) {
'use strict';
el = el.previousSibling;
while ( el && el.nodeType !== 1 ) {
el = el.previousSibling;
}
return el;
},
/**
* addClass - Small function to add a class to an element. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.addClass( this.currentTooltip, 'visible' );
* @param {element} el - The element to add the class to
* @param {string} className - The class name to add to the element
* @return {element} - The element that had the class added to it
* @api private
*/
addClass: function addClass( el, className ) {
'use strict';
if ( el.classList ) {
el.classList.add( className );
} else {
el.className += ' ' + className;
}
return el;
},
/**
* removeClass - Small function to remove a class from an element. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.removeClass( this.currentTooltip, 'visible' );
* @param {element} el - The element to remove the class from
* @param {string} className - The class name to remove from the element
* @return {element} - The element that had the class removed from it
* @api private
*/
removeClass: function removeClass( el, className ) {
'use strict';
if ( el ) {
if ( el.classList ) {
el.classList.remove( className );
} else {
el.className = el.className.replace( new RegExp( '(^|\\b)' + className.split( ' ' ).join( '|' ) + '(\\b|$)', 'gi' ), ' ' );
}
}
return el;
},
/**
* hasClass - Small function to see if an element has a specific class. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.hasClass( this.currentTooltip, 'visible' );
* @param {element} el - The element to check the class existence on
* @param {string} className - The class to check for
* @return {boolean} - True or false depending on if the element has the class
* @api private
*/
hasClass: function hasClass( el, className ) {
'use strict';
if ( el.classList ) {
return el.classList.contains( className );
} else {
return new RegExp( '(^| )' + className + '( |$)', 'gi' ).test( el.className );
}
},
/**
* addEventListener - Small function to add an event listener. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.addEventListener( document.body, 'click', this.open( this.currentTooltip ));
* @param {element} el - The element node that needs to have the event listener added
* @param {string} eventName - The event name (sans the "on")
* @param {function} handler - The function to be run when the event is triggered
* @return {element} - The element that had an event bound
* @api private
*/
addEventListener: function addEventListener( el, eventName, handler, useCapture ) {
'use strict';
if ( !useCapture ) {
useCapture = false;
}
if ( el.addEventListener ) {
el.addEventListener( eventName, handler, useCapture );
return el;
} else {
if ( eventName === 'focus' ) {
eventName = 'focusin';
}
el.attachEvent( 'on' + eventName, function() {
handler.call( el );
});
return el;
}
},
/**
* removeEventListener - Small function to remove and event listener. Should be compatible with IE8+
* @version 1.0.0
* @example
* this.removeEventListener( document.body, 'click', this.open( this.currentTooltip ));
* @param {element} el - The element node that needs to have the event listener removed
* @param {string} eventName - The event name (sans the "on")
* @param {function} handler - The function that was to be run when the event is triggered
* @return {element} - The element that had an event removed
* @api private
*/
removeEventListener: function removeEventListener( el, eventName, handler, useCapture ) {
'use strict';
if ( !useCapture ) {
useCapture = false;
}
if ( el.removeEventListener ) {
el.removeEventListener( eventName, handler, useCapture );
} else {
if ( eventName === 'focus' ) {
eventName = 'focusin';
}
el.detachEvent( 'on' + eventName, function() {
handler.call( el );
});
}
return el;
}
};
},{}]},{},[1])(1)
});
| mit |
sufuf3/cdnjs | ajax/libs/js-quantities/1.2.0/quantities.js | 54953 | /*!
Copyright © 2006-2007 Kevin C. Olbrich
Copyright © 2010-2013 LIM SAS (http://lim.eu) - Julien Sanchez
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*jshint eqeqeq:true, immed:true, undef:true */
/*global module:false, define:false */
(function (root, factory) {
"use strict";
if (typeof exports === "object") {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like enviroments that support module.exports,
// like Node.
module.exports = factory();
} else if (typeof define === "function" && define.amd) {
// AMD. Register as an anonymous module.
define(factory);
} else {
// Browser globals
root.Qty = factory();
}
}(this, function() {
"use strict";
var UNITS = {
/* prefixes */
"<googol>" : [["googol"], 1e100, "prefix"],
"<kibi>" : [["Ki","Kibi","kibi"], Math.pow(2,10), "prefix"],
"<mebi>" : [["Mi","Mebi","mebi"], Math.pow(2,20), "prefix"],
"<gibi>" : [["Gi","Gibi","gibi"], Math.pow(2,30), "prefix"],
"<tebi>" : [["Ti","Tebi","tebi"], Math.pow(2,40), "prefix"],
"<pebi>" : [["Pi","Pebi","pebi"], Math.pow(2,50), "prefix"],
"<exi>" : [["Ei","Exi","exi"], Math.pow(2,60), "prefix"],
"<zebi>" : [["Zi","Zebi","zebi"], Math.pow(2,70), "prefix"],
"<yebi>" : [["Yi","Yebi","yebi"], Math.pow(2,80), "prefix"],
"<yotta>" : [["Y","Yotta","yotta"], 1e24, "prefix"],
"<zetta>" : [["Z","Zetta","zetta"], 1e21, "prefix"],
"<exa>" : [["E","Exa","exa"], 1e18, "prefix"],
"<peta>" : [["P","Peta","peta"], 1e15, "prefix"],
"<tera>" : [["T","Tera","tera"], 1e12, "prefix"],
"<giga>" : [["G","Giga","giga"], 1e9, "prefix"],
"<mega>" : [["M","Mega","mega"], 1e6, "prefix"],
"<kilo>" : [["k","kilo"], 1e3, "prefix"],
"<hecto>" : [["h","Hecto","hecto"], 1e2, "prefix"],
"<deca>" : [["da","Deca","deca","deka"], 1e1, "prefix"],
"<deci>" : [["d","Deci","deci"], 1e-1, "prefix"],
"<centi>" : [["c","Centi","centi"], 1e-2, "prefix"],
"<milli>" : [["m","Milli","milli"], 1e-3, "prefix"],
"<micro>" : [["u","Micro","micro"], 1e-6, "prefix"],
"<nano>" : [["n","Nano","nano"], 1e-9, "prefix"],
"<pico>" : [["p","Pico","pico"], 1e-12, "prefix"],
"<femto>" : [["f","Femto","femto"], 1e-15, "prefix"],
"<atto>" : [["a","Atto","atto"], 1e-18, "prefix"],
"<zepto>" : [["z","Zepto","zepto"], 1e-21, "prefix"],
"<yocto>" : [["y","Yocto","yocto"], 1e-24, "prefix"],
"<1>" : [["1", "<1>"], 1, ""],
/* length units */
"<meter>" : [["m","meter","meters","metre","metres"], 1.0, "length", ["<meter>"] ],
"<inch>" : [["in","inch","inches","\""], 0.0254, "length", ["<meter>"]],
"<foot>" : [["ft","foot","feet","'"], 0.3048, "length", ["<meter>"]],
"<yard>" : [["yd","yard","yards"], 0.9144, "length", ["<meter>"]],
"<mile>" : [["mi","mile","miles"], 1609.344, "length", ["<meter>"]],
"<naut-mile>" : [["nmi"], 1852, "length", ["<meter>"]],
"<league>": [["league","leagues"], 4828, "length", ["<meter>"]],
"<furlong>": [["furlong","furlongs"], 201.2, "length", ["<meter>"]],
"<rod>" : [["rd","rod","rods"], 5.029, "length", ["<meter>"]],
"<mil>" : [["mil","mils"], 0.0000254, "length", ["<meter>"]],
"<angstrom>" :[["ang","angstrom","angstroms"], 1e-10, "length", ["<meter>"]],
"<fathom>" : [["fathom","fathoms"], 1.829, "length", ["<meter>"]],
"<pica>" : [["pica","picas"], 0.00423333333, "length", ["<meter>"]],
"<point>" : [["pt","point","points"], 0.000352777778, "length", ["<meter>"]],
"<redshift>" : [["z","red-shift"], 1.302773e26, "length", ["<meter>"]],
"<AU>" : [["AU","astronomical-unit"], 149597900000, "length", ["<meter>"]],
"<light-second>":[["ls","light-second"], 299792500, "length", ["<meter>"]],
"<light-minute>":[["lmin","light-minute"], 17987550000, "length", ["<meter>"]],
"<light-year>" : [["ly","light-year"], 9460528000000000, "length", ["<meter>"]],
"<parsec>" : [["pc","parsec","parsecs"], 30856780000000000, "length", ["<meter>"]],
/* mass */
"<kilogram>" : [["kg","kilogram","kilograms"], 1.0, "mass", ["<kilogram>"]],
"<AMU>" : [["u","AMU","amu"], 6.0221415e26, "mass", ["<kilogram>"]],
"<dalton>" : [["Da","Dalton","Daltons","dalton","daltons"], 6.0221415e26, "mass", ["<kilogram>"]],
"<slug>" : [["slug","slugs"], 14.5939029, "mass", ["<kilogram>"]],
"<short-ton>" : [["tn","ton"], 907.18474, "mass", ["<kilogram>"]],
"<metric-ton>":[["tonne"], 1000, "mass", ["<kilogram>"]],
"<carat>" : [["ct","carat","carats"], 0.0002, "mass", ["<kilogram>"]],
"<pound>" : [["lbs","lb","pound","pounds","#"], 0.45359237, "mass", ["<kilogram>"]],
"<ounce>" : [["oz","ounce","ounces"], 0.0283495231, "mass", ["<kilogram>"]],
"<gram>" : [["g","gram","grams","gramme","grammes"], 1e-3, "mass", ["<kilogram>"]],
"<grain>" : [["grain","grains","gr"], 6.479891e-5, "mass", ["<kilogram>"]],
"<dram>" : [["dram","drams","dr"], 0.0017718452, "mass",["<kilogram>"]],
"<stone>" : [["stone","stones","st"],6.35029318, "mass",["<kilogram>"]],
/* area */
"<hectare>":[["hectare"], 10000, "area", ["<meter>","<meter>"]],
"<acre>":[["acre","acres"], 4046.85642, "area", ["<meter>","<meter>"]],
"<sqft>":[["sqft"], 1, "area", ["<feet>","<feet>"]],
/* volume */
"<liter>" : [["l","L","liter","liters","litre","litres"], 0.001, "volume", ["<meter>","<meter>","<meter>"]],
"<gallon>": [["gal","gallon","gallons"], 0.0037854118, "volume", ["<meter>","<meter>","<meter>"]],
"<quart>": [["qt","quart","quarts"], 0.00094635295, "volume", ["<meter>","<meter>","<meter>"]],
"<pint>": [["pt","pint","pints"], 0.000473176475, "volume", ["<meter>","<meter>","<meter>"]],
"<cup>": [["cu","cup","cups"], 0.000236588238, "volume", ["<meter>","<meter>","<meter>"]],
"<fluid-ounce>": [["floz","fluid-ounce"], 2.95735297e-5, "volume", ["<meter>","<meter>","<meter>"]],
"<tablespoon>": [["tbs","tablespoon","tablespoons"], 1.47867648e-5, "volume", ["<meter>","<meter>","<meter>"]],
"<teaspoon>": [["tsp","teaspoon","teaspoons"], 4.92892161e-6, "volume", ["<meter>","<meter>","<meter>"]],
/* speed */
"<kph>" : [["kph"], 0.277777778, "speed", ["<meter>"], ["<second>"]],
"<mph>" : [["mph"], 0.44704, "speed", ["<meter>"], ["<second>"]],
"<knot>" : [["kt","kn","kts","knot","knots"], 0.514444444, "speed", ["<meter>"], ["<second>"]],
"<fps>" : [["fps"], 0.3048, "speed", ["<meter>"], ["<second>"]],
/* acceleration */
"<gee>" : [["gee"], 9.80665, "acceleration", ["<meter>"], ["<second>","<second>"]],
/* temperature_difference */
"<kelvin>" : [["degK","kelvin"], 1.0, "temperature", ["<kelvin>"]],
"<celsius>" : [["degC","celsius","celsius","centigrade"], 1.0, "temperature", ["<kelvin>"]],
"<fahrenheit>" : [["degF","fahrenheit"], 5/9, "temperature", ["<kelvin>"]],
"<rankine>" : [["degR","rankine"], 5/9, "temperature", ["<kelvin>"]],
"<temp-K>" : [["tempK"], 1.0, "temperature", ["<temp-K>"]],
"<temp-C>" : [["tempC"], 1.0, "temperature", ["<temp-K>"]],
"<temp-F>" : [["tempF"], 5/9, "temperature", ["<temp-K>"]],
"<temp-R>" : [["tempR"], 5/9, "temperature", ["<temp-K>"]],
/* time */
"<second>": [["s","sec","second","seconds"], 1.0, "time", ["<second>"]],
"<minute>": [["min","minute","minutes"], 60.0, "time", ["<second>"]],
"<hour>": [["h","hr","hrs","hour","hours"], 3600.0, "time", ["<second>"]],
"<day>": [["d","day","days"], 3600*24, "time", ["<second>"]],
"<week>": [["wk","week","weeks"], 7*3600*24, "time", ["<second>"]],
"<fortnight>": [["fortnight","fortnights"], 1209600, "time", ["<second>"]],
"<year>": [["y","yr","year","years","annum"], 31556926, "time", ["<second>"]],
"<decade>":[["decade","decades"], 315569260, "time", ["<second>"]],
"<century>":[["century","centuries"], 3155692600, "time", ["<second>"]],
/* pressure */
"<pascal>" : [["Pa","pascal","Pascal"], 1.0, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<bar>" : [["bar","bars"], 100000, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<mmHg>" : [["mmHg"], 133.322368, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<inHg>" : [["inHg"], 3386.3881472, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<torr>" : [["torr"], 133.322368, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<atm>" : [["atm","ATM","atmosphere","atmospheres"], 101325, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<psi>" : [["psi"], 6894.76, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<cmh2o>" : [["cmH2O"], 98.0638, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
"<inh2o>" : [["inH2O"], 249.082052, "pressure", ["<kilogram>"],["<meter>","<second>","<second>"]],
/* viscosity */
"<poise>" : [["P","poise"], 0.1, "viscosity", ["<kilogram>"],["<meter>","<second>"] ],
"<stokes>" : [["St","stokes"], 1e-4, "viscosity", ["<meter>","<meter>"], ["<second>"]],
/* substance */
"<mole>" : [["mol","mole"], 1.0, "substance", ["<mole>"]],
/* concentration */
"<molar>" : [["M","molar"], 1000, "concentration", ["<mole>"], ["<meter>","<meter>","<meter>"]],
"<wtpercent>" : [["wt%","wtpercent"], 10, "concentration", ["<kilogram>"], ["<meter>","<meter>","<meter>"]],
/* activity */
"<katal>" : [["kat","katal","Katal"], 1.0, "activity", ["<mole>"], ["<second>"]],
"<unit>" : [["U","enzUnit"], 16.667e-16, "activity", ["<mole>"], ["<second>"]],
/* capacitance */
"<farad>" : [["F","farad","Farad"], 1.0, "capacitance", ["<farad>"]],
/* charge */
"<coulomb>" : [["C","coulomb","Coulomb"], 1.0, "charge", ["<ampere>","<second>"]],
/* current */
"<ampere>" : [["A","Ampere","ampere","amp","amps"], 1.0, "current", ["<ampere>"]],
/* conductance */
"<siemens>" : [["S","Siemens","siemens"], 1.0, "conductance", ["<second>","<second>","<second>","<ampere>","<ampere>"], ["<kilogram>","<meter>","<meter>"]],
/* inductance */
"<henry>" : [["H","Henry","henry"], 1.0, "inductance", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>","<ampere>","<ampere>"]],
/* potential */
"<volt>" : [["V","Volt","volt","volts"], 1.0, "potential", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>","<second>","<ampere>"]],
/* resistance */
"<ohm>" : [["Ohm","ohm"], 1.0, "resistance", ["<meter>","<meter>","<kilogram>"],["<second>","<second>","<second>","<ampere>","<ampere>"]],
/* magnetism */
"<weber>" : [["Wb","weber","webers"], 1.0, "magnetism", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>","<ampere>"]],
"<tesla>" : [["T","tesla","teslas"], 1.0, "magnetism", ["<kilogram>"], ["<second>","<second>","<ampere>"]],
"<gauss>" : [["G","gauss"], 1e-4, "magnetism", ["<kilogram>"], ["<second>","<second>","<ampere>"]],
"<maxwell>" : [["Mx","maxwell","maxwells"], 1e-8, "magnetism", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>","<ampere>"]],
"<oersted>" : [["Oe","oersted","oersteds"], 250.0/Math.PI, "magnetism", ["<ampere>"], ["<meter>"]],
/* energy */
"<joule>" : [["J","joule","Joule","joules"], 1.0, "energy", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
"<erg>" : [["erg","ergs"], 1e-7, "energy", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
"<btu>" : [["BTU","btu","BTUs"], 1055.056, "energy", ["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
"<calorie>" : [["cal","calorie","calories"], 4.18400, "energy",["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
"<Calorie>" : [["Cal","Calorie","Calories"], 4184.00, "energy",["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
"<therm-US>" : [["th","therm","therms","Therm"], 105480400, "energy",["<meter>","<meter>","<kilogram>"], ["<second>","<second>"]],
/* force */
"<newton>" : [["N","Newton","newton"], 1.0, "force", ["<kilogram>","<meter>"], ["<second>","<second>"]],
"<dyne>" : [["dyn","dyne"], 1e-5, "force", ["<kilogram>","<meter>"], ["<second>","<second>"]],
"<pound-force>" : [["lbf","pound-force"], 4.448222, "force", ["<kilogram>","<meter>"], ["<second>","<second>"]],
/* frequency */
"<hertz>" : [["Hz","hertz","Hertz"], 1.0, "frequency", ["<1>"], ["<second>"]],
/* angle */
"<radian>" :[["rad","radian","radian"], 1.0, "angle", ["<radian>"]],
"<degree>" :[["deg","degree","degrees"], Math.PI / 180.0, "angle", ["<radian>"]],
"<grad>" :[["grad","gradian","grads"], Math.PI / 200.0, "angle", ["<radian>"]],
"<steradian>" : [["sr","steradian","steradians"], 1.0, "solid_angle", ["<steradian>"]],
/* rotation */
"<rotation>" : [["rotation"], 2.0*Math.PI, "angle", ["<radian>"]],
"<rpm>" :[["rpm"], 2.0*Math.PI / 60.0, "angular_velocity", ["<radian>"], ["<second>"]],
/* memory */
"<byte>" :[["B","byte"], 1.0, "memory", ["<byte>"]],
"<bit>" :[["b","bit"], 0.125, "memory", ["<byte>"]],
/* currency */
"<dollar>":[["USD","dollar"], 1.0, "currency", ["<dollar>"]],
"<cents>" :[["cents"], 0.01, "currency", ["<dollar>"]],
/* luminosity */
"<candela>" : [["cd","candela"], 1.0, "luminosity", ["<candela>"]],
"<lumen>" : [["lm","lumen"], 1.0, "luminous_power", ["<candela>","<steradian>"]],
"<lux>" :[["lux"], 1.0, "illuminance", ["<candela>","<steradian>"], ["<meter>","<meter>"]],
/* power */
"<watt>" : [["W","watt","watts"], 1.0, "power", ["<kilogram>","<meter>","<meter>"], ["<second>","<second>","<second>"]],
"<horsepower>" : [["hp","horsepower"], 745.699872, "power", ["<kilogram>","<meter>","<meter>"], ["<second>","<second>","<second>"]],
/* radiation */
"<gray>" : [["Gy","gray","grays"], 1.0, "radiation", ["<meter>","<meter>"], ["<second>","<second>"]],
"<roentgen>" : [["R","roentgen"], 0.009330, "radiation", ["<meter>","<meter>"], ["<second>","<second>"]],
"<sievert>" : [["Sv","sievert","sieverts"], 1.0, "radiation", ["<meter>","<meter>"], ["<second>","<second>"]],
"<becquerel>" : [["Bq","bequerel","bequerels"], 1.0, "radiation", ["<1>"],["<second>"]],
"<curie>" : [["Ci","curie","curies"], 3.7e10, "radiation", ["<1>"],["<second>"]],
/* rate */
"<cpm>" : [["cpm"], 1.0/60.0, "rate", ["<count>"],["<second>"]],
"<dpm>" : [["dpm"], 1.0/60.0, "rate", ["<count>"],["<second>"]],
"<bpm>" : [["bpm"], 1.0/60.0, "rate", ["<count>"],["<second>"]],
/* resolution / typography */
"<dot>" : [["dot","dots"], 1, "resolution", ["<each>"]],
"<pixel>" : [["pixel","px"], 1, "resolution", ["<each>"]],
"<ppi>" : [["ppi"], 1, "resolution", ["<pixel>"], ["<inch>"]],
"<dpi>" : [["dpi"], 1, "typography", ["<dot>"], ["<inch>"]],
/* other */
"<cell>" : [["cells","cell"], 1, "counting", ["<each>"]],
"<each>" : [["each"], 1.0, "counting", ["<each>"]],
"<count>" : [["count"], 1.0, "counting", ["<each>"]],
"<base-pair>" : [["bp"], 1.0, "counting", ["<each>"]],
"<nucleotide>" : [["nt"], 1.0, "counting", ["<each>"]],
"<molecule>" : [["molecule","molecules"], 1.0, "counting", ["<1>"]],
"<dozen>" : [["doz","dz","dozen"],12.0,"prefix_only", ["<each>"]],
"<percent>": [["%","percent"], 0.01, "prefix_only", ["<1>"]],
"<ppm>" : [["ppm"],1e-6, "prefix_only", ["<1>"]],
"<ppt>" : [["ppt"],1e-9, "prefix_only", ["<1>"]],
"<gross>" : [["gr","gross"],144.0, "prefix_only", ["<dozen>","<dozen>"]],
"<decibel>" : [["dB","decibel","decibels"], 1.0, "logarithmic", ["<decibel>"]]
};
// Units considered "base": quantities expressed purely in these need no
// further reduction (see Qty#isBase / toBase).
var BASE_UNITS = ["<meter>","<kilogram>","<second>","<mole>", "<farad>", "<ampere>","<radian>","<kelvin>","<temp-K>","<byte>","<dollar>","<candela>","<each>","<steradian>","<decibel>"];
// Sentinel for the dimensionless unit; numerator/denominator arrays default to it.
var UNITY = "<1>";
var UNITY_ARRAY= [UNITY];
// Scientific-notation number, captured as group 1 of QTY_STRING.
var SCI_NUMBER = "([+-]?\\d*(?:\\.\\d+)?(?:[Ee][+-]?\\d+)?)";
//var SCI_NUMBER_REGEX = new RegExp(SCI_NUMBER);
// Full quantity: number, numerator units (group 2), optional "/denominator" (group 3).
var QTY_STRING = SCI_NUMBER + "\\s*([^/]*)(?:\/(.+))?";
var QTY_STRING_REGEX = new RegExp("^" + QTY_STRING + "$");
// Exponent operator: "^" or "**".
var POWER_OP = "\\^|\\*{2}";
// TOP allows negative exponents (moved to the denominator during parse);
// BOTTOM only positive ones.
var TOP_REGEX = new RegExp ("([^ \\*]+?)(?:" + POWER_OP + ")?(-?\\d+)");
var BOTTOM_REGEX = new RegExp("([^ \\*]+?)(?:" + POWER_OP + ")?(\\d+)");
// Dimensions whose exponents form the signature vector (see unitSignatureVector).
var SIGNATURE_VECTOR = ["length", "time", "temperature", "mass", "current", "substance", "luminosity", "currency", "memory", "angle", "capacitance"];
// Maps a packed signature (base-20 encoding of the vector) to a human kind name.
var KINDS = {
  "-312058": "resistance",
  "-312038": "inductance",
  "-152040": "magnetism",
  "-152038": "magnetism",
  "-152058": "potential",
  "-39": "acceleration",
  "-38": "radiation",
  "-20": "frequency",
  "-19": "speed",
  "-18": "viscosity",
  "0": "unitless",
  "1": "length",
  "2": "area",
  "3": "volume",
  "20": "time",
  "400": "temperature",
  "7942": "power",
  "7959": "pressure",
  "7962": "energy",
  "7979": "viscosity",
  "7981": "force",
  "7997": "mass_concentration",
  "8000": "mass",
  "159999": "magnetism",
  "160000": "current",
  "160020": "charge",
  "312058": "conductance",
  "3199980": "activity",
  "3199997": "molar_concentration",
  "3200000": "substance",
  "63999998": "illuminance",
  "64000000": "luminous_power",
  "1280000000": "currency",
  "25600000000": "memory",
  "511999999980": "angular_velocity",
  "512000000000": "angle",
  "10240000000000": "capacitance"
};
// Cache of base-unit conversion results, keyed by the units() string (see toBase).
var baseUnitCache = {};
/**
 * Quantity constructor.
 *
 * Accepts either a string (e.g. "5.6 kg*m/s^2") which is parsed, or an
 * object with scalar/numerator/denominator fields — the latter is how
 * internal code builds results without re-parsing.
 *
 * @param {(string|object)} initValue - quantity as text or as raw parts
 * @throws {QtyError} when temperatures appear in a product/quotient or the
 *         resulting temperature would be below absolute zero
 */
function Qty(initValue) {
  this.scalar = null;
  this.baseScalar = null;
  this.signature = null;
  this._conversionCache = {};
  this.numerator = UNITY_ARRAY;
  this.denominator = UNITY_ARRAY;
  if(initValue.constructor === String) {
    initValue = initValue.trim();
    parse.call(this, initValue);
  }
  else {
    this.scalar = initValue.scalar;
    // Fall back to unity ("<1>") when numerator/denominator are absent or empty
    this.numerator = (initValue.numerator && initValue.numerator.length !== 0)? initValue.numerator : UNITY_ARRAY;
    this.denominator = (initValue.denominator && initValue.denominator.length !== 0)? initValue.denominator : UNITY_ARRAY;
  }
  // math with temperatures is very limited: a temperature may only appear
  // alone in the numerator, with a unity denominator
  if(this.denominator.join("*").indexOf("temp") >= 0) {
    throw new QtyError("Cannot divide with temperatures");
  }
  if(this.numerator.join("*").indexOf("temp") >= 0) {
    if(this.numerator.length > 1) {
      throw new QtyError("Cannot multiply by temperatures");
    }
    if(!compareArray(this.denominator, UNITY_ARRAY)) {
      throw new QtyError("Cannot divide with temperatures");
    }
  }
  this.initValue = initValue;
  // Eagerly computes baseScalar/signature so comparisons are cheap later
  updateBaseScalar.call(this);
  if(this.isTemperature() && this.baseScalar < 0) {
    throw new QtyError("Temperatures must not be less than absolute zero");
  }
}
/**
 * Parses a string as a quantity.
 *
 * Unlike the constructor, this never throws on malformed quantities:
 * any parse failure is swallowed and null is returned instead.
 *
 * @param {string} value - quantity as text
 * @throws {QtyError} if value is not a string
 * @returns {Qty|null} Parsed quantity or null if unrecognized
 */
Qty.parse = function parse(value) {
  var isString = typeof value === "string" || value instanceof String;
  if (!isString) {
    throw new QtyError("Argument should be a string");
  }
  try {
    return new Qty(value);
  }
  catch (ignored) {
    // Unrecognized quantity: signal with null rather than an exception
    return null;
  }
};
/**
 * Builds a fast conversion function between two fixed units.
 *
 * Useful to efficiently convert a large array of numbers with identical
 * units by mapping the returned function over it. Does not take care of
 * rounding issues.
 *
 * @param {string} srcUnits - units of values to convert
 * @param {string} dstUnits - units to convert to
 *
 * @returns {Function} function taking a Number and returning the
 *          converted Number
 *
 * @throws {QtyError} "Incompatible units" if units are incompatible
 *
 * @example
 * var converter = Qty.swiftConverter("m/h", "ft/s");
 * var convertedSerie = largeSerie.map(converter);
 */
Qty.swiftConverter = function swiftConverter(srcUnits, dstUnits) {
  var source = new Qty(srcUnits);
  var target = new Qty(dstUnits);

  // Same units: nothing to do
  if (source.eq(target)) {
    return identity;
  }

  if (source.isTemperature()) {
    // Temperatures need offset handling, so fall back to full Qty math
    return function(value) {
      // TODO Not optimized
      return source.mul(value).to(target).scalar;
    };
  }

  // Plain linear units: scale through the base-unit representation
  return function(value) {
    return value * source.baseScalar / target.baseScalar;
  };
};
// Lazily computes and caches, on `this`, the scalar expressed in base units
// (baseScalar) and the unit-kind fingerprint (signature).
// Called with a Qty instance as `this`.
var updateBaseScalar = function () {
  if(this.baseScalar) {
    return this.baseScalar;
  }
  if(this.isBase()) {
    // Already in base units: scalar is its own base representation
    this.baseScalar = this.scalar;
    this.signature = unitSignature.call(this);
  }
  else {
    // Reduce to base units and copy over the computed fields
    var base = this.toBase();
    this.baseScalar = base.scalar;
    this.signature = base.signature;
  }
};
/*
  calculates the unit signature id for use in comparing compatible units and simplification
  the signature is based on a simple classification of units and is based on the following publication

  Novak, G.S., Jr. "Conversion of units of measurement", IEEE Transactions on Software Engineering,
  21(8), Aug 1995, pp.651-661
  doi://10.1109/32.403789
  http://ieeexplore.ieee.org/Xplore/login.jsp?url=/iel1/32/9079/00403789.pdf?isnumber=9079&prod=JNL&arnumber=403789&arSt=651&ared=661&arAuthor=Novak%2C+G.S.%2C+Jr.
*/
// Packs the per-dimension exponent vector into a single integer by weighting
// position i with 20^i, then summing — the keys of the KINDS table.
var unitSignature = function () {
  if(this.signature) {
    return this.signature;
  }
  var vector = unitSignatureVector.call(this);
  for(var i = 0; i < vector.length; i++) {
    vector[i] *= Math.pow(20, i);
  }
  return vector.reduce(function(previous, current) {return previous + current;}, 0);
};
// calculates the unit signature vector used by unit_signature:
// one integer exponent per SIGNATURE_VECTOR dimension, +1 for each numerator
// unit of that kind and -1 for each denominator unit.
var unitSignatureVector = function () {
  // Signatures are only meaningful in base units; reduce first if needed
  if(!this.isBase()) {
    return unitSignatureVector.call(this.toBase());
  }
  var vector = new Array(SIGNATURE_VECTOR.length);
  for(var i = 0; i < vector.length; i++) {
    vector[i] = 0;
  }
  var r, n;
  for(var j = 0; j < this.numerator.length; j++) {
    // r is the UNITS definition row; r[2] is its kind (e.g. "length")
    if((r = UNITS[this.numerator[j]])) {
      n = SIGNATURE_VECTOR.indexOf(r[2]);
      if(n >= 0) {
        vector[n] = vector[n] + 1;
      }
    }
  }
  for(var k = 0; k < this.denominator.length; k++) {
    if((r = UNITS[this.denominator[k]])) {
      n = SIGNATURE_VECTOR.indexOf(r[2]);
      if(n >= 0) {
        vector[n] = vector[n] - 1;
      }
    }
  }
  return vector;
};
/* parse a string into a unit object.
 * Typical formats like :
 * "5.6 kg*m/s^2"
 * "5.6 kg*m*s^-2"
 * "5.6 kilogram*meter*second^-2"
 * "2.2 kPa"
 * "37 degC"
 * "1"  -- creates a unitless constant with value 1
 * "GPa"  -- creates a unit with scalar 1 with units 'GPa'
 * 6'4"  -- recognized as 6 feet + 4 inches
 * 8 lbs 8 oz -- recognized as 8 lbs + 8 ounces
 *
 * Called with a Qty instance as `this`; fills in scalar, numerator and
 * denominator. Exponents are expanded into repeated unit tokens
 * ("m^2" -> "m m"); negative exponents in the numerator are moved to the
 * denominator.
 */
var parse = function (val) {
  var result = QTY_STRING_REGEX.exec(val);
  if(!result) {
    throw new QtyError(val + ": Quantity not recognized");
  }
  // Group 1: number (defaults to 1.0), group 2: numerator, group 3: denominator
  this.scalar = result[1] ? parseFloat(result[1]) : 1.0;
  var top = result[2];
  var bottom = result[3];
  var n, x, nx;
  // TODO DRY me
  // Repeatedly expand "unit^n" occurrences in the numerator
  while((result = TOP_REGEX.exec(top))) {
    n = parseFloat(result[2]);
    if(isNaN(n)) {
      // Prevents infinite loops
      throw new QtyError("Unit exponent is not a number");
    }
    // Disallow unrecognized unit even if exponent is 0
    if(n === 0 && !UNIT_TEST_REGEX.test(result[1])) {
      throw new QtyError("Unit not recognized");
    }
    x = result[1] + " ";
    nx = "";
    // Repeat the unit token |n| times
    for(var i = 0; i < Math.abs(n) ; i++) {
      nx += x;
    }
    if(n >= 0) {
      top = top.replace(result[0], nx);
    }
    else {
      // Negative exponent: shift the repetitions into the denominator
      bottom = bottom ? bottom + nx : nx;
      top = top.replace(result[0], "");
    }
  }
  // Same expansion for the denominator (only positive exponents allowed here)
  while((result = BOTTOM_REGEX.exec(bottom))) {
    n = parseFloat(result[2]);
    if(isNaN(n)) {
      // Prevents infinite loops
      throw new QtyError("Unit exponent is not a number");
    }
    // Disallow unrecognized unit even if exponent is 0
    if(n === 0 && !UNIT_TEST_REGEX.test(result[1])) {
      throw new QtyError("Unit not recognized");
    }
    x = result[1] + " ";
    nx = "";
    for(var j = 0; j < n ; j++) {
      nx += x;
    }
    // NOTE(review): the third argument "g" is ignored — standard
    // String.prototype.replace takes only two arguments, so only the first
    // occurrence is replaced here; the surrounding while loop handles the rest.
    bottom = bottom.replace(result[0], nx, "g");
  }
  if(top) {
    this.numerator = parseUnits(top.trim());
  }
  if(bottom) {
    this.denominator = parseUnits(bottom.trim());
  }
};
/*
 * Throws incompatible units error.
 * Shared helper so the message stays consistent across methods.
 *
 * @throws {QtyError} always, with "Incompatible units" message
 */
function throwIncompatibleUnits() {
  throw new QtyError("Incompatible units");
}
Qty.prototype = {
  // Properly set up constructor
  constructor: Qty,

  // Converts the unit back to a float if it is unitless.  Otherwise raises an exception
  toFloat: function() {
    if(this.isUnitless()) {
      return this.scalar;
    }
    throw new QtyError("Can't convert to Float unless unitless.  Use Unit#scalar");
  },

  // returns true if no associated units
  // false, even if the units are "unitless" like 'radians, each, etc'
  isUnitless: function() {
    return compareArray(this.numerator, UNITY_ARRAY) && compareArray(this.denominator, UNITY_ARRAY);
  },

  /*
  check to see if units are compatible, but not the scalar part
  this check is done by comparing signatures for performance reasons
  if passed a string, it will create a unit object with the string and then do the comparison
  this permits a syntax like:
  unit =~ "mm"
  if you want to do a regexp on the unit string do this ...
  unit.units =~ /regexp/
  */
  isCompatible: function(other) {
    if(other && other.constructor === String) {
      return this.isCompatible(new Qty(other));
    }

    if(!(other instanceof Qty)) {
      return false;
    }

    if(other.signature !== undefined) {
      return this.signature === other.signature;
    }
    else {
      return false;
    }
  },

  /*
  check to see if units are inverse of each other, but not the scalar part
  this check is done by comparing signatures for performance reasons
  if passed a string, it will create a unit object with the string and then do the comparison
  this permits a syntax like:
  unit =~ "mm"
  if you want to do a regexp on the unit string do this ...
  unit.units =~ /regexp/
  */
  isInverse: function(other) {
    return this.inverse().isCompatible(other);
  },

  // Returns the kind name ("length", "area", ...) looked up from the
  // signature, or undefined for unknown signatures.
  kind: function() {
    return KINDS[this.signature.toString()];
  },

  // Returns 'true' if the Unit is represented in base units
  // Result is memoized in this._isBase.
  isBase: function() {
    if(this._isBase !== undefined) {
      return this._isBase;
    }

    // Degree quantities in kelvin are treated as base directly
    if(this.isDegrees() && this.numerator[0].match(/<(kelvin|temp-K)>/)) {
      this._isBase = true;
      return this._isBase;
    }

    // Any non-unity, non-base token makes the whole quantity non-base
    this.numerator.concat(this.denominator).forEach(function(item) {
      if(item !== UNITY && BASE_UNITS.indexOf(item) === -1 ) {
        this._isBase = false;
      }
    }, this);
    if(this._isBase === false) {
      return this._isBase;
    }
    this._isBase = true;
    return this._isBase;
  },

  // convert to base SI units
  // results of the conversion are cached so subsequent calls to this will be fast
  toBase: function() {
    if(this.isBase()) {
      return this;
    }

    if(this.isTemperature()) {
      return toTempK(this);
    }

    // Unit-level conversion is cached by units string; scalar applied after
    var cached = baseUnitCache[this.units()];
    if(!cached) {
      cached = toBaseUnits(this.numerator,this.denominator);
      baseUnitCache[this.units()] = cached;
    }
    return cached.mul(this.scalar);
  },

  // returns the 'unit' part of the Unit object without the scalar
  // (e.g. "m/s2"); memoized in this._units.
  units: function() {
    if(this._units !== undefined) {
      return this._units;
    }

    var numIsUnity = compareArray(this.numerator, UNITY_ARRAY),
        denIsUnity = compareArray(this.denominator, UNITY_ARRAY);
    if(numIsUnity && denIsUnity) {
      this._units = "";
      return this._units;
    }

    var numUnits = stringifyUnits(this.numerator),
        denUnits = stringifyUnits(this.denominator);
    this._units = numUnits + (denIsUnity ? "":("/" + denUnits));
    return this._units;
  },

  // Comparison shorthands, all delegating to compareTo (which throws on
  // incompatible units).
  eq: function(other) {
    return this.compareTo(other) === 0;
  },
  lt: function(other) {
    return this.compareTo(other) === -1;
  },
  lte: function(other) {
    return this.eq(other) || this.lt(other);
  },
  gt: function(other) {
    return this.compareTo(other) === 1;
  },
  gte: function(other) {
    return this.eq(other) || this.gt(other);
  },
  /**
   * Returns the nearest multiple of quantity passed as
   * precision
   *
   * @param {(Qty|string|number)} precQuantity - Quantity, string formated
   *   quantity or number as expected precision
   *
   * @returns {Qty} Nearest multiple of precQuantity
   *
   * @example
   * new Qty('5.5 ft').toPrec('2 ft'); // returns 6 ft
   * new Qty('0.8 cu').toPrec('0.25 cu'); // returns 0.75 cu
   * new Qty('6.3782 m').toPrec('cm'); // returns 6.38 m
   * new Qty('1.146 MPa').toPrec('0.1 bar'); // returns 1.15 MPa
   *
   */
  toPrec: function(precQuantity) {
    if(precQuantity && precQuantity.constructor === String) {
      precQuantity = new Qty(precQuantity);
    }
    if(typeof precQuantity === "number") {
      // Bare number: interpret it in this quantity's own units
      precQuantity = new Qty(precQuantity + " " + this.units());
    }

    if(!this.isUnitless()) {
      precQuantity = precQuantity.to(this.units());
    }
    else if(!precQuantity.isUnitless()) {
      throwIncompatibleUnits();
    }

    if(precQuantity.scalar === 0) {
      throw new QtyError("Divide by zero");
    }

    // Snap the scalar to the nearest multiple of the precision scalar
    var precRoundedResult = mulSafe(Math.round(this.scalar/precQuantity.scalar),
                                       precQuantity.scalar);

    return new Qty(precRoundedResult + this.units());
  },

  /**
   * Stringifies the quantity
   *
   * @param {(number|string|Qty)} targetUnitsOrMaxDecimalsOrPrec -
   *   target units if string,
   *   max number of decimals if number,
   *   passed to #toPrec before converting if Qty
   *
   * @param {number=} maxDecimals - Maximum number of decimals of
   *   formatted output
   *
   * @returns {string} reparseable quantity as string
   */
  toString: function(targetUnitsOrMaxDecimalsOrPrec, maxDecimals) {
    var targetUnits;
    if(typeof targetUnitsOrMaxDecimalsOrPrec === "number") {
      targetUnits = this.units();
      maxDecimals = targetUnitsOrMaxDecimalsOrPrec;
    }
    else if(typeof targetUnitsOrMaxDecimalsOrPrec === "string") {
      targetUnits = targetUnitsOrMaxDecimalsOrPrec;
    }
    else if(targetUnitsOrMaxDecimalsOrPrec instanceof Qty) {
      return this.toPrec(targetUnitsOrMaxDecimalsOrPrec).toString(maxDecimals);
    }

    var out = this.to(targetUnits);

    var outScalar = maxDecimals !== undefined ? round(out.scalar, maxDecimals) : out.scalar;
    out = (outScalar + " " + out.units()).trim();
    return out;
  },

  // Compare two Qty objects. Throws an exception if they are not of compatible types.
  // Comparisons are done based on the value of the quantity in base SI units.
  //
  // NOTE: We cannot compare inverses as that breaks the general compareTo contract:
  //   if a.compareTo(b) < 0 then b.compareTo(a) > 0
  //   if a.compareTo(b) == 0 then b.compareTo(a) == 0
  //
  //   Since "10S" == ".1ohm" (10 > .1) and "10ohm" == ".1S" (10 > .1)
  //     new Qty("10S").inverse().compareTo("10ohm") == -1
  //     new Qty("10ohm").inverse().compareTo("10S") == -1
  //
  //   If including inverses in the sort is needed, I suggest writing: Qty.sort(qtyArray,units)
  compareTo: function(other) {
    if(other && other.constructor === String) {
      return this.compareTo(new Qty(other));
    }
    if(!this.isCompatible(other)) {
      throwIncompatibleUnits();
    }
    if(this.baseScalar < other.baseScalar) {
      return -1;
    }
    else if(this.baseScalar === other.baseScalar) {
      return 0;
    }
    else if(this.baseScalar > other.baseScalar) {
      return 1;
    }
  },

  // Return true if quantities and units match
  // Unit("100 cm").same(Unit("100 cm")) # => true
  // Unit("100 cm").same(Unit("1 m"))    # => false
  same: function(other) {
    return (this.scalar === other.scalar) && (this.units() === other.units());
  },

  // Returns a Qty that is the inverse of this Qty,
  inverse: function() {
    if(this.isTemperature()) {
      throw new QtyError("Cannot divide with temperatures");
    }
    if(this.scalar === 0) {
      throw new QtyError("Divide by zero");
    }
    return new Qty({"scalar": 1/this.scalar, "numerator": this.denominator, "denominator": this.numerator});
  },

  // True for any single temperature-like unit (absolute temp-X or degX)
  // with a unity denominator.
  isDegrees: function() {
    // signature may not have been calculated yet
    return (this.signature === null || this.signature === 400) &&
      this.numerator.length === 1 &&
      compareArray(this.denominator, UNITY_ARRAY) &&
      (this.numerator[0].match(/<temp-[CFRK]>/) || this.numerator[0].match(/<(kelvin|celsius|rankine|fahrenheit)>/));
  },

  // True only for absolute temperatures (temp-C/F/R/K), not degree intervals.
  isTemperature: function() {
    return this.isDegrees() && this.numerator[0].match(/<temp-[CFRK]>/);
  },
  /**
   * Converts to other compatible units.
   * Instance's converted quantities are cached for faster subsequent calls.
   *
   * @param {(string|Qty)} other - Target units as string or retrieved from
   *   other Qty instance (scalar is ignored)
   *
   * @returns {Qty} New converted Qty instance with target units
   *
   * @throws {QtyError} if target units are incompatible
   *
   * @example
   * var weight = new Qty("25 kg");
   * weight.to("lb"); // => new Qty("55.11556554621939 lbs");
   * weight.to(new Qty("3 g")); // => new Qty("25000 g"); // scalar of passed Qty is ignored
   */
  to: function(other) {
    var cached, target;

    if(!other) {
      return this;
    }

    if(other.constructor !== String) {
      return this.to(other.units());
    }

    cached = this._conversionCache[other];
    if(cached) {
      return cached;
    }

    // Instantiating target to normalize units
    target = new Qty(other);
    if(target.units() === this.units()) {
      return this;
    }

    if(!this.isCompatible(target)) {
      if(this.isInverse(target)) {
        // e.g. converting ohms to siemens: convert the inverse instead
        target = this.inverse().to(other);
      }
      else {
        throwIncompatibleUnits();
      }
    }
    else {
      if(target.isTemperature()) {
        target = toTemp(this,target);
      }
      else if(target.isDegrees()) {
        target = toDegrees(this,target);
      }
      else {
        // Linear case: rescale via the shared base-unit representation
        var q = divSafe(this.baseScalar, target.baseScalar);
        target = new Qty({"scalar": q, "numerator": target.numerator, "denominator": target.denominator});
      }
    }

    this._conversionCache[other] = target;
    return target;
  },

  // Quantity operators
  // Returns new instance with this units
  add: function(other) {
    if(other && other.constructor === String) {
      other = new Qty(other);
    }

    if(!this.isCompatible(other)) {
      throwIncompatibleUnits();
    }

    if(this.isTemperature() && other.isTemperature()) {
      throw new QtyError("Cannot add two temperatures");
    }
    else if(this.isTemperature()) {
      // temperature + degree interval is allowed
      return addTempDegrees(this,other);
    }
    else if(other.isTemperature()) {
      return addTempDegrees(other,this);
    }

    return new Qty({"scalar": this.scalar + other.to(this).scalar, "numerator": this.numerator, "denominator": this.denominator});
  },

  sub: function(other) {
    if(other && other.constructor === String) {
      other = new Qty(other);
    }

    if(!this.isCompatible(other)) {
      throwIncompatibleUnits();
    }

    if(this.isTemperature() && other.isTemperature()) {
      // temperature - temperature yields a degree interval
      return subtractTemperatures(this,other);
    }
    else if(this.isTemperature()) {
      return subtractTempDegrees(this,other);
    }
    else if(other.isTemperature()) {
      throw new QtyError("Cannot subtract a temperature from a differential degree unit");
    }

    return new Qty({"scalar": this.scalar - other.to(this).scalar, "numerator": this.numerator, "denominator": this.denominator});
  },

  mul: function(other) {
    if(typeof other === "number") {
      return new Qty({"scalar": mulSafe(this.scalar, other), "numerator": this.numerator, "denominator": this.denominator});
    }
    else if(other && other.constructor === String) {
      other = new Qty(other);
    }

    if((this.isTemperature()||other.isTemperature()) && !(this.isUnitless()||other.isUnitless())) {
      throw new QtyError("Cannot multiply by temperatures");
    }

    // Quantities should be multiplied with same units if compatible, with base units else
    var op1 = this;
    var op2 = other;

    // so as not to confuse results, multiplication and division between temperature degrees will maintain original unit info in num/den
    // multiplication and division between deg[CFRK] can never factor each other out, only themselves: "degK*degC/degC^2" == "degK/degC"
    if(op1.isCompatible(op2) && op1.signature !== 400) {
      op2 = op2.to(op1);
    }
    var numden = cleanTerms(op1.numerator.concat(op2.numerator), op1.denominator.concat(op2.denominator));

    return new Qty({"scalar": mulSafe(op1.scalar, op2.scalar) , "numerator": numden[0], "denominator": numden[1]});
  },

  div: function(other) {
    if(typeof other === "number") {
      if(other === 0) {
        throw new QtyError("Divide by zero");
      }
      return new Qty({"scalar": this.scalar / other, "numerator": this.numerator, "denominator": this.denominator});
    }
    else if(other && other.constructor === String) {
      other = new Qty(other);
    }

    if(other.scalar === 0) {
      throw new QtyError("Divide by zero");
    }

    if(other.isTemperature()) {
      throw new QtyError("Cannot divide with temperatures");
    }
    else if(this.isTemperature() && !other.isUnitless()) {
      throw new QtyError("Cannot divide with temperatures");
    }

    // Quantities should be multiplied with same units if compatible, with base units else
    var op1 = this;
    var op2 = other;

    // so as not to confuse results, multiplication and division between temperature degrees will maintain original unit info in num/den
    // multiplication and division between deg[CFRK] can never factor each other out, only themselves: "degK*degC/degC^2" == "degK/degC"
    if(op1.isCompatible(op2) && op1.signature !== 400) {
      op2 = op2.to(op1);
    }
    var numden = cleanTerms(op1.numerator.concat(op2.denominator), op1.denominator.concat(op2.numerator));

    return new Qty({"scalar": op1.scalar / op2.scalar, "numerator": numden[0], "denominator": numden[1]});
  }
};
// Reduces numerator/denominator token arrays to base-unit form, accumulating
// the combined conversion factor q. Prefix tokens (kilo, milli, ...) only
// contribute to q; unit tokens contribute their scalar plus their own
// base-unit numerator/denominator definitions (which swap sides for the
// denominator).
function toBaseUnits (numerator,denominator) {
  var num = [];
  var den = [];
  var q = 1;
  var unit;
  for(var i = 0; i < numerator.length; i++) {
    unit = numerator[i];
    if(PREFIX_VALUES[unit]) {
      // workaround to fix
      // 0.1 * 0.1 => 0.010000000000000002
      q = mulSafe(q, PREFIX_VALUES[unit]);
    }
    else {
      if(UNIT_VALUES[unit]) {
        q *= UNIT_VALUES[unit].scalar;

        if(UNIT_VALUES[unit].numerator) {
          num.push(UNIT_VALUES[unit].numerator);
        }
        if(UNIT_VALUES[unit].denominator) {
          den.push(UNIT_VALUES[unit].denominator);
        }
      }
    }
  }
  for(var j = 0; j < denominator.length; j++) {
    unit = denominator[j];
    if(PREFIX_VALUES[unit]) {
      q /= PREFIX_VALUES[unit];
    }
    else {
      if(UNIT_VALUES[unit]) {
        q /= UNIT_VALUES[unit].scalar;

        // Note: sides are swapped for denominator units
        if(UNIT_VALUES[unit].numerator) {
          den.push(UNIT_VALUES[unit].numerator);
        }
        if(UNIT_VALUES[unit].denominator) {
          num.push(UNIT_VALUES[unit].denominator);
        }
      }
    }
  }

  // Flatten (pushed entries above are arrays of tokens)
  num = num.reduce(function(a,b) {
    return a.concat(b);
  }, []);
  den = den.reduce(function(a,b) {
    return a.concat(b);
  }, []);

  return new Qty({"scalar": q, "numerator": num, "denominator": den});
}
// Cache of parseUnits results, keyed by the raw units string.
var parsedUnitsCache = {};
/**
 * Parses and converts units string to normalized unit array.
 * Result is cached to speed up next calls.
 *
 * @param {string} units Units string
 * @returns {string[]} Array of normalized units
 *
 * @example
 * // Returns ["<second>", "<meter>", "<second>"]
 * parseUnits("s m s");
 *
 */
function parseUnits(units) {
  var cached = parsedUnitsCache[units];
  if(cached) {
    return cached;
  }

  var unitMatch, normalizedUnits = [];

  // Scan: reject the whole string if it is not entirely made of known units
  if(!UNIT_TEST_REGEX.test(units)) {
    throw new QtyError("Unit not recognized");
  }

  while((unitMatch = UNIT_MATCH_REGEX.exec(units))) {
    // unitMatch.slice(1) keeps [prefix, unit] capture groups
    normalizedUnits.push(unitMatch.slice(1));
  }

  // Map surface names to canonical "<...>" tokens; prefix may be absent
  normalizedUnits = normalizedUnits.map(function(item) {
    return PREFIX_MAP[item[0]] ? [PREFIX_MAP[item[0]], UNIT_MAP[item[1]]] : [UNIT_MAP[item[1]]];
  });

  // Flatten and remove null elements
  normalizedUnits = normalizedUnits.reduce(function(a,b) {
    return a.concat(b);
  }, []);
  normalizedUnits = normalizedUnits.filter(function(item) {
    return item;
  });

  parsedUnitsCache[units] = normalizedUnits;

  return normalizedUnits;
}
/**
 * A trivial trie-like map keyed by a sequence of keys.
 * Values are stored under a reserved "data" property of the leaf node so
 * that a stored value and a deeper sub-tree can coexist.
 */
function NestedMap() {}

/**
 * Looks up a value by key path.
 * Accepts either an array of keys or the keys as separate arguments.
 *
 * @returns {*} stored value, or undefined when the path does not exist
 */
NestedMap.prototype.get = function(keys) {
  // Allows to pass key1, key2, ... instead of [key1, key2, ...]
  if (arguments.length > 1) {
    // Slower with Firefox but faster with Chrome than
    // Array.prototype.slice.call(arguments)
    // See http://jsperf.com/array-apply-versus-array-prototype-slice-call
    keys = Array.apply(null, arguments);
  }

  var node = this;
  for (var i = 0; i < keys.length; i++) {
    if (!node) {
      // Path broke off earlier: nothing stored here
      return undefined;
    }
    var child = node[keys[i]];
    if (i === keys.length - 1) {
      return child ? child.data : undefined;
    }
    node = child;
  }
  // Empty key list: mirror reduce-with-seed semantics and yield the root
  return node;
};

/**
 * Stores a value under a key path, creating intermediate nodes as needed.
 * Accepts either ([key1, key2, ...], value) or (key1, key2, ..., value).
 *
 * @returns {*} the stored value
 */
NestedMap.prototype.set = function(keys, value) {
  if (arguments.length > 2) {
    keys = Array.prototype.slice.call(arguments, 0, -1);
    value = arguments[arguments.length - 1];
  }

  var node = this;
  for (var i = 0; i < keys.length; i++) {
    var child = node[keys[i]];
    if (child === undefined) {
      child = node[keys[i]] = {};
    }
    if (i === keys.length - 1) {
      child.data = value;
      return value;
    }
    node = child;
  }
  // Empty key list: nothing to store; yield the root like reduce would
  return node;
};
// Cache of stringifyUnits results, keyed by the unit token sequence itself.
var stringifiedUnitsCache = new NestedMap();
/**
 * Returns a string representing a normalized unit array
 *
 * @param {string[]} units Normalized unit array
 * @returns {string} String representing passed normalized unit array and
 *   suitable for output
 *
 */
function stringifyUnits(units) {

  var stringified = stringifiedUnitsCache.get(units);
  if(stringified) {
    return stringified;
  }

  var isUnity = compareArray(units, UNITY_ARRAY);
  if(isUnity) {
    stringified = "1";
  }
  else {
    // Resolve tokens to display names, then collapse repeats ("m m" -> "m2")
    stringified = simplify(getOutputNames(units)).join("*");
  }

  // Cache result
  stringifiedUnitsCache.set(units, stringified);

  return stringified;
}
// Maps canonical unit tokens to display names, merging each prefix token
// with the unit token that follows it (e.g. "<kilo>","<meter>" -> "km").
function getOutputNames(units) {
  var unitNames = [], token, tokenNext;
  for(var i = 0; i < units.length; i++) {
    token = units[i];
    tokenNext = units[i+1];
    if(PREFIX_VALUES[token]) {
      // Prefix consumes the next token too
      unitNames.push(OUTPUT_MAP[token] + OUTPUT_MAP[tokenNext]);
      i++;
    }
    else {
      unitNames.push(OUTPUT_MAP[token]);
    }
  }
  return unitNames;
}
/**
 * Collapses repeated unit names into exponent notation, keeping the order
 * of first occurrence: ["s","m","s"] becomes ["s2","m"].
 *
 * @param {string[]} units - display unit names, possibly repeated
 * @returns {string[]} deduplicated names with exponents appended when > 1
 */
function simplify (units) {
  var order = [];
  // Null-prototype object so unit names can never collide with
  // Object.prototype members during the truthiness checks below
  var counts = Object.create(null);

  for (var i = 0; i < units.length; i++) {
    var name = units[i];
    if (counts[name] === undefined) {
      counts[name] = 0;
      order.push(name);
    }
    counts[name]++;
  }

  return order.map(function(name) {
    var power = counts[name];
    return power > 1 ? name + power : name;
  });
}
/**
 * Deep-compares two arrays element by element.
 *
 * If an element exposes its own `compareArray` method, that method decides
 * equality for that pair; otherwise strict identity (`!==`) is used.
 *
 * FIX: previously a successful nested `compareArray` result was ignored
 * because control fell through to the identity check, which always fails
 * for distinct objects — making the delegation branch dead code. The
 * nested result is now honored via `else if`.
 *
 * @param {Array} array1 - first array
 * @param {Array} array2 - second array
 * @returns {boolean} true when lengths match and all elements compare equal
 */
function compareArray(array1, array2) {
  if (array2.length !== array1.length) {
    return false;
  }
  for (var i = 0; i < array1.length; i++) {
    if (array2[i].compareArray) {
      // Delegate equality to the element's own comparison method
      if (!array2[i].compareArray(array1[i])) {
        return false;
      }
    }
    else if (array2[i] !== array1[i]) {
      return false;
    }
  }
  return true;
}
/**
 * Rounds a value to the given number of decimal places.
 *
 * @param {number} val - value to round
 * @param {number} decimals - decimal places to keep
 * @returns {number} rounded value
 */
function round(val, decimals) {
  var scale = Math.pow(10, decimals);
  return Math.round(val * scale) / scale;
}
// Decimal number "123.45" — group 2 captures the fractional digits
// (used by getFractional to count decimal places).
var numRegex = /^-?(\d+)(?:\.(\d+))?$/;
// Exponential form "123e-45" — group 2 captures the exponent magnitude.
var expRegex = /^-?(\d+)e-?(\d+)$/;

// temperature - temperature: the result is a degree interval in lhs' scale.
function subtractTemperatures(lhs,rhs) {
  var lhsUnits = lhs.units();
  var rhsConverted = rhs.to(lhsUnits);
  var dstDegrees = new Qty(getDegreeUnits(lhsUnits));
  return new Qty({"scalar": lhs.scalar - rhsConverted.scalar, "numerator": dstDegrees.numerator, "denominator": dstDegrees.denominator});
}

// temperature - degree interval: result stays an absolute temperature.
function subtractTempDegrees(temp,deg) {
  var tempDegrees = deg.to(getDegreeUnits(temp.units()));
  return new Qty({"scalar": temp.scalar - tempDegrees.scalar, "numerator": temp.numerator, "denominator": temp.denominator});
}

// temperature + degree interval: result stays an absolute temperature.
function addTempDegrees(temp,deg) {
  var tempDegrees = deg.to(getDegreeUnits(temp.units()));
  return new Qty({"scalar": temp.scalar + tempDegrees.scalar, "numerator": temp.numerator, "denominator": temp.denominator});
}
/**
 * Maps an absolute temperature unit name to its degree-interval
 * counterpart (e.g. "tempC" -> "degC").
 *
 * @param {string} units - absolute temperature unit name
 * @returns {string} matching degree unit name
 * @throws {QtyError} for any unit that is not tempK/tempC/tempF/tempR
 */
function getDegreeUnits(units) {
  var degreeByTemp = {
    tempK: "degK",
    tempC: "degC",
    tempF: "degF",
    tempR: "degR"
  };

  var degrees = degreeByTemp[units];
  if (degrees === undefined) {
    throw new QtyError("Unknown type for temp conversion from: " + units);
  }
  return degrees;
}
// Converts a degree-interval quantity to another degree scale. Intervals
// need only a linear factor (no offset): degC and degK intervals are the
// same size, degF and degR intervals are 9/5 of a kelvin.
function toDegrees(src,dst) {
  var srcDegK = toDegK(src);
  var dstUnits = dst.units();
  var dstScalar;

  if(dstUnits === "degK") {
    dstScalar = srcDegK.scalar;
  }
  else if(dstUnits === "degC") {
    // degC interval == degK interval
    dstScalar = srcDegK.scalar ;
  }
  else if(dstUnits === "degF") {
    dstScalar = srcDegK.scalar * 9/5;
  }
  else if(dstUnits === "degR") {
    // degR interval == degF interval
    dstScalar = srcDegK.scalar * 9/5;
  }
  else {
    throw new QtyError("Unknown type for degree conversion to: " + dstUnits);
  }

  return new Qty({"scalar": dstScalar, "numerator": dst.numerator, "denominator": dst.denominator});
}
// Normalizes a degree-interval (or absolute temperature treated as an
// interval magnitude) quantity to kelvin-sized degrees.
function toDegK(qty) {
  var units = qty.units();
  var q;
  if(units.match(/(deg)[CFRK]/)) {
    // Already a degree interval: baseScalar is the kelvin-sized magnitude
    q = qty.baseScalar;
  }
  else if(units === "tempK") {
    q = qty.scalar;
  }
  else if(units === "tempC") {
    // Interval magnitude only — degC and degK intervals are equal in size
    q = qty.scalar;
  }
  else if(units === "tempF") {
    q = qty.scalar * 5/9;
  }
  else if(units === "tempR") {
    q = qty.scalar * 5/9;
  }
  else {
    throw new QtyError("Unknown type for temp conversion from: " + units);
  }

  return new Qty({"scalar": q, "numerator": ["<kelvin>"], "denominator": UNITY_ARRAY});
}
// Converts any compatible quantity to an absolute temperature in dst's
// scale, applying the appropriate offset from the kelvin base value.
function toTemp(src,dst) {
  var dstUnits = dst.units();
  var dstScalar;

  if(dstUnits === "tempK") {
    dstScalar = src.baseScalar;
  }
  else if(dstUnits === "tempC") {
    dstScalar = src.baseScalar - 273.15;
  }
  else if(dstUnits === "tempF") {
    dstScalar = (src.baseScalar * 9/5) - 459.67;
  }
  else if(dstUnits === "tempR") {
    // Rankine shares its zero with kelvin; only the degree size differs
    dstScalar = src.baseScalar * 9/5;
  }
  else {
    throw new QtyError("Unknown type for temp conversion to: " + dstUnits);
  }

  return new Qty({"scalar": dstScalar, "numerator": dst.numerator, "denominator": dst.denominator});
}
// Converts an absolute temperature (or degree interval magnitude) to
// absolute kelvin — the base representation used by Qty#toBase.
function toTempK(qty) {
  var units = qty.units();
  var q;
  if(units.match(/(deg)[CFRK]/)) {
    // Degree interval: baseScalar already expresses it in kelvin size
    q = qty.baseScalar;
  }
  else if(units === "tempK") {
    q = qty.scalar;
  }
  else if(units === "tempC") {
    q = qty.scalar + 273.15;
  }
  else if(units === "tempF") {
    q = (qty.scalar + 459.67) * 5/9;
  }
  else if(units === "tempR") {
    q = qty.scalar * 5/9;
  }
  else {
    throw new QtyError("Unknown type for temp conversion from: " + units);
  }

  return new Qty({"scalar": q, "numerator": ["<temp-K>"], "denominator": UNITY_ARRAY});
}
/**
 * Safely multiplies numbers while avoiding floating errors
 * like 0.1 * 0.1 => 0.010000000000000002.
 *
 * Tracks the total number of fractional digits across all factors and
 * rounds the raw product back to that precision.
 *
 * @param {...number} number - factors to multiply
 * @returns {number} product, rounded to the accumulated precision
 */
function mulSafe() {
  var product = 1;
  var totalFractionDigits = 0;

  for (var i = 0; i < arguments.length; i++) {
    var factor = arguments[i];
    totalFractionDigits += getFractional(factor);
    product *= factor;
  }

  return totalFractionDigits === 0 ? product : round(product, totalFractionDigits);
}
/**
 * Safely divides two numbers while avoiding floating errors
 * like 0.3 / 0.05 => 5.999999999999999.
 *
 * Rewrites the division as a multiplication by the (shifted) reciprocal
 * so mulSafe's precision tracking can clean up the result.
 *
 * @param {number} num - numerator
 * @param {number} den - denominator
 * @returns {number} quotient
 * @throws {QtyError} on division by zero
 */
function divSafe(num, den) {
  if (den === 0) {
    throw new QtyError("Divide by zero");
  }

  // Shift by the denominator's decimal places before inverting
  var shift = Math.pow(10, getFractional(den));
  return mulSafe(num, shift / (shift * den));
}
// Returns the number of fractional decimal digits of num (0 when none can
// be determined). Works by matching the number's default string form:
// plain decimals via numRegex, negative-exponent scientific notation via
// expRegex.
// NOTE(review): forms like "1.5e-7" (decimal mantissa) match neither regex
// and fall through to 0 — presumably acceptable for the magnitudes used
// here; confirm before relying on it.
function getFractional(num) {
  var fractional, match;
  if((match = numRegex.exec(num)) && match[2]) {
    fractional = match[2].length;
  }
  else if((match = expRegex.exec(num))) {
    fractional = parseInt(match[2], 10);
  }
  // arg could be Infinities
  return fractional || 0;
}

// Expose the safe-arithmetic helpers on the public Qty object
Qty.mulSafe = mulSafe;
Qty.divSafe = divSafe;
// Cancels common factors between a combined numerator and denominator token
// list (used by mul/div). Prefix tokens are kept paired with the unit token
// that follows them so "km" never cancels against a bare "m". Returns
// [numerator, denominator], each defaulting to unity when fully cancelled.
function cleanTerms(num, den) {
  num = num.filter(function(val) {return val !== UNITY;});
  den = den.filter(function(val) {return val !== UNITY;});

  var combined = {};

  var k;
  for(var i = 0; i < num.length; i++) {
    if(PREFIX_VALUES[num[i]]) {
      // Prefix + unit travel as one key (array stringifies to "a,b")
      k = [num[i], num[i+1]];
      i++;
    }
    else {
      k = num[i];
    }
    if(k && k !== UNITY) {
      // combined[k] is [net exponent, token(s)]
      if(combined[k]) {
        combined[k][0]++;
      }
      else {
        combined[k] = [1, k];
      }
    }
  }

  for(var j = 0; j < den.length; j++) {
    if(PREFIX_VALUES[den[j]]) {
      k = [den[j], den[j+1]];
      j++;
    }
    else {
      k = den[j];
    }
    if(k && k !== UNITY) {
      if(combined[k]) {
        combined[k][0]--;
      }
      else {
        combined[k] = [-1, k];
      }
    }
  }

  num = [];
  den = [];

  // Positive net exponents go back to the numerator, negative to the denominator
  for(var prop in combined) {
    if(combined.hasOwnProperty(prop)) {
      var item = combined[prop];
      var n;
      if(item[0] > 0) {
        for(n = 0; n < item[0]; n++) {
          num.push(item[1]);
        }
      }
      else if(item[0] < 0) {
        for(n = 0; n < -item[0]; n++) {
          den.push(item[1]);
        }
      }
    }
  }

  if(num.length === 0) {
    num = UNITY_ARRAY;
  }
  if(den.length === 0) {
    den = UNITY_ARRAY;
  }

  // Flatten (prefix+unit pairs were pushed as arrays)
  num = num.reduce(function(a,b) {
    return a.concat(b);
  }, []);
  den = den.reduce(function(a,b) {
    return a.concat(b);
  }, []);

  return [num, den];
}
/*
 * Identity function: returns its argument unchanged.
 * Used as the no-op converter by Qty.swiftConverter.
 */
function identity(value) {
  return value;
}
// Setup: derive the lookup tables and matching regexes from the UNITS
// definition table. Runs once at module load.
var PREFIX_VALUES = {};   // canonical prefix token -> multiplier
var PREFIX_MAP = {};      // surface prefix name -> canonical token
var UNIT_VALUES = {};     // canonical unit token -> {scalar, numerator, denominator}
var UNIT_MAP = {};        // surface unit name -> canonical token
var OUTPUT_MAP = {};      // canonical token -> preferred display name
for(var unitDef in UNITS) {
  if(UNITS.hasOwnProperty(unitDef)) {
    var definition = UNITS[unitDef];
    if(definition[2] === "prefix") {
      PREFIX_VALUES[unitDef] = definition[1];
      for(var i = 0; i < definition[0].length; i++) {
        PREFIX_MAP[definition[0][i]] = unitDef;
      }
    }
    else {
      UNIT_VALUES[unitDef] = {
        scalar: definition[1],
        numerator: definition[3],
        denominator: definition[4]
      };
      for(var j = 0; j < definition[0].length; j++) {
        UNIT_MAP[definition[0][j]] = unitDef;
      }
    }
    // First alias is the canonical output name
    OUTPUT_MAP[unitDef] = definition[0][0];
  }
}
// Alternations sorted longest-first so e.g. "mm" matches before "m"
var PREFIX_REGEX = Object.keys(PREFIX_MAP).sort(function(a, b) {
  return b.length - a.length;
}).join("|");
var UNIT_REGEX = Object.keys(UNIT_MAP).sort(function(a, b) {
  return b.length - a.length;
}).join("|");
// Optional prefix (captured lazily) followed by a unit name at a word boundary
var UNIT_MATCH = "(" + PREFIX_REGEX + ")*?(" + UNIT_REGEX + ")\\b";
var UNIT_MATCH_REGEX = new RegExp(UNIT_MATCH, "g"); // g flag for multiple occurences
var UNIT_TEST_REGEX = new RegExp("^\\s*(" + UNIT_MATCH + "\\s*\\*?\\s*)+$");
/**
 * Custom error type definition.
 *
 * May be invoked with or without `new`; when called as a plain function it
 * allocates the instance itself and re-applies the constructor.
 *
 * @constructor
 */
function QtyError() {
  var err;
  if(!this) { // Allows to instantiate QtyError without new()
    err = Object.create(QtyError.prototype);
    QtyError.apply(err, arguments);
    return err;
  }
  // Delegate to Error to obtain message and stack, then copy them over
  err = Error.apply(this, arguments);
  this.name = "QtyError";
  this.message = err.message;
  this.stack = err.stack;
}
QtyError.prototype = Object.create(Error.prototype, {constructor: { value: QtyError }});
// Expose the error type on the public Qty object
Qty.Error = QtyError;
return Qty;
}));
| mit |
flangelo/guacamole-client | guacamole/src/main/webapp/app/client/directives/guacThumbnail.js | 5520 | /*
* Copyright (C) 2014 Glyptodon LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* A directive for displaying a Guacamole client as a non-interactive
* thumbnail.
*/
angular.module('client').directive('guacThumbnail', [function guacThumbnail() {

    return {
        // Element only
        restrict: 'E',
        replace: true,
        scope: {

            /**
             * The client to display within this guacThumbnail directive.
             *
             * @type ManagedClient
             */
            client : '='

        },

        templateUrl: 'app/client/templates/guacThumbnail.html',
        controller: ['$scope', '$injector', '$element', function guacThumbnailController($scope, $injector, $element) {

            // Required services
            // NOTE(review): $window is retrieved but never referenced below —
            // presumably a leftover; confirm before removing.
            var $window = $injector.get('$window');

            /**
             * The optimal thumbnail width, in pixels.
             *
             * @type Number
             */
            var THUMBNAIL_WIDTH = 320;

            /**
             * The optimal thumbnail height, in pixels.
             *
             * @type Number
             */
            var THUMBNAIL_HEIGHT = 240;

            /**
             * The display of the current Guacamole client instance.
             *
             * @type Guacamole.Display
             */
            var display = null;

            /**
             * The element associated with the display of the current
             * Guacamole client instance.
             *
             * @type Element
             */
            var displayElement = null;

            /**
             * The element which must contain the Guacamole display element.
             *
             * @type Element
             */
            var displayContainer = $element.find('.display')[0];

            /**
             * The main containing element for the entire directive.
             *
             * @type Element
             */
            var main = $element[0];

            /**
             * Updates the scale of the attached Guacamole.Client based on current window
             * size and "auto-fit" setting.
             */
            $scope.updateDisplayScale = function updateDisplayScale() {

                if (!display) return;

                // Fit within available area; Math.max(..., 1) guards against a
                // division by zero while the display still reports 0x0.
                display.scale(Math.min(
                    main.offsetWidth  / Math.max(display.getWidth(),  1),
                    main.offsetHeight / Math.max(display.getHeight(), 1)
                ));

            };

            // Attach any given managed client
            $scope.$watch('client', function attachManagedClient(managedClient) {

                // Remove any existing display
                displayContainer.innerHTML = "";

                // Only proceed if a client is given
                if (!managedClient)
                    return;

                // Get Guacamole client instance
                var client = managedClient.client;

                // Attach possibly new display
                display = client.getDisplay();

                // Add display element
                displayElement = display.getElement();
                displayContainer.appendChild(displayElement);

            });

            // Update scale when display is resized
            $scope.$watch('client.managedDisplay.size', function setDisplaySize(size) {

                var width;
                var height;

                // If no display size yet, assume optimal thumbnail size
                if (!size || size.width === 0 || size.height === 0) {
                    width  = THUMBNAIL_WIDTH;
                    height = THUMBNAIL_HEIGHT;
                }

                // Otherwise, generate size that fits within thumbnail bounds
                else {
                    var scale = Math.min(THUMBNAIL_WIDTH / size.width, THUMBNAIL_HEIGHT / size.height, 1);
                    width  = size.width  * scale;
                    height = size.height * scale;
                }

                // Generate dummy background image: a blank canvas of the target
                // size, exported as a data URL for the template to use.
                var thumbnail = document.createElement("canvas");
                thumbnail.width  = width;
                thumbnail.height = height;
                $scope.thumbnail = thumbnail.toDataURL("image/png");

                // Init display scale once the digest settles, so layout
                // measurements (offsetWidth/offsetHeight) are up to date.
                $scope.$evalAsync($scope.updateDisplayScale);

            });

        }]

    };
}]); | mit |
andycochran/foundation-sites | test/javascript/components/accordion.js | 5611 | describe('Accordion', function() {
var plugin;
var $html;
var template = `<ul class="accordion" data-accordion>
<li class="accordion-item is-active" data-accordion-item>
<a href="#" class="accordion-title">Accordion 1</a>
<div class="accordion-content" data-tab-content >
<p>Panel 1. Lorem ipsum dolor</p>
<a href="#">Nowhere to Go</a>
</div>
</li>
<li class="accordion-item" data-accordion-item>
<a href="#" class="accordion-title">Accordion 2</a>
<div class="accordion-content" data-tab-content>
<textarea></textarea>
<button class="button">I do nothing!</button>
</div>
</li>
<li class="accordion-item" data-accordion-item>
<a href="#" class="accordion-title">Accordion 3</a>
<div class="accordion-content" data-tab-content>
Pick a date!
<input type="date"></input>
</div>
</li>
</ul>`;
afterEach(function() {
plugin.destroy();
$html.remove();
});
describe('constructor()', function() {
it('stores the element and plugin options', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {});
plugin.$element.should.be.an('object');
plugin.options.should.be.an('object');
});
});
describe('up()', function(done) {
it('closes the targeted container if allowAllClosed is true', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {allowAllClosed: true});
plugin.up($html.find('.accordion-content').eq(0));
$html.find('.accordion-content').eq(0).should.have.attr('aria-hidden', 'true');
$html.on('up.zf.accordion', function() {
$html.find('.accordion-content').eq(0).should.be.hidden;
done();
});
});
it('toggles attributes of title of the targeted container', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {allowAllClosed: true});
plugin.up($html.find('.accordion-content').eq(0));
$html.find('.accordion-title').eq(0).should.have.attr('aria-expanded', 'false');
$html.find('.accordion-title').eq(0).should.have.attr('aria-selected', 'false');
});
it('not closes the open container if allowAllClosed is false', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {allowAllClosed: false});
plugin.up($html.find('.accordion-content').eq(0));
$html.find('.accordion-content').eq(0).should.be.visible;
// Element has aria-hidden="false" not set if it has not been actively toggled so far
// Therefor check if it has not aria-hidden="true"
$html.find('.accordion-content').eq(0).should.not.have.attr('aria-hidden', 'true');
});
});
describe('down()', function() {
it('opens the targeted container', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {});
plugin.down($html.find('.accordion-content').eq(1));
$html.find('.accordion-content').eq(1).should.be.visible;
$html.find('.accordion-content').eq(1).should.have.attr('aria-hidden', 'false');
});
it('toggles attributes of title of the targeted container', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {});
plugin.down($html.find('.accordion-content').eq(1));
$html.find('.accordion-title').eq(1).should.have.attr('aria-expanded', 'true');
$html.find('.accordion-title').eq(1).should.have.attr('aria-selected', 'true');
});
it('closes open container if multiExpand is false', function(done) {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {multiExpand: false});
plugin.down($html.find('.accordion-content').eq(1));
$html.find('.accordion-content').eq(0).should.have.attr('aria-hidden', 'true');
$html.on('up.zf.accordion', function() {
$html.find('.accordion-content').eq(0).should.be.hidden;
done();
});
});
it('not closes open container if multiExpand is true', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {multiExpand: true});
plugin.down($html.find('.accordion-content').eq(1));
$html.find('.accordion-content').eq(0).should.be.visible;
// Element has aria-hidden="false" not set if it has not been actively toggled so far
// Therefor check if it has not aria-hidden="true"
$html.find('.accordion-content').eq(0).should.not.have.attr('aria-hidden', 'true');
});
});
describe('toggle()', function(done) {
it('closes the only open container if allowAllClosed is true', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {allowAllClosed: true});
plugin.toggle($html.find('.accordion-content').eq(0));
$html.find('.accordion-content').eq(0).should.have.attr('aria-hidden', 'true');
$html.on('up.zf.accordion', function() {
$html.find('.accordion-content').eq(0).should.be.hidden;
done();
});
});
it('not closes the only open container if allowAllClosed is false', function() {
$html = $(template).appendTo('body');
plugin = new Foundation.Accordion($html, {allowAllClosed: false});
plugin.toggle($html.find('.accordion-content').eq(0));
$html.find('.accordion-content').eq(0).should.be.visible;
$html.find('.accordion-content').eq(0).should.have.attr('aria-hidden', 'false');
});
});
}); | mit |
ChristopheB/framework | src/Illuminate/Support/Testing/Fakes/QueueFake.php | 10713 | <?php
namespace Illuminate\Support\Testing\Fakes;
use BadMethodCallException;
use Closure;
use Illuminate\Contracts\Queue\Queue;
use Illuminate\Queue\QueueManager;
use Illuminate\Support\Traits\ReflectsClosures;
use PHPUnit\Framework\Assert as PHPUnit;
class QueueFake extends QueueManager implements Queue
{
    use ReflectsClosures;

    /**
     * All of the jobs that have been pushed, keyed by job class name.
     *
     * @var array
     */
    protected $jobs = [];

    /**
     * Assert if a job was pushed based on a truth-test callback.
     *
     * Passing an integer as the second argument asserts the exact push
     * count instead of running a truth test.
     *
     * @param  string|\Closure  $job
     * @param  callable|int|null  $callback
     * @return void
     */
    public function assertPushed($job, $callback = null)
    {
        if ($job instanceof Closure) {
            // Closure form: the closure's first parameter type names the job
            // class, and the closure itself becomes the truth test.
            [$job, $callback] = [$this->firstClosureParameterType($job), $job];
        }

        if (is_numeric($callback)) {
            return $this->assertPushedTimes($job, $callback);
        }

        PHPUnit::assertTrue(
            $this->pushed($job, $callback)->count() > 0,
            "The expected [{$job}] job was not pushed."
        );
    }

    /**
     * Assert if a job was pushed a number of times.
     *
     * @param  string  $job
     * @param  int  $times
     * @return void
     */
    protected function assertPushedTimes($job, $times = 1)
    {
        $count = $this->pushed($job)->count();

        PHPUnit::assertSame(
            $times, $count,
            "The expected [{$job}] job was pushed {$count} times instead of {$times} times."
        );
    }

    /**
     * Assert if a job was pushed to a given queue based on a truth-test callback.
     *
     * @param  string  $queue
     * @param  string|\Closure  $job
     * @param  callable|null  $callback
     * @return void
     */
    public function assertPushedOn($queue, $job, $callback = null)
    {
        if ($job instanceof Closure) {
            [$job, $callback] = [$this->firstClosureParameterType($job), $job];
        }

        return $this->assertPushed($job, function ($job, $pushedQueue) use ($callback, $queue) {
            if ($pushedQueue !== $queue) {
                return false;
            }

            return $callback ? $callback(...func_get_args()) : true;
        });
    }

    /**
     * Assert if a job was pushed with chained jobs based on a truth-test callback.
     *
     * The expected chain may be given as job instances (compared by serialized
     * form) or as class names (compared by class only).
     *
     * @param  string  $job
     * @param  array  $expectedChain
     * @param  callable|null  $callback
     * @return void
     */
    public function assertPushedWithChain($job, $expectedChain = [], $callback = null)
    {
        PHPUnit::assertTrue(
            $this->pushed($job, $callback)->isNotEmpty(),
            "The expected [{$job}] job was not pushed."
        );

        PHPUnit::assertTrue(
            collect($expectedChain)->isNotEmpty(),
            'The expected chain can not be empty.'
        );

        $this->isChainOfObjects($expectedChain)
            ? $this->assertPushedWithChainOfObjects($job, $expectedChain, $callback)
            : $this->assertPushedWithChainOfClasses($job, $expectedChain, $callback);
    }

    /**
     * Assert if a job was pushed with an empty chain based on a truth-test callback.
     *
     * @param  string  $job
     * @param  callable|null  $callback
     * @return void
     */
    public function assertPushedWithoutChain($job, $callback = null)
    {
        PHPUnit::assertTrue(
            $this->pushed($job, $callback)->isNotEmpty(),
            "The expected [{$job}] job was not pushed."
        );

        // An empty expected chain only matches jobs whose chain is empty.
        $this->assertPushedWithChainOfClasses($job, [], $callback);
    }

    /**
     * Assert if a job was pushed with chained jobs based on a truth-test callback.
     *
     * Object form: each chained job must match the expected instance's
     * serialized representation exactly.
     *
     * @param  string  $job
     * @param  array  $expectedChain
     * @param  callable|null  $callback
     * @return void
     */
    protected function assertPushedWithChainOfObjects($job, $expectedChain, $callback)
    {
        $chain = collect($expectedChain)->map(function ($job) {
            return serialize($job);
        })->all();

        PHPUnit::assertTrue(
            $this->pushed($job, $callback)->filter(function ($job) use ($chain) {
                return $job->chained == $chain;
            })->isNotEmpty(),
            'The expected chain was not pushed.'
        );
    }

    /**
     * Assert if a job was pushed with chained jobs based on a truth-test callback.
     *
     * Class-name form: only the class of each chained job must match, in order.
     *
     * @param  string  $job
     * @param  array  $expectedChain
     * @param  callable|null  $callback
     * @return void
     */
    protected function assertPushedWithChainOfClasses($job, $expectedChain, $callback)
    {
        $matching = $this->pushed($job, $callback)->map->chained->map(function ($chain) {
            return collect($chain)->map(function ($job) {
                return get_class(unserialize($job));
            });
        })->filter(function ($chain) use ($expectedChain) {
            return $chain->all() === $expectedChain;
        });

        PHPUnit::assertTrue(
            $matching->isNotEmpty(), 'The expected chain was not pushed.'
        );
    }

    /**
     * Determine if the given chain is entirely composed of objects.
     *
     * @param  array  $chain
     * @return bool
     */
    protected function isChainOfObjects($chain)
    {
        return ! collect($chain)->contains(function ($job) {
            return ! is_object($job);
        });
    }

    /**
     * Assert that a job matching the given truth-test callback was NOT pushed.
     *
     * @param  string|\Closure  $job
     * @param  callable|null  $callback
     * @return void
     */
    public function assertNotPushed($job, $callback = null)
    {
        if ($job instanceof Closure) {
            [$job, $callback] = [$this->firstClosureParameterType($job), $job];
        }

        PHPUnit::assertCount(
            0, $this->pushed($job, $callback),
            "The unexpected [{$job}] job was pushed."
        );
    }

    /**
     * Assert that no jobs were pushed.
     *
     * @return void
     */
    public function assertNothingPushed()
    {
        PHPUnit::assertEmpty($this->jobs, 'Jobs were pushed unexpectedly.');
    }

    /**
     * Get all of the jobs matching a truth-test callback.
     *
     * @param  string  $job
     * @param  callable|null  $callback
     * @return \Illuminate\Support\Collection
     */
    public function pushed($job, $callback = null)
    {
        if (! $this->hasPushed($job)) {
            return collect();
        }

        // With no callback, every recorded push of this job class matches.
        $callback = $callback ?: function () {
            return true;
        };

        return collect($this->jobs[$job])->filter(function ($data) use ($callback) {
            return $callback($data['job'], $data['queue']);
        })->pluck('job');
    }

    /**
     * Determine if there are any stored jobs for a given class.
     *
     * @param  string  $job
     * @return bool
     */
    public function hasPushed($job)
    {
        return isset($this->jobs[$job]) && ! empty($this->jobs[$job]);
    }

    /**
     * Resolve a queue connection instance. The fake acts as every connection.
     *
     * @param  mixed  $value
     * @return \Illuminate\Contracts\Queue\Queue
     */
    public function connection($value = null)
    {
        return $this;
    }

    /**
     * Get the size of the queue (number of recorded pushes on that queue).
     *
     * @param  string|null  $queue
     * @return int
     */
    public function size($queue = null)
    {
        return collect($this->jobs)->flatten(1)->filter(function ($job) use ($queue) {
            return $job['queue'] === $queue;
        })->count();
    }

    /**
     * Push a new job onto the queue. In the fake this only records the push;
     * nothing is executed and no job id is returned.
     *
     * @param  string  $job
     * @param  mixed  $data
     * @param  string|null  $queue
     * @return mixed
     */
    public function push($job, $data = '', $queue = null)
    {
        $this->jobs[is_object($job) ? get_class($job) : $job][] = [
            'job' => $job,
            'queue' => $queue,
        ];
    }

    /**
     * Push a raw payload onto the queue. Intentionally a no-op in the fake.
     *
     * @param  string  $payload
     * @param  string|null  $queue
     * @param  array  $options
     * @return mixed
     */
    public function pushRaw($payload, $queue = null, array $options = [])
    {
        //
    }

    /**
     * Push a new job onto the queue after a delay. The delay is ignored by
     * the fake; the job is recorded immediately.
     *
     * @param  \DateTimeInterface|\DateInterval|int  $delay
     * @param  string  $job
     * @param  mixed  $data
     * @param  string|null  $queue
     * @return mixed
     */
    public function later($delay, $job, $data = '', $queue = null)
    {
        return $this->push($job, $data, $queue);
    }

    /**
     * Push a new job onto the queue.
     *
     * @param  string  $queue
     * @param  string  $job
     * @param  mixed  $data
     * @return mixed
     */
    public function pushOn($queue, $job, $data = '')
    {
        return $this->push($job, $data, $queue);
    }

    /**
     * Push a new job onto the queue after a delay (delay ignored, see later()).
     *
     * @param  string  $queue
     * @param  \DateTimeInterface|\DateInterval|int  $delay
     * @param  string  $job
     * @param  mixed  $data
     * @return mixed
     */
    public function laterOn($queue, $delay, $job, $data = '')
    {
        return $this->push($job, $data, $queue);
    }

    /**
     * Pop the next job off of the queue. Intentionally a no-op in the fake.
     *
     * @param  string|null  $queue
     * @return \Illuminate\Contracts\Queue\Job|null
     */
    public function pop($queue = null)
    {
        //
    }

    /**
     * Push an array of jobs onto the queue.
     *
     * @param  array  $jobs
     * @param  mixed  $data
     * @param  string|null  $queue
     * @return mixed
     */
    public function bulk($jobs, $data = '', $queue = null)
    {
        foreach ($jobs as $job) {
            $this->push($job, $data, $queue);
        }
    }

    /**
     * Get the jobs that have been pushed.
     *
     * @return array
     */
    public function pushedJobs()
    {
        return $this->jobs;
    }

    /**
     * Get the connection name for the queue. No-op in the fake; returns null.
     *
     * @return string|null
     */
    public function getConnectionName()
    {
        //
    }

    /**
     * Set the connection name for the queue. No-op in the fake.
     *
     * @param  string  $name
     * @return $this
     */
    public function setConnectionName($name)
    {
        return $this;
    }

    /**
     * Override the QueueManager to prevent circular dependency.
     *
     * @param  string  $method
     * @param  array  $parameters
     * @return mixed
     *
     * @throws \BadMethodCallException
     */
    public function __call($method, $parameters)
    {
        throw new BadMethodCallException(sprintf(
            'Call to undefined method %s::%s()', static::class, $method
        ));
    }
}
| mit |
uahengojr/NCA-Web-App | Admin HTML/vendor/holderjs/gulpfile.js | 1873 | var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglifyjs');
var header = require('gulp-header');
var jshint = require('gulp-jshint');
var todo = require('gulp-todo');
var gulputil = require('gulp-util');
var moment = require('moment');
var pkg = require('./package.json');
// Header banner injected into the built files by gulp-header; the
// <%= %> placeholders are filled from package.json plus the build id.
var banner =
    '/*!\n\n' +
    '<%= pkg.name %> - <%= pkg.summary %>\nVersion <%= pkg.version %>+<%= build %>\n' +
    '\u00A9 <%= year %> <%= pkg.author.name %> - <%= pkg.author.url %>\n\n' +
    'Site: <%= pkg.homepage %>\n'+
    'Issues: <%= pkg.bugs.url %>\n' +
    'License: <%= pkg.license.url %>\n\n' +
    '*/\n';
// Build id: the number of seconds elapsed since the start of the current
// year, rendered in base 36 to keep it short.
function generateBuild() {
    var now = new Date();
    var yearStart = new Date(now.getFullYear(), 0, 0);
    var elapsedSeconds = Math.floor((now - yearStart) / 1000);
    return elapsedSeconds.toString(36);
}
var build = generateBuild();

var paths = {
    scripts: ["src/ondomready.js", "src/polyfills.js", "src/augment.js", "src/holder.js"]
}

// Lints only the last entry of paths.scripts (holder.js), not the whole list.
gulp.task('jshint', function () {
    return gulp.src(paths.scripts[paths.scripts.length - 1])
        .pipe(jshint())
        .pipe(jshint.reporter('default'));
});

// Collects TODO comments from the sources into a report in the repo root.
gulp.task('todo', function(){
    return gulp.src(paths.scripts)
        .pipe(todo())
        .pipe(gulp.dest('./'));
});

// Concatenates the sources into holder.js after linting passes.
gulp.task('scripts', ['jshint'], function () {
    return gulp.src(paths.scripts)
        .pipe(concat("holder.js"))
        .pipe(gulp.dest("./"));
});

// Produces holder.min.js from the concatenated build.
gulp.task('minify', ['scripts'], function () {
    return gulp.src("holder.js")
        .pipe(uglify("holder.min.js"))
        .pipe(gulp.dest("./"));
});

// Prepends the license banner to both built files.
gulp.task('banner', ['minify'], function () {
    return gulp.src(["holder*.js"])
        .pipe(header(banner, {
            pkg: pkg,
            year: moment().format("YYYY"),
            build: build
        }))
        .pipe(gulp.dest("./"));
});

gulp.task('watch', function(){
    gulp.watch(paths.scripts, ['default']);
});

// Full pipeline; rotates the build id after each completed run so the next
// build gets a fresh id.
gulp.task('default', ['todo', 'jshint', 'scripts', 'minify', 'banner'], function(){
    gulputil.log("Finished build "+build);
    build = generateBuild();
});
| mit |
tempbottle/codis | pkg/utils/math2/math.go | 874 | // Copyright 2016 CodisLabs. All Rights Reserved.
// Licensed under the MIT (MIT-LICENSE.txt) license.
package math2
import (
"fmt"
"time"
)
// MaxInt returns the larger of a and b.
func MaxInt(a, b int) int {
	if a < b {
		return b
	}
	return a
}
// MinInt returns the smaller of a and b.
func MinInt(a, b int) int {
	if a > b {
		return b
	}
	return a
}
// MinMaxInt clamps v into the inclusive range [min, max].
// It panics when min > max, matching the original contract.
func MinMaxInt(v, min, max int) int {
	if min > max {
		panic(fmt.Sprintf("min = %d, max = %d", min, max))
	}
	switch {
	case v < min:
		return min
	case v > max:
		return max
	default:
		return v
	}
}
func MaxDuration(a, b time.Duration) time.Duration {
if a > b {
return a
} else {
return b
}
}
func MinDuration(a, b time.Duration) time.Duration {
if a < b {
return a
} else {
return b
}
}
func MinMaxDuration(v, min, max time.Duration) time.Duration {
if min <= max {
v = MaxDuration(v, min)
v = MinDuration(v, max)
return v
}
panic(fmt.Sprintf("min = %s, max = %s", min, max))
}
| mit |
Vegetam/BoostrapPageGenerator4 | bootstap 4 alpha 6/ckeditor/plugins/smiley/plugin.js | 3805 | /**
* @license Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Registers the smiley plugin: a toolbar button that opens a dialog for
// inserting smiley images into the editor content.
CKEDITOR.plugins.add( 'smiley', {
	requires: 'dialog',
	// jscs:disable maximumLineLength
	lang: 'af,ar,az,bg,bn,bs,ca,cs,cy,da,de,de-ch,el,en,en-au,en-ca,en-gb,eo,es,et,eu,fa,fi,fo,fr,fr-ca,gl,gu,he,hi,hr,hu,id,is,it,ja,ka,km,ko,ku,lt,lv,mk,mn,ms,nb,nl,no,oc,pl,pt,pt-br,ro,ru,si,sk,sl,sq,sr,sr-latn,sv,th,tr,tt,ug,uk,vi,zh,zh-cn', // %REMOVE_LINE_CORE%
	// jscs:enable maximumLineLength
	icons: 'smiley', // %REMOVE_LINE_CORE%
	hidpi: true, // %REMOVE_LINE_CORE%
	init: function( editor ) {
		// Default the image base path to this plugin's bundled images when
		// the integrator has not configured smiley_path.
		editor.config.smiley_path = editor.config.smiley_path || ( this.path + 'images/' );
		editor.addCommand( 'smiley', new CKEDITOR.dialogCommand( 'smiley', {
			allowedContent: 'img[alt,height,!src,title,width]',
			requiredContent: 'img'
		} ) );
		// addButton is absent in editor builds without a toolbar UI, hence the guard.
		editor.ui.addButton && editor.ui.addButton( 'Smiley', {
			label: editor.lang.smiley.toolbar,
			command: 'smiley',
			toolbar: 'insert,50'
		} );
		CKEDITOR.dialog.add( 'smiley', this.path + 'dialogs/smiley.js' );
	}
} );
/**
* The base path used to build the URL for the smiley images. It must end with a slash.
*
* config.smiley_path = 'http://www.example.com/images/smileys/';
*
* config.smiley_path = '/images/smileys/';
*
* @cfg {String} [smiley_path=CKEDITOR.basePath + 'plugins/smiley/images/']
* @member CKEDITOR.config
*/
/**
* The file names for the smileys to be displayed. These files must be
* contained inside the URL path defined with the {@link #smiley_path} setting.
*
* // This is actually the default value.
* config.smiley_images = [
* 'regular_smile.png','sad_smile.png','wink_smile.png','teeth_smile.png','confused_smile.png','tongue_smile.png',
* 'embarrassed_smile.png','omg_smile.png','whatchutalkingabout_smile.png','angry_smile.png','angel_smile.png','shades_smile.png',
* 'devil_smile.png','cry_smile.png','lightbulb.png','thumbs_down.png','thumbs_up.png','heart.png',
* 'broken_heart.png','kiss.png','envelope.png'
* ];
*
* @cfg
* @member CKEDITOR.config
*/
// Default image file names, resolved against config.smiley_path.
// Order must stay in sync with config.smiley_descriptions below.
CKEDITOR.config.smiley_images = [
	'regular_smile.png', 'sad_smile.png', 'wink_smile.png', 'teeth_smile.png', 'confused_smile.png', 'tongue_smile.png',
	'embarrassed_smile.png', 'omg_smile.png', 'whatchutalkingabout_smile.png', 'angry_smile.png', 'angel_smile.png', 'shades_smile.png',
	'devil_smile.png', 'cry_smile.png', 'lightbulb.png', 'thumbs_down.png', 'thumbs_up.png', 'heart.png',
	'broken_heart.png', 'kiss.png', 'envelope.png'
];
/**
* The description to be used for each of the smileys defined in the
* {@link CKEDITOR.config#smiley_images} setting. Each entry in this array list
* must match its relative pair in the {@link CKEDITOR.config#smiley_images}
* setting.
*
* // Default settings.
* config.smiley_descriptions = [
* 'smiley', 'sad', 'wink', 'laugh', 'frown', 'cheeky', 'blush', 'surprise',
* 'indecision', 'angry', 'angel', 'cool', 'devil', 'crying', 'enlightened', 'no',
* 'yes', 'heart', 'broken heart', 'kiss', 'mail'
* ];
*
* // Use textual emoticons as description.
* config.smiley_descriptions = [
* ':)', ':(', ';)', ':D', ':/', ':P', ':*)', ':-o',
* ':|', '>:(', 'o:)', '8-)', '>:-)', ';(', '', '', '',
* '', '', ':-*', ''
* ];
*
* @cfg
* @member CKEDITOR.config
*/
// Default descriptions; each entry pairs positionally with config.smiley_images.
CKEDITOR.config.smiley_descriptions = [
	'smiley', 'sad', 'wink', 'laugh', 'frown', 'cheeky', 'blush', 'surprise',
	'indecision', 'angry', 'angel', 'cool', 'devil', 'crying', 'enlightened', 'no',
	'yes', 'heart', 'broken heart', 'kiss', 'mail'
];
/**
* The number of columns to be generated by the smilies matrix.
*
* config.smiley_columns = 6;
*
* @since 3.3.2
* @cfg {Number} [smiley_columns=8]
* @member CKEDITOR.config
*/
| mit |
joeyparrish/cdnjs | ajax/libs/angular-ui-grid/4.4.0/ui-grid.expandable.js | 21518 | /*!
* ui-grid - v4.4.0 - 2018-03-15
* Copyright (c) 2018 ; License: MIT
*/
(function () {
  'use strict';

  /**
   * @ngdoc overview
   * @name ui.grid.expandable
   * @description
   *
   * # ui.grid.expandable
   *
   * <div class="alert alert-warning" role="alert"><strong>Alpha</strong> This feature is in development. There will almost certainly be breaking api changes, or there are major outstanding bugs.</div>
   *
   * This module provides the ability to create subgrids with the ability to expand a row
   * to show the subgrid.
   *
   * <div doc-module-components="ui.grid.expandable"></div>
   */
  // Module handle reused by the service/directive registrations below.
  var module = angular.module('ui.grid.expandable', ['ui.grid']);
/**
* @ngdoc service
* @name ui.grid.expandable.service:uiGridExpandableService
*
* @description Services for the expandable grid
*/
module.service('uiGridExpandableService', ['gridUtil', '$compile', function (gridUtil, $compile) {
var service = {
initializeGrid: function (grid) {
grid.expandable = {};
grid.expandable.expandedAll = false;
/**
* @ngdoc object
* @name enableExpandable
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Whether or not to use expandable feature, allows you to turn off expandable on specific grids
* within your application, or in specific modes on _this_ grid. Defaults to true.
* @example
* <pre>
* $scope.gridOptions = {
* enableExpandable: false
* }
* </pre>
*/
grid.options.enableExpandable = grid.options.enableExpandable !== false;
/**
* @ngdoc object
* @name showExpandAllButton
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Whether or not to display the expand all button, allows you to hide expand all button on specific grids
* within your application, or in specific modes on _this_ grid. Defaults to true.
* @example
* <pre>
* $scope.gridOptions = {
* showExpandAllButton: false
* }
* </pre>
*/
grid.options.showExpandAllButton = grid.options.showExpandAllButton !== false;
/**
* @ngdoc object
* @name expandableRowHeight
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Height in pixels of the expanded subgrid. Defaults to
* 150
* @example
* <pre>
* $scope.gridOptions = {
* expandableRowHeight: 150
* }
* </pre>
*/
grid.options.expandableRowHeight = grid.options.expandableRowHeight || 150;
/**
* @ngdoc object
* @name expandableRowHeaderWidth
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Width in pixels of the expandable column. Defaults to 40
* @example
* <pre>
* $scope.gridOptions = {
* expandableRowHeaderWidth: 40
* }
* </pre>
*/
grid.options.expandableRowHeaderWidth = grid.options.expandableRowHeaderWidth || 40;
/**
* @ngdoc object
* @name expandableRowTemplate
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Mandatory. The template for your expanded row
* @example
* <pre>
* $scope.gridOptions = {
* expandableRowTemplate: 'expandableRowTemplate.html'
* }
* </pre>
*/
if ( grid.options.enableExpandable && !grid.options.expandableRowTemplate ){
gridUtil.logError( 'You have not set the expandableRowTemplate, disabling expandable module' );
grid.options.enableExpandable = false;
}
/**
* @ngdoc object
* @name ui.grid.expandable.api:PublicApi
*
* @description Public Api for expandable feature
*/
/**
* @ngdoc object
* @name ui.grid.expandable.api:GridRow
*
* @description Additional properties added to GridRow when using the expandable module
*/
/**
* @ngdoc object
* @name ui.grid.expandable.api:GridOptions
*
* @description Options for configuring the expandable feature, these are available to be
* set using the ui-grid {@link ui.grid.class:GridOptions gridOptions}
*/
var publicApi = {
events: {
expandable: {
/**
* @ngdoc event
* @name rowExpandedStateChanged
* @eventOf ui.grid.expandable.api:PublicApi
* @description raised when row expanded or collapsed
* <pre>
* gridApi.expandable.on.rowExpandedStateChanged(scope,function(row){})
* </pre>
* @param {GridRow} row the row that was expanded
*/
rowExpandedBeforeStateChanged: function(scope,row){
},
rowExpandedStateChanged: function (scope, row) {
}
}
},
methods: {
expandable: {
/**
* @ngdoc method
* @name toggleRowExpansion
* @methodOf ui.grid.expandable.api:PublicApi
* @description Toggle a specific row
* <pre>
* gridApi.expandable.toggleRowExpansion(rowEntity);
* </pre>
* @param {object} rowEntity the data entity for the row you want to expand
*/
toggleRowExpansion: function (rowEntity) {
var row = grid.getRow(rowEntity);
if (row !== null) {
service.toggleRowExpansion(grid, row);
}
},
/**
* @ngdoc method
* @name expandAllRows
* @methodOf ui.grid.expandable.api:PublicApi
* @description Expand all subgrids.
* <pre>
* gridApi.expandable.expandAllRows();
* </pre>
*/
expandAllRows: function() {
service.expandAllRows(grid);
},
/**
* @ngdoc method
* @name collapseAllRows
* @methodOf ui.grid.expandable.api:PublicApi
* @description Collapse all subgrids.
* <pre>
* gridApi.expandable.collapseAllRows();
* </pre>
*/
collapseAllRows: function() {
service.collapseAllRows(grid);
},
/**
* @ngdoc method
* @name toggleAllRows
* @methodOf ui.grid.expandable.api:PublicApi
* @description Toggle all subgrids.
* <pre>
* gridApi.expandable.toggleAllRows();
* </pre>
*/
toggleAllRows: function() {
service.toggleAllRows(grid);
},
/**
* @ngdoc function
* @name expandRow
* @methodOf ui.grid.expandable.api:PublicApi
* @description Expand the data row
* @param {object} rowEntity gridOptions.data[] array instance
*/
expandRow: function (rowEntity) {
var row = grid.getRow(rowEntity);
if (row !== null && !row.isExpanded) {
service.toggleRowExpansion(grid, row);
}
},
/**
* @ngdoc function
* @name collapseRow
* @methodOf ui.grid.expandable.api:PublicApi
* @description Collapse the data row
* @param {object} rowEntity gridOptions.data[] array instance
*/
collapseRow: function (rowEntity) {
var row = grid.getRow(rowEntity);
if (row !== null && row.isExpanded) {
service.toggleRowExpansion(grid, row);
}
},
/**
* @ngdoc function
* @name getExpandedRows
* @methodOf ui.grid.expandable.api:PublicApi
* @description returns all expandedRow's entity references
*/
getExpandedRows: function () {
return service.getExpandedRows(grid).map(function (gridRow) {
return gridRow.entity;
});
}
}
}
};
grid.api.registerEventsFromObject(publicApi.events);
grid.api.registerMethodsFromObject(publicApi.methods);
},
toggleRowExpansion: function (grid, row) {
// trigger the "before change" event. Can change row height dynamically this way.
grid.api.expandable.raise.rowExpandedBeforeStateChanged(row);
/**
* @ngdoc object
* @name isExpanded
* @propertyOf ui.grid.expandable.api:GridRow
* @description Whether or not the row is currently expanded.
* @example
* <pre>
* $scope.api.expandable.on.rowExpandedStateChanged($scope, function (row) {
* if (row.isExpanded) {
* //...
* }
* });
* </pre>
*/
row.isExpanded = !row.isExpanded;
if (angular.isUndefined(row.expandedRowHeight)){
row.expandedRowHeight = grid.options.expandableRowHeight;
}
if (row.isExpanded) {
row.height = row.grid.options.rowHeight + row.expandedRowHeight;
}
else {
row.height = row.grid.options.rowHeight;
grid.expandable.expandedAll = false;
}
grid.api.expandable.raise.rowExpandedStateChanged(row);
},
expandAllRows: function(grid, $scope) {
grid.renderContainers.body.visibleRowCache.forEach( function(row) {
if (!row.isExpanded && !(row.entity.subGridOptions && row.entity.subGridOptions.disableRowExpandable)) {
service.toggleRowExpansion(grid, row);
}
});
grid.expandable.expandedAll = true;
grid.queueGridRefresh();
},
collapseAllRows: function(grid) {
grid.renderContainers.body.visibleRowCache.forEach( function(row) {
if (row.isExpanded) {
service.toggleRowExpansion(grid, row);
}
});
grid.expandable.expandedAll = false;
grid.queueGridRefresh();
},
toggleAllRows: function(grid) {
if (grid.expandable.expandedAll) {
service.collapseAllRows(grid);
}
else {
service.expandAllRows(grid);
}
},
getExpandedRows: function (grid) {
return grid.rows.filter(function (row) {
return row.isExpanded;
});
}
};
return service;
}]);
/**
* @ngdoc object
* @name enableExpandableRowHeader
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Show a rowHeader to provide the expandable buttons. If set to false then implies
* you're going to use a custom method for expanding and collapsing the subgrids. Defaults to true.
* @example
* <pre>
* $scope.gridOptions = {
* enableExpandableRowHeader: false
* }
* </pre>
*/
module.directive('uiGridExpandable', ['uiGridExpandableService', '$templateCache',
function (uiGridExpandableService, $templateCache) {
return {
replace: true,
priority: 0,
require: '^uiGrid',
scope: false,
compile: function () {
return {
pre: function ($scope, $elm, $attrs, uiGridCtrl) {
uiGridExpandableService.initializeGrid(uiGridCtrl.grid);
if (!uiGridCtrl.grid.options.enableExpandable) {
return;
}
if (uiGridCtrl.grid.options.enableExpandableRowHeader !== false ) {
var expandableRowHeaderColDef = {
name: 'expandableButtons',
displayName: '',
exporterSuppressExport: true,
enableColumnResizing: false,
enableColumnMenu: false,
width: uiGridCtrl.grid.options.expandableRowHeaderWidth || 40
};
expandableRowHeaderColDef.cellTemplate = $templateCache.get('ui-grid/expandableRowHeader');
expandableRowHeaderColDef.headerCellTemplate = $templateCache.get('ui-grid/expandableTopRowHeader');
uiGridCtrl.grid.addRowHeaderColumn(expandableRowHeaderColDef, -90);
}
},
post: function ($scope, $elm, $attrs, uiGridCtrl) {
}
};
}
};
}]);
/**
* @ngdoc directive
* @name ui.grid.expandable.directive:uiGrid
* @description stacks on the uiGrid directive to register child grid with parent row when child is created
*/
module.directive('uiGrid', ['uiGridExpandableService', '$templateCache',
function (uiGridExpandableService, $templateCache) {
return {
replace: true,
priority: 599,
require: '^uiGrid',
scope: false,
compile: function () {
return {
pre: function ($scope, $elm, $attrs, uiGridCtrl) {
uiGridCtrl.grid.api.core.on.renderingComplete($scope, function() {
//if a parent grid row is on the scope, then add the parentRow property to this childGrid
if ($scope.row && $scope.row.grid && $scope.row.grid.options && $scope.row.grid.options.enableExpandable) {
/**
* @ngdoc directive
* @name ui.grid.expandable.class:Grid
* @description Additional Grid properties added by expandable module
*/
/**
* @ngdoc object
* @name parentRow
* @propertyOf ui.grid.expandable.class:Grid
* @description reference to the expanded parent row that owns this grid
*/
uiGridCtrl.grid.parentRow = $scope.row;
//todo: adjust height on parent row when child grid height changes. we need some sort of gridHeightChanged event
// uiGridCtrl.grid.core.on.canvasHeightChanged($scope, function(oldHeight, newHeight) {
// uiGridCtrl.grid.parentRow = newHeight;
// });
}
});
},
post: function ($scope, $elm, $attrs, uiGridCtrl) {
}
};
}
};
}]);
/**
* @ngdoc directive
* @name ui.grid.expandable.directive:uiGridExpandableRow
* @description directive to render the expandable row template
*/
module.directive('uiGridExpandableRow',
['uiGridExpandableService', '$timeout', '$compile', 'uiGridConstants','gridUtil','$interval', '$log',
function (uiGridExpandableService, $timeout, $compile, uiGridConstants, gridUtil, $interval, $log) {
return {
replace: false,
priority: 0,
scope: false,
compile: function () {
return {
pre: function ($scope, $elm, $attrs, uiGridCtrl) {
gridUtil.getTemplate($scope.grid.options.expandableRowTemplate).then(
function (template) {
if ($scope.grid.options.expandableRowScope) {
/**
* @ngdoc object
* @name expandableRowScope
* @propertyOf ui.grid.expandable.api:GridOptions
* @description Variables of object expandableScope will be available in the scope of the expanded subgrid
* @example
* <pre>
* $scope.gridOptions = {
* expandableRowScope: expandableScope
* }
* </pre>
*/
var expandableRowScope = $scope.grid.options.expandableRowScope;
for (var property in expandableRowScope) {
if (expandableRowScope.hasOwnProperty(property)) {
$scope[property] = expandableRowScope[property];
}
}
}
var expandedRowElement = angular.element(template);
$elm.append(expandedRowElement);
expandedRowElement = $compile(expandedRowElement)($scope);
$scope.row.expandedRendered = true;
});
},
post: function ($scope, $elm, $attrs, uiGridCtrl) {
$scope.$on('$destroy', function() {
$scope.row.expandedRendered = false;
});
}
};
}
};
}]);
/**
* @ngdoc directive
* @name ui.grid.expandable.directive:uiGridRow
* @description stacks on the uiGridRow directive to add support for expandable rows
*/
module.directive('uiGridRow',
['$compile', 'gridUtil', '$templateCache',
function ($compile, gridUtil, $templateCache) {
return {
priority: -200,
scope: false,
compile: function ($elm, $attrs) {
return {
pre: function ($scope, $elm, $attrs, controllers) {
if (!$scope.grid.options.enableExpandable) {
return;
}
$scope.expandableRow = {};
$scope.expandableRow.shouldRenderExpand = function () {
var ret = $scope.colContainer.name === 'body' && $scope.grid.options.enableExpandable !== false && $scope.row.isExpanded && (!$scope.grid.isScrollingVertically || $scope.row.expandedRendered);
return ret;
};
$scope.expandableRow.shouldRenderFiller = function () {
var ret = $scope.row.isExpanded && ( $scope.colContainer.name !== 'body' || ($scope.grid.isScrollingVertically && !$scope.row.expandedRendered));
return ret;
};
/*
* Commented out @PaulL1. This has no purpose that I can see, and causes #2964. If this code needs to be reinstated for some
* reason it needs to use drawnWidth, not width, and needs to check column visibility. It should really use render container
* visible column cache also instead of checking column.renderContainer.
function updateRowContainerWidth() {
var grid = $scope.grid;
var colWidth = 0;
grid.columns.forEach( function (column) {
if (column.renderContainer === 'left') {
colWidth += column.width;
}
});
colWidth = Math.floor(colWidth);
return '.grid' + grid.id + ' .ui-grid-pinned-container-' + $scope.colContainer.name + ', .grid' + grid.id +
' .ui-grid-pinned-container-' + $scope.colContainer.name + ' .ui-grid-render-container-' + $scope.colContainer.name +
' .ui-grid-viewport .ui-grid-canvas .ui-grid-row { width: ' + colWidth + 'px; }';
}
if ($scope.colContainer.name === 'left') {
$scope.grid.registerStyleComputation({
priority: 15,
func: updateRowContainerWidth
});
}*/
},
post: function ($scope, $elm, $attrs, controllers) {
}
};
}
};
}]);
/**
* @ngdoc directive
* @name ui.grid.expandable.directive:uiGridViewport
* @description stacks on the uiGridViewport directive to append the expandable row html elements to the
* default gridRow template
*/
module.directive('uiGridViewport',
['$compile', 'gridUtil', '$templateCache',
function ($compile, gridUtil, $templateCache) {
return {
priority: -200,
scope: false,
compile: function ($elm, $attrs) {
//todo: this adds ng-if watchers to each row even if the grid is not using expandable directive
// or options.enableExpandable == false
// The alternative is to compile the template and append to each row in a uiGridRow directive
var rowRepeatDiv = angular.element($elm.children().children()[0]);
var expandedRowFillerElement = $templateCache.get('ui-grid/expandableScrollFiller');
var expandedRowElement = $templateCache.get('ui-grid/expandableRow');
rowRepeatDiv.append(expandedRowElement);
rowRepeatDiv.append(expandedRowFillerElement);
return {
pre: function ($scope, $elm, $attrs, controllers) {
},
post: function ($scope, $elm, $attrs, controllers) {
}
};
}
};
}]);
})();
| mit |
Fidur/concrete5 | concrete/controllers/dialog/file/properties.php | 3069 | <?php
namespace Concrete\Controller\Dialog\File;
use \Concrete\Controller\Backend\UserInterface\File as BackendInterfaceFileController;
use \Concrete\Core\Http\ResponseAssetGroup;
use Permissions;
use File;
use FileAttributeKey;
use \Concrete\Core\File\EditResponse as FileEditResponse;
use Loader;
use Exception;
class Properties extends BackendInterfaceFileController {
protected $viewPath = '/dialogs/file/properties';
protected function canAccess() {
return $this->permissions->canViewFileInFileManager();
}
public function view() {
$r = ResponseAssetGroup::get();
$r->requireAsset('core/app/editable-fields');
if (isset($_REQUEST['fvID'])) {
$fv = $this->file->getVersion(Loader::helper('security')->sanitizeInt($_REQUEST['fvID']));
$this->set('previewMode', true);
} else {
$fv = $this->file->getApprovedVersion();
$this->set('previewMode', false);
}
$this->set('fv', $fv);
$this->set('form', Loader::helper('form'));
$this->set('dateHelper', Loader::helper('date'));
}
public function clear_attribute() {
if ($this->validateAction()) {
$fp = new Permissions($this->file);
if ($fp->canEditFileProperties()) {
$fv = $this->file->getVersionToModify();
$ak = FileAttributeKey::get($_REQUEST['akID']);
$fv->clearAttribute($ak);
$sr = new FileEditResponse();
$sr->setFile($this->file);
$sr->setMessage(t('Attribute cleared successfully.'));
$sr->outputJSON();
}
}
throw new Exception(t('Access Denied'));
}
public function update_attribute() {
if ($this->validateAction()) {
$fp = new Permissions($this->file);
if ($fp->canEditFileProperties()) {
$fv = $this->file->getVersionToModify();
$ak = FileAttributeKey::get($_REQUEST['name']);
$ak->saveAttributeForm($fv);
$file = File::getByID($this->file->getFileID());
$val = $file->getAttributeValueObject($ak); // ugh this is some kind of race condition or cache issue.
$sr = new FileEditResponse();
$sr->setFile($this->file);
$sr->setMessage(t('Attribute saved successfully.'));
$sr->setAdditionalDataAttribute('value', $val->getValue('displaySanitized','display'));
$sr->outputJSON();
}
}
throw new Exception(t('Access Denied'));
}
public function save() {
if ($this->validateAction()) {
$fp = new Permissions($this->file);
if ($fp->canEditFileProperties()) {
$fv = $this->file->getVersionToModify();
$value = $this->request->request->get('value');
switch($this->request->request->get('name')) {
case 'fvTitle':
$fv->updateTitle($value);
break;
case 'fvDescription':
$fv->updateDescription($value);
break;
case 'fvTags':
$fv->updateTags($value);
break;
}
$sr = new FileEditResponse();
$sr->setFile($this->file);
$sr->setMessage(t('File updated successfully.'));
$sr->setAdditionalDataAttribute('value', $value);
$sr->outputJSON();
} else {
throw new Exception(t('Access Denied.'));
}
} else {
throw new Exception(t('Access Denied.'));
}
}
}
| mit |
sureshmohan/tilt-game-android | temple_core/src/main/java/temple/core/ui/form/validation/rules/EqualStringValidationRule.java | 547 | package temple.core.ui.form.validation.rules;
import temple.core.common.interfaces.IHasValue;
/**
* Created by erikpoort on 29/07/14.
* MediaMonks
*/
public class EqualStringValidationRule extends AbstractValidationRule implements IValidationRule {
private final IHasValue _target2;
public EqualStringValidationRule(IHasValue element, IHasValue element2) {
super(element);
_target2 = element2;
}
@Override
public boolean isValid() {
return _target.getValue().equals(_target2.getValue());
}
} | mit |
Hansoft/meteor | packages/modules/server.js | 82 | require("./install-packages.js");
require("./process.js");
require("./reify.js");
| mit |
enslyon/ensl | core/modules/language/tests/src/Kernel/OverriddenConfigImportTest.php | 3431 | <?php
namespace Drupal\Tests\language\Kernel;
use Drupal\Core\Config\ConfigImporter;
use Drupal\Core\Config\StorageComparer;
use Drupal\KernelTests\KernelTestBase;
/**
* Tests importing of config with language overrides.
*
* @group language
*/
class OverriddenConfigImportTest extends KernelTestBase {
/**
* Config Importer object used for testing.
*
* @var \Drupal\Core\Config\ConfigImporter
*/
protected $configImporter;
/**
* {@inheritdoc}
*/
protected static $modules = ['system', 'language'];
/**
* {@inheritdoc}
*/
protected function setUp() {
parent::setUp();
$this->installConfig(['system']);
$this->copyConfig($this->container->get('config.storage'), $this->container->get('config.storage.sync'));
// Set up the ConfigImporter object for testing.
$storage_comparer = new StorageComparer(
$this->container->get('config.storage.sync'),
$this->container->get('config.storage')
);
$this->configImporter = new ConfigImporter(
$storage_comparer->createChangelist(),
$this->container->get('event_dispatcher'),
$this->container->get('config.manager'),
$this->container->get('lock'),
$this->container->get('config.typed'),
$this->container->get('module_handler'),
$this->container->get('module_installer'),
$this->container->get('theme_handler'),
$this->container->get('string_translation')
);
}
/**
* Tests importing overridden config alongside config in the default language.
*/
public function testConfigImportUpdates() {
$storage = $this->container->get('config.storage');
$sync = $this->container->get('config.storage.sync');
/** @var \Drupal\language\ConfigurableLanguageManagerInterface $language_manager */
$language_manager = $this->container->get('language_manager');
// Make a change to the site configuration in the default collection.
$data = $storage->read('system.site');
$data['name'] = 'English site name';
$sync->write('system.site', $data);
// Also make a change to the same config object, but using a language
// override.
/* @var \Drupal\Core\Config\StorageInterface $overridden_sync */
$overridden_sync = $sync->createCollection('language.fr');
$overridden_sync->write('system.site', ['name' => 'French site name']);
// Before we start the import, the change to the site name should not be
// present. This action also primes the cache in the config factory so that
// we can test whether the cached data is correctly updated.
$config = $this->config('system.site');
$this->assertNotEquals('English site name', $config->getRawData()['name']);
// Before the import is started the site name should not yet be overridden.
$this->assertFalse($config->hasOverrides());
$override = $language_manager->getLanguageConfigOverride('fr', 'system.site');
$this->assertTrue($override->isNew());
// Start the import of the new configuration.
$this->configImporter->reset()->import();
// Verify the new site name in the default language.
$config = $this->config('system.site')->getRawData();
$this->assertEquals('English site name', $config['name']);
// Verify the overridden site name.
$override = $language_manager->getLanguageConfigOverride('fr', 'system.site');
$this->assertEquals('French site name', $override->get('name'));
}
}
| gpl-2.0 |
enslyon/ensl | core/tests/Drupal/KernelTests/Core/Config/ConfigDependencyTest.php | 32794 | <?php
namespace Drupal\KernelTests\Core\Config;
use Drupal\entity_test\Entity\EntityTest;
use Drupal\KernelTests\Core\Entity\EntityKernelTestBase;
/**
* Tests for configuration dependencies.
*
* @coversDefaultClass \Drupal\Core\Config\ConfigManager
*
* @group config
*/
class ConfigDependencyTest extends EntityKernelTestBase {
/**
* Modules to enable.
*
* The entity_test module is enabled to provide content entity types.
*
* @var array
*/
public static $modules = ['config_test', 'entity_test', 'user'];
/**
* Tests that calculating dependencies for system module.
*/
public function testNonEntity() {
$this->installConfig(['system']);
$config_manager = \Drupal::service('config.manager');
$dependents = $config_manager->findConfigEntityDependents('module', ['system']);
$this->assertTrue(isset($dependents['system.site']), 'Simple configuration system.site has a UUID key even though it is not a configuration entity and therefore is found when looking for dependencies of the System module.');
// Ensure that calling
// \Drupal\Core\Config\ConfigManager::findConfigEntityDependentsAsEntities()
// does not try to load system.site as an entity.
$config_manager->findConfigEntityDependentsAsEntities('module', ['system']);
}
/**
* Tests creating dependencies on configuration entities.
*/
public function testDependencyManagement() {
/** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
$config_manager = \Drupal::service('config.manager');
$storage = $this->container->get('entity.manager')->getStorage('config_test');
// Test dependencies between modules.
$entity1 = $storage->create(
[
'id' => 'entity1',
'dependencies' => [
'enforced' => [
'module' => ['node'],
],
],
]
);
$entity1->save();
$dependents = $config_manager->findConfigEntityDependents('module', ['node']);
$this->assertTrue(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 has a dependency on the Node module.');
$dependents = $config_manager->findConfigEntityDependents('module', ['config_test']);
$this->assertTrue(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 has a dependency on the config_test module.');
$dependents = $config_manager->findConfigEntityDependents('module', ['views']);
$this->assertFalse(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 does not have a dependency on the Views module.');
// Ensure that the provider of the config entity is not actually written to
// the dependencies array.
$raw_config = $this->config('config_test.dynamic.entity1');
$root_module_dependencies = $raw_config->get('dependencies.module');
$this->assertTrue(empty($root_module_dependencies), 'Node module is not written to the root dependencies array as it is enforced.');
// Create additional entities to test dependencies on config entities.
$entity2 = $storage->create(['id' => 'entity2', 'dependencies' => ['enforced' => ['config' => [$entity1->getConfigDependencyName()]]]]);
$entity2->save();
$entity3 = $storage->create(['id' => 'entity3', 'dependencies' => ['enforced' => ['config' => [$entity2->getConfigDependencyName()]]]]);
$entity3->save();
$entity4 = $storage->create(['id' => 'entity4', 'dependencies' => ['enforced' => ['config' => [$entity3->getConfigDependencyName()]]]]);
$entity4->save();
// Test getting $entity1's dependencies as configuration dependency objects.
$dependents = $config_manager->findConfigEntityDependents('config', [$entity1->getConfigDependencyName()]);
$this->assertFalse(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 does not have a dependency on itself.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity2']), 'config_test.dynamic.entity2 has a dependency on config_test.dynamic.entity1.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity3']), 'config_test.dynamic.entity3 has a dependency on config_test.dynamic.entity1.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity4']), 'config_test.dynamic.entity4 has a dependency on config_test.dynamic.entity1.');
// Test getting $entity2's dependencies as entities.
$dependents = $config_manager->findConfigEntityDependentsAsEntities('config', [$entity2->getConfigDependencyName()]);
$dependent_ids = $this->getDependentIds($dependents);
$this->assertFalse(in_array('config_test:entity1', $dependent_ids), 'config_test.dynamic.entity1 does not have a dependency on config_test.dynamic.entity1.');
$this->assertFalse(in_array('config_test:entity2', $dependent_ids), 'config_test.dynamic.entity2 does not have a dependency on itself.');
$this->assertTrue(in_array('config_test:entity3', $dependent_ids), 'config_test.dynamic.entity3 has a dependency on config_test.dynamic.entity2.');
$this->assertTrue(in_array('config_test:entity4', $dependent_ids), 'config_test.dynamic.entity4 has a dependency on config_test.dynamic.entity2.');
// Test getting node module's dependencies as configuration dependency
// objects.
$dependents = $config_manager->findConfigEntityDependents('module', ['node']);
$this->assertTrue(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 has a dependency on the Node module.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity2']), 'config_test.dynamic.entity2 has a dependency on the Node module.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity3']), 'config_test.dynamic.entity3 has a dependency on the Node module.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity4']), 'config_test.dynamic.entity4 has a dependency on the Node module.');
// Test getting node module's dependencies as configuration dependency
// objects after making $entity3 also dependent on node module but $entity1
// no longer depend on node module.
$entity1->setEnforcedDependencies([])->save();
$entity3->setEnforcedDependencies(['module' => ['node'], 'config' => [$entity2->getConfigDependencyName()]])->save();
$dependents = $config_manager->findConfigEntityDependents('module', ['node']);
$this->assertFalse(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 does not have a dependency on the Node module.');
$this->assertFalse(isset($dependents['config_test.dynamic.entity2']), 'config_test.dynamic.entity2 does not have a dependency on the Node module.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity3']), 'config_test.dynamic.entity3 has a dependency on the Node module.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity4']), 'config_test.dynamic.entity4 has a dependency on the Node module.');
// Test dependency on a content entity.
$entity_test = EntityTest::create([
'name' => $this->randomString(),
'type' => 'entity_test',
]);
$entity_test->save();
$entity2->setEnforcedDependencies(['config' => [$entity1->getConfigDependencyName()], 'content' => [$entity_test->getConfigDependencyName()]])->save();
$dependents = $config_manager->findConfigEntityDependents('content', [$entity_test->getConfigDependencyName()]);
$this->assertFalse(isset($dependents['config_test.dynamic.entity1']), 'config_test.dynamic.entity1 does not have a dependency on the content entity.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity2']), 'config_test.dynamic.entity2 has a dependency on the content entity.');
$this->assertTrue(isset($dependents['config_test.dynamic.entity3']), 'config_test.dynamic.entity3 has a dependency on the content entity (via entity2).');
$this->assertTrue(isset($dependents['config_test.dynamic.entity4']), 'config_test.dynamic.entity4 has a dependency on the content entity (via entity3).');
// Create a configuration entity of a different type with the same ID as one
// of the entities already created.
$alt_storage = $this->container->get('entity.manager')->getStorage('config_query_test');
$alt_storage->create(['id' => 'entity1', 'dependencies' => ['enforced' => ['config' => [$entity1->getConfigDependencyName()]]]])->save();
$alt_storage->create(['id' => 'entity2', 'dependencies' => ['enforced' => ['module' => ['views']]]])->save();
$dependents = $config_manager->findConfigEntityDependentsAsEntities('config', [$entity1->getConfigDependencyName()]);
$dependent_ids = $this->getDependentIds($dependents);
$this->assertFalse(in_array('config_test:entity1', $dependent_ids), 'config_test.dynamic.entity1 does not have a dependency on itself.');
$this->assertTrue(in_array('config_test:entity2', $dependent_ids), 'config_test.dynamic.entity2 has a dependency on config_test.dynamic.entity1.');
$this->assertTrue(in_array('config_test:entity3', $dependent_ids), 'config_test.dynamic.entity3 has a dependency on config_test.dynamic.entity1.');
$this->assertTrue(in_array('config_test:entity4', $dependent_ids), 'config_test.dynamic.entity4 has a dependency on config_test.dynamic.entity1.');
$this->assertTrue(in_array('config_query_test:entity1', $dependent_ids), 'config_query_test.dynamic.entity1 has a dependency on config_test.dynamic.entity1.');
$this->assertFalse(in_array('config_query_test:entity2', $dependent_ids), 'config_query_test.dynamic.entity2 does not have a dependency on config_test.dynamic.entity1.');
$dependents = $config_manager->findConfigEntityDependentsAsEntities('module', ['node', 'views']);
$dependent_ids = $this->getDependentIds($dependents);
$this->assertFalse(in_array('config_test:entity1', $dependent_ids), 'config_test.dynamic.entity1 does not have a dependency on Views or Node.');
$this->assertFalse(in_array('config_test:entity2', $dependent_ids), 'config_test.dynamic.entity2 does not have a dependency on Views or Node.');
$this->assertTrue(in_array('config_test:entity3', $dependent_ids), 'config_test.dynamic.entity3 has a dependency on Views or Node.');
$this->assertTrue(in_array('config_test:entity4', $dependent_ids), 'config_test.dynamic.entity4 has a dependency on Views or Node.');
$this->assertFalse(in_array('config_query_test:entity1', $dependent_ids), 'config_test.query.entity1 does not have a dependency on Views or Node.');
$this->assertTrue(in_array('config_query_test:entity2', $dependent_ids), 'config_test.query.entity2 has a dependency on Views or Node.');
$dependents = $config_manager->findConfigEntityDependentsAsEntities('module', ['config_test']);
$dependent_ids = $this->getDependentIds($dependents);
$this->assertTrue(in_array('config_test:entity1', $dependent_ids), 'config_test.dynamic.entity1 has a dependency on config_test module.');
$this->assertTrue(in_array('config_test:entity2', $dependent_ids), 'config_test.dynamic.entity2 has a dependency on config_test module.');
$this->assertTrue(in_array('config_test:entity3', $dependent_ids), 'config_test.dynamic.entity3 has a dependency on config_test module.');
$this->assertTrue(in_array('config_test:entity4', $dependent_ids), 'config_test.dynamic.entity4 has a dependency on config_test module.');
$this->assertTrue(in_array('config_query_test:entity1', $dependent_ids), 'config_test.query.entity1 has a dependency on config_test module.');
$this->assertTrue(in_array('config_query_test:entity2', $dependent_ids), 'config_test.query.entity2 has a dependency on config_test module.');
// Test the ability to find missing content dependencies.
$missing_dependencies = $config_manager->findMissingContentDependencies();
$this->assertEqual([], $missing_dependencies);
$expected = [
$entity_test->uuid() => [
'entity_type' => 'entity_test',
'bundle' => $entity_test->bundle(),
'uuid' => $entity_test->uuid(),
],
];
// Delete the content entity so that is it now missing.
$entity_test->delete();
$missing_dependencies = $config_manager->findMissingContentDependencies();
$this->assertEqual($expected, $missing_dependencies);
// Add a fake missing dependency to ensure multiple missing dependencies
// work.
$entity1->setEnforcedDependencies(['content' => [$entity_test->getConfigDependencyName(), 'entity_test:bundle:uuid']])->save();
$expected['uuid'] = [
'entity_type' => 'entity_test',
'bundle' => 'bundle',
'uuid' => 'uuid',
];
$missing_dependencies = $config_manager->findMissingContentDependencies();
$this->assertEqual($expected, $missing_dependencies);
}
/**
* Tests ConfigManager::uninstall() and config entity dependency management.
*/
public function testConfigEntityUninstall() {
/** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
$config_manager = \Drupal::service('config.manager');
/** @var \Drupal\Core\Config\Entity\ConfigEntityStorage $storage */
$storage = $this->container->get('entity.manager')
->getStorage('config_test');
// Test dependencies between modules.
$entity1 = $storage->create(
[
'id' => 'entity1',
'dependencies' => [
'enforced' => [
'module' => ['node', 'config_test'],
],
],
]
);
$entity1->save();
$entity2 = $storage->create(
[
'id' => 'entity2',
'dependencies' => [
'enforced' => [
'config' => [$entity1->getConfigDependencyName()],
],
],
]
);
$entity2->save();
// Perform a module rebuild so we can know where the node module is located
// and uninstall it.
// @todo Remove as part of https://www.drupal.org/node/2186491
system_rebuild_module_data();
// Test that doing a config uninstall of the node module deletes entity2
// since it is dependent on entity1 which is dependent on the node module.
$config_manager->uninstall('module', 'node');
$this->assertFalse($storage->load('entity1'), 'Entity 1 deleted');
$this->assertFalse($storage->load('entity2'), 'Entity 2 deleted');
}
/**
* Data provider for self::testConfigEntityUninstallComplex().
*/
public function providerConfigEntityUninstallComplex() {
// Ensure that alphabetical order has no influence on dependency fixing and
// removal.
return [
[['a', 'b', 'c', 'd', 'e']],
[['e', 'd', 'c', 'b', 'a']],
[['e', 'c', 'd', 'a', 'b']],
];
}
/**
* Tests complex configuration entity dependency handling during uninstall.
*
* Configuration entities can be deleted or updated during module uninstall
* because they have dependencies on the module.
*
* @param array $entity_id_suffixes
* The suffixes to add to the 4 entities created by the test.
*
* @dataProvider providerConfigEntityUninstallComplex
*/
public function testConfigEntityUninstallComplex(array $entity_id_suffixes) {
/** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
$config_manager = \Drupal::service('config.manager');
/** @var \Drupal\Core\Config\Entity\ConfigEntityStorage $storage */
$storage = $this->container->get('entity.manager')
->getStorage('config_test');
// Entity 1 will be deleted because it depends on node.
$entity_1 = $storage->create(
[
'id' => 'entity_' . $entity_id_suffixes[0],
'dependencies' => [
'enforced' => [
'module' => ['node', 'config_test'],
],
],
]
);
$entity_1->save();
// Entity 2 has a dependency on entity 1 but it can be fixed because
// \Drupal\config_test\Entity::onDependencyRemoval() will remove the
// dependency before config entities are deleted.
$entity_2 = $storage->create(
[
'id' => 'entity_' . $entity_id_suffixes[1],
'dependencies' => [
'enforced' => [
'config' => [$entity_1->getConfigDependencyName()],
],
],
]
);
$entity_2->save();
// Entity 3 will be unchanged because it is dependent on entity 2 which can
// be fixed. The ConfigEntityInterface::onDependencyRemoval() method will
// not be called for this entity.
$entity_3 = $storage->create(
[
'id' => 'entity_' . $entity_id_suffixes[2],
'dependencies' => [
'enforced' => [
'config' => [$entity_2->getConfigDependencyName()],
],
],
]
);
$entity_3->save();
// Entity 4's config dependency will be fixed but it will still be deleted
// because it also depends on the node module.
$entity_4 = $storage->create(
[
'id' => 'entity_' . $entity_id_suffixes[3],
'dependencies' => [
'enforced' => [
'config' => [$entity_1->getConfigDependencyName()],
'module' => ['node', 'config_test'],
],
],
]
);
$entity_4->save();
// Entity 5 will be fixed because it is dependent on entity 3, which is
// unchanged, and entity 1 which will be fixed because
// \Drupal\config_test\Entity::onDependencyRemoval() will remove the
// dependency.
$entity_5 = $storage->create(
[
'id' => 'entity_' . $entity_id_suffixes[4],
'dependencies' => [
'enforced' => [
'config' => [
$entity_1->getConfigDependencyName(),
$entity_3->getConfigDependencyName(),
],
],
],
]
);
$entity_5->save();
// Set a more complicated test where dependencies will be fixed.
\Drupal::state()->set('config_test.fix_dependencies', [$entity_1->getConfigDependencyName()]);
\Drupal::state()->set('config_test.on_dependency_removal_called', []);
// Do a dry run using
// \Drupal\Core\Config\ConfigManager::getConfigEntitiesToChangeOnDependencyRemoval().
$config_entities = $config_manager->getConfigEntitiesToChangeOnDependencyRemoval('module', ['node']);
// Assert that \Drupal\config_test\Entity\ConfigTest::onDependencyRemoval()
// is called as expected and with the correct dependencies.
$called = \Drupal::state()->get('config_test.on_dependency_removal_called', []);
$this->assertArrayNotHasKey($entity_3->id(), $called, 'ConfigEntityInterface::onDependencyRemoval() is not called for entity 3.');
$this->assertSame([$entity_1->id(), $entity_4->id(), $entity_2->id(), $entity_5->id()], array_keys($called), 'The most dependent entites have ConfigEntityInterface::onDependencyRemoval() called first.');
$this->assertSame(['config' => [], 'content' => [], 'module' => ['node'], 'theme' => []], $called[$entity_1->id()]);
$this->assertSame(['config' => [$entity_1->getConfigDependencyName()], 'content' => [], 'module' => [], 'theme' => []], $called[$entity_2->id()]);
$this->assertSame(['config' => [$entity_1->getConfigDependencyName()], 'content' => [], 'module' => ['node'], 'theme' => []], $called[$entity_4->id()]);
$this->assertSame(['config' => [$entity_1->getConfigDependencyName()], 'content' => [], 'module' => [], 'theme' => []], $called[$entity_5->id()]);
$this->assertEqual($entity_1->uuid(), $config_entities['delete'][1]->uuid(), 'Entity 1 will be deleted.');
$this->assertEqual($entity_2->uuid(), $config_entities['update'][0]->uuid(), 'Entity 2 will be updated.');
$this->assertEqual($entity_3->uuid(), reset($config_entities['unchanged'])->uuid(), 'Entity 3 is not changed.');
$this->assertEqual($entity_4->uuid(), $config_entities['delete'][0]->uuid(), 'Entity 4 will be deleted.');
$this->assertEqual($entity_5->uuid(), $config_entities['update'][1]->uuid(), 'Entity 5 is updated.');
// Perform a module rebuild so we can know where the node module is located
// and uninstall it.
// @todo Remove as part of https://www.drupal.org/node/2186491
system_rebuild_module_data();
// Perform the uninstall.
$config_manager->uninstall('module', 'node');
// Test that expected actions have been performed.
$this->assertFalse($storage->load($entity_1->id()), 'Entity 1 deleted');
$entity_2 = $storage->load($entity_2->id());
$this->assertTrue($entity_2, 'Entity 2 not deleted');
$this->assertEqual($entity_2->calculateDependencies()->getDependencies()['config'], [], 'Entity 2 dependencies updated to remove dependency on entity 1.');
$entity_3 = $storage->load($entity_3->id());
$this->assertTrue($entity_3, 'Entity 3 not deleted');
$this->assertEqual($entity_3->calculateDependencies()->getDependencies()['config'], [$entity_2->getConfigDependencyName()], 'Entity 3 still depends on entity 2.');
$this->assertFalse($storage->load($entity_4->id()), 'Entity 4 deleted');
}
/**
 * Tests dependency fixing via third-party settings on module uninstall.
 *
 * @covers ::uninstall
 * @covers ::getConfigEntitiesToChangeOnDependencyRemoval
 */
public function testConfigEntityUninstallThirdParty() {
  /** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
  $config_manager = \Drupal::service('config.manager');
  /** @var \Drupal\Core\Config\Entity\ConfigEntityStorage $storage */
  $storage = $this->container->get('entity_type.manager')
    ->getStorage('config_test');
  // Entity 1 will be fixed because it only has a dependency via third-party
  // settings, which are fixable.
  $entity_1 = $storage->create([
    'id' => 'entity_1',
    'dependencies' => [
      'enforced' => [
        'module' => ['config_test'],
      ],
    ],
    'third_party_settings' => [
      'node' => [
        'foo' => 'bar',
      ],
    ],
  ]);
  $entity_1->save();
  // Entity 2 has a dependency on entity 1.
  $entity_2 = $storage->create([
    'id' => 'entity_2',
    'dependencies' => [
      'enforced' => [
        'config' => [$entity_1->getConfigDependencyName()],
      ],
    ],
    'third_party_settings' => [
      'node' => [
        'foo' => 'bar',
      ],
    ],
  ]);
  $entity_2->save();
  // Entity 3 will be unchanged because it is dependent on entity 2 which can
  // be fixed. The ConfigEntityInterface::onDependencyRemoval() method will
  // not be called for this entity.
  $entity_3 = $storage->create([
    'id' => 'entity_3',
    'dependencies' => [
      'enforced' => [
        'config' => [$entity_2->getConfigDependencyName()],
      ],
    ],
  ]);
  $entity_3->save();
  // Entity 4's config dependency will be fixed but it will still be deleted
  // because it also depends on the node module.
  $entity_4 = $storage->create([
    'id' => 'entity_4',
    'dependencies' => [
      'enforced' => [
        'config' => [$entity_1->getConfigDependencyName()],
        'module' => ['node', 'config_test'],
      ],
    ],
  ]);
  $entity_4->save();
  \Drupal::state()->set('config_test.fix_dependencies', []);
  \Drupal::state()->set('config_test.on_dependency_removal_called', []);
  // Do a dry run using
  // \Drupal\Core\Config\ConfigManager::getConfigEntitiesToChangeOnDependencyRemoval().
  $config_entities = $config_manager->getConfigEntitiesToChangeOnDependencyRemoval('module', ['node']);
  $config_entity_ids = [
    'update' => [],
    'delete' => [],
    'unchanged' => [],
  ];
  foreach ($config_entities as $type => $config_entities_by_type) {
    foreach ($config_entities_by_type as $config_entity) {
      $config_entity_ids[$type][] = $config_entity->id();
    }
  }
  $expected = [
    'update' => [$entity_1->id(), $entity_2->id()],
    'delete' => [$entity_4->id()],
    'unchanged' => [$entity_3->id()],
  ];
  $this->assertSame($expected, $config_entity_ids);
  // Assert that \Drupal\config_test\Entity\ConfigTest::onDependencyRemoval()
  // is called as expected and with the correct dependencies.
  $called = \Drupal::state()->get('config_test.on_dependency_removal_called', []);
  $this->assertArrayNotHasKey($entity_3->id(), $called, 'ConfigEntityInterface::onDependencyRemoval() is not called for entity 3.');
  $this->assertSame([$entity_1->id(), $entity_4->id(), $entity_2->id()], array_keys($called), 'The most dependent entities have ConfigEntityInterface::onDependencyRemoval() called first.');
  $this->assertSame(['config' => [], 'content' => [], 'module' => ['node'], 'theme' => []], $called[$entity_1->id()]);
  $this->assertSame(['config' => [], 'content' => [], 'module' => ['node'], 'theme' => []], $called[$entity_2->id()]);
  $this->assertSame(['config' => [], 'content' => [], 'module' => ['node'], 'theme' => []], $called[$entity_4->id()]);
  // Perform a module rebuild so we can know where the node module is located
  // and uninstall it.
  // @todo Remove as part of https://www.drupal.org/node/2186491
  system_rebuild_module_data();
  // Perform the uninstall.
  $config_manager->uninstall('module', 'node');
  // Test that expected actions have been performed. Storage::load() returns
  // NULL for a missing entity, so assertNotNull()/assertNull() are the
  // precise checks; the original assertTrue()/assertFalse() on entity
  // objects only worked through the deprecated legacy cast-to-bool
  // assertions.
  $entity_1 = $storage->load($entity_1->id());
  $this->assertNotNull($entity_1, 'Entity 1 not deleted');
  // PHPUnit takes the expected value first; the original calls had the
  // arguments reversed, which produces misleading failure diffs.
  $this->assertSame([], $entity_1->getThirdPartySettings('node'), 'Entity 1 third party settings updated.');
  $entity_2 = $storage->load($entity_2->id());
  $this->assertNotNull($entity_2, 'Entity 2 not deleted');
  $this->assertSame([], $entity_2->getThirdPartySettings('node'), 'Entity 2 third party settings updated.');
  $this->assertSame([$entity_1->getConfigDependencyName()], $entity_2->calculateDependencies()->getDependencies()['config'], 'Entity 2 still depends on entity 1.');
  $entity_3 = $storage->load($entity_3->id());
  $this->assertNotNull($entity_3, 'Entity 3 not deleted');
  $this->assertSame([$entity_2->getConfigDependencyName()], $entity_3->calculateDependencies()->getDependencies()['config'], 'Entity 3 still depends on entity 2.');
  $this->assertNull($storage->load($entity_4->id()), 'Entity 4 deleted');
}
/**
 * Tests deleting a configuration entity and dependency management.
 */
public function testConfigEntityDelete() {
  /** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
  $config_manager = \Drupal::service('config.manager');
  /** @var \Drupal\Core\Config\Entity\ConfigEntityStorage $storage */
  // Use 'entity_type.manager', consistent with the other test methods in
  // this class; the 'entity.manager' service is deprecated.
  $storage = $this->container->get('entity_type.manager')->getStorage('config_test');
  // Test dependencies between configuration entities.
  $entity1 = $storage->create(
    [
      'id' => 'entity1',
    ]
  );
  $entity1->save();
  $entity2 = $storage->create(
    [
      'id' => 'entity2',
      'dependencies' => [
        'enforced' => [
          'config' => [$entity1->getConfigDependencyName()],
        ],
      ],
    ]
  );
  $entity2->save();
  // Do a dry run using
  // \Drupal\Core\Config\ConfigManager::getConfigEntitiesToChangeOnDependencyRemoval().
  $config_entities = $config_manager->getConfigEntitiesToChangeOnDependencyRemoval('config', [$entity1->getConfigDependencyName()]);
  $this->assertEquals($entity2->uuid(), reset($config_entities['delete'])->uuid(), 'Entity 2 will be deleted.');
  $this->assertEmpty($config_entities['update'], 'No dependent configuration entities will be updated.');
  $this->assertEmpty($config_entities['unchanged'], 'No dependent configuration entities will be unchanged.');
  // Test that doing a delete of entity1 deletes entity2 since it is dependent
  // on entity1. Storage::load() returns NULL for a missing entity.
  $entity1->delete();
  $this->assertNull($storage->load('entity1'), 'Entity 1 deleted');
  $this->assertNull($storage->load('entity2'), 'Entity 2 deleted');
  // Set a more complicated test where dependencies will be fixed.
  \Drupal::state()->set('config_test.fix_dependencies', [$entity1->getConfigDependencyName()]);
  // Entity1 will be deleted by the test.
  $entity1 = $storage->create(
    [
      'id' => 'entity1',
    ]
  );
  $entity1->save();
  // Entity2 has a dependency on Entity1 but it can be fixed because
  // \Drupal\config_test\Entity::onDependencyRemoval() will remove the
  // dependency before config entities are deleted.
  $entity2 = $storage->create(
    [
      'id' => 'entity2',
      'dependencies' => [
        'enforced' => [
          'config' => [$entity1->getConfigDependencyName()],
        ],
      ],
    ]
  );
  $entity2->save();
  // Entity3 will be unchanged because it is dependent on Entity2 which can
  // be fixed.
  $entity3 = $storage->create(
    [
      'id' => 'entity3',
      'dependencies' => [
        'enforced' => [
          'config' => [$entity2->getConfigDependencyName()],
        ],
      ],
    ]
  );
  $entity3->save();
  // Do a dry run using
  // \Drupal\Core\Config\ConfigManager::getConfigEntitiesToChangeOnDependencyRemoval().
  $config_entities = $config_manager->getConfigEntitiesToChangeOnDependencyRemoval('config', [$entity1->getConfigDependencyName()]);
  $this->assertEmpty($config_entities['delete'], 'No dependent configuration entities will be deleted.');
  $this->assertEquals($entity2->uuid(), reset($config_entities['update'])->uuid(), 'Entity 2 will be updated.');
  $this->assertEquals($entity3->uuid(), reset($config_entities['unchanged'])->uuid(), 'Entity 3 is not changed.');
  // Perform the delete; onDependencyRemoval() fixes entity2's dependency.
  $entity1->delete();
  // Test that expected actions have been performed.
  $this->assertNull($storage->load('entity1'), 'Entity 1 deleted');
  $entity2 = $storage->load('entity2');
  $this->assertNotNull($entity2, 'Entity 2 not deleted');
  $this->assertEquals([], $entity2->calculateDependencies()->getDependencies()['config'], 'Entity 2 dependencies updated to remove dependency on Entity1.');
  $entity3 = $storage->load('entity3');
  $this->assertNotNull($entity3, 'Entity 3 not deleted');
  $this->assertEquals([$entity2->getConfigDependencyName()], $entity3->calculateDependencies()->getDependencies()['config'], 'Entity 3 still depends on Entity 2.');
}
/**
 * Tests getConfigEntitiesToChangeOnDependencyRemoval() with content entities.
 *
 * At the moment there is no runtime code that calculates configuration
 * dependencies on content entity delete because this calculation is expensive
 * and all content dependencies are soft. This test ensures that the code
 * works for content entities.
 *
 * @see \Drupal\Core\Config\ConfigManager::getConfigEntitiesToChangeOnDependencyRemoval()
 */
public function testContentEntityDelete() {
  $this->installEntitySchema('entity_test');
  /** @var \Drupal\Core\Config\ConfigManagerInterface $config_manager */
  $config_manager = \Drupal::service('config.manager');
  $content_entity = EntityTest::create();
  $content_entity->save();
  /** @var \Drupal\Core\Config\Entity\ConfigEntityStorage $storage */
  // Use 'entity_type.manager', consistent with the other test methods in
  // this class; the 'entity.manager' service is deprecated.
  $storage = $this->container->get('entity_type.manager')->getStorage('config_test');
  // Entity 1 depends directly on the content entity.
  $entity1 = $storage->create(
    [
      'id' => 'entity1',
      'dependencies' => [
        'enforced' => [
          'content' => [$content_entity->getConfigDependencyName()],
        ],
      ],
    ]
  );
  $entity1->save();
  // Entity 2 depends on entity 1 and is therefore in the dependency chain.
  $entity2 = $storage->create(
    [
      'id' => 'entity2',
      'dependencies' => [
        'enforced' => [
          'config' => [$entity1->getConfigDependencyName()],
        ],
      ],
    ]
  );
  $entity2->save();
  // Create a configuration entity that is not in the dependency chain.
  $entity3 = $storage->create(['id' => 'entity3']);
  $entity3->save();
  $config_entities = $config_manager->getConfigEntitiesToChangeOnDependencyRemoval('content', [$content_entity->getConfigDependencyName()]);
  // The most dependent entity (entity 2) is listed for deletion first.
  $this->assertEquals($entity1->uuid(), $config_entities['delete'][1]->uuid(), 'Entity 1 will be deleted.');
  $this->assertEquals($entity2->uuid(), $config_entities['delete'][0]->uuid(), 'Entity 2 will be deleted.');
  $this->assertEmpty($config_entities['update'], 'No dependencies of the content entity will be updated.');
  $this->assertEmpty($config_entities['unchanged'], 'No dependencies of the content entity will be unchanged.');
}
/**
 * Gets a list of identifiers from an array of configuration entities.
 *
 * @param \Drupal\Core\Config\Entity\ConfigEntityInterface[] $dependents
 *   An array of configuration entities.
 *
 * @return array
 *   An array with values of entity_type_id:ID
 */
protected function getDependentIds(array $dependents) {
  // Reindex first so the result is always a sequential list, then map each
  // entity to its "entity_type_id:ID" identifier.
  return array_map(
    function ($entity) {
      return $entity->getEntityTypeId() . ':' . $entity->id();
    },
    array_values($dependents)
  );
}
}
| gpl-2.0 |
amol-crescente/liveform | wp-content/themes/Avada/includes/class-avada-maintenance.php | 1818 | <?php
class Avada_Maintenance {
    /**
     * Determines if we should activate the maintenance mode or not.
     *
     * @access private
     * @var bool
     */
    private $maintenance = false;

    /**
     * The message that will be displayed to all non-admins.
     * This will be displayed on the frontend instead of the normal site.
     *
     * @access private
     * @var string
     */
    private $users_warning = '';

    /**
     * Same as $users_warning but for admins.
     *
     * @access private
     * @var string
     */
    private $admin_warning = '';

    /**
     * Constructor. Renders the maintenance page (and exits) when maintenance
     * mode is enabled and the current request is a frontend, non-login page.
     *
     * @access public
     *
     * @param bool   $maintenance   Whether maintenance mode is active. Only
     *                              boolean true enables it (strict check).
     * @param string $users_warning Message shown to regular visitors.
     * @param string $admin_warning Message shown to administrators.
     */
    public function __construct( $maintenance = false, $users_warning = '', $admin_warning = '' ) {
        // No need to do anything if we're not in maintenance mode.
        if ( true !== $maintenance ) {
            return;
        }
        // Only continue if we're on the frontend; the admin area must stay
        // reachable so maintenance mode can be turned off again.
        if ( is_admin() ) {
            return;
        }
        $this->maintenance   = $maintenance;
        $this->users_warning = $users_warning;
        $this->admin_warning = $admin_warning;
        // Never block the login/registration screens, otherwise users could
        // lock themselves out. The original code repeated the is_admin()
        // check here, but that case has already returned above, so only the
        // pagenow check remains. The isset() guard avoids a PHP notice when
        // $GLOBALS['pagenow'] is not populated, and strict in_array()
        // comparison avoids loose string coercion.
        if ( isset( $GLOBALS['pagenow'] ) && in_array( $GLOBALS['pagenow'], array( 'wp-login.php', 'wp-register.php' ), true ) ) {
            return;
        }
        $this->maintenance_page();
    }

    /**
     * Displays the maintenance page and terminates the request.
     *
     * @access public
     */
    public function maintenance_page() { ?>
    <div class="wrapper" style="width:800px;max-width:95%;background:#f7f7f7;border:1px solid #f2f2f2;border-radius:3px;margin:auto;margin-top:200px;">
        <div class="inner" style="padding:2rem;font-size:1.2rem;color:#333;">
            <?php if ( current_user_can( 'install_plugins' ) ) : // current user is an admin ?>
                <?php // NOTE(review): the warning is echoed unescaped; it may intentionally contain HTML, but if it can carry untrusted input it should be passed through wp_kses_post(). ?>
                <p><?php echo $this->admin_warning; ?></p>
            <?php else : ?>
                <p><?php echo $this->users_warning; ?></p>
            <?php endif; ?>
        </div>
    </div>
    <?php
        exit;
    }
}
| gpl-2.0 |